Mirror of https://github.com/safing/portmaster, synced 2025-09-03 02:59:13 +00:00

Rename filterlist to filterlists

Commit e77d971259 (parent f630df0b1f)
17 changed files with 41 additions and 41 deletions
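
This commit is a pure rename: the package intel/filterlist becomes intel/filterlists, and every package clause, import path, module name, and log/worker prefix is updated to match. For orientation, here is a minimal sketch of what a caller looks like after the rename; the import path and the LookupDomain call are taken from the hunks below, while the return type, the domain value, and the surrounding main function are illustrative assumptions only.

package main

import (
	"fmt"

	// Before this commit: "github.com/safing/portmaster/intel/filterlist"
	"github.com/safing/portmaster/intel/filterlists"
)

func main() {
	// The diff shows list, err := filterlists.LookupDomain(domain) feeding a
	// []string field, so a []string result is assumed here. In the running
	// program the filterlists module must be loaded first, otherwise lookups
	// only warn and return nothing (see the lookup hunks further down).
	lists, err := filterlists.LookupDomain("example.com.")
	if err != nil {
		fmt.Println("domain lookup failed:", err)
		return
	}
	fmt.Println("matched filter lists:", lists)
}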
File 1 of 17:

@@ -8,7 +8,7 @@ import (
 	"sync"
 
 	"github.com/safing/portbase/log"
-	"github.com/safing/portmaster/intel/filterlist"
+	"github.com/safing/portmaster/intel/filterlists"
 	"github.com/safing/portmaster/intel/geoip"
 	"github.com/safing/portmaster/network/netutils"
 	"github.com/safing/portmaster/status"
@@ -35,7 +35,7 @@ type Entity struct {
 	fetchLocationOnce sync.Once
 
 	Lists []string
-	ListsMap filterlist.LookupMap
+	ListsMap filterlists.LookupMap
 
 	// we only load each data above at most once
 	loadDomainListOnce sync.Once
@@ -231,7 +231,7 @@ func (e *Entity) getDomainLists() {
 
 	e.loadDomainListOnce.Do(func() {
 		log.Debugf("intel: loading domain list for %s", domain)
-		list, err := filterlist.LookupDomain(domain)
+		list, err := filterlists.LookupDomain(domain)
 		if err != nil {
 			log.Errorf("intel: failed to get domain blocklists for %s: %s", domain, err)
 			e.loadDomainListOnce = sync.Once{}
@@ -255,7 +255,7 @@ func (e *Entity) getASNLists() {
 
 	log.Debugf("intel: loading ASN list for %d", asn)
 	e.loadAsnListOnce.Do(func() {
-		list, err := filterlist.LookupASNString(fmt.Sprintf("%d", asn))
+		list, err := filterlists.LookupASNString(fmt.Sprintf("%d", asn))
 		if err != nil {
 			log.Errorf("intel: failed to get ASN blocklist for %d: %s", asn, err)
 			e.loadAsnListOnce = sync.Once{}
@@ -279,7 +279,7 @@ func (e *Entity) getCountryLists() {
 
 	log.Debugf("intel: loading country list for %s", country)
 	e.loadCoutryListOnce.Do(func() {
-		list, err := filterlist.LookupCountry(country)
+		list, err := filterlists.LookupCountry(country)
 		if err != nil {
 			log.Errorf("intel: failed to load country blocklist for %s: %s", country, err)
 			e.loadCoutryListOnce = sync.Once{}
@@ -312,7 +312,7 @@ func (e *Entity) getIPLists() {
 
 	log.Debugf("intel: loading IP list for %s", ip)
 	e.loadIPListOnce.Do(func() {
-		list, err := filterlist.LookupIP(ip)
+		list, err := filterlists.LookupIP(ip)
 
 		if err != nil {
 			log.Errorf("intel: failed to get IP blocklist for %s: %s", ip.String(), err)
@@ -335,7 +335,7 @@ func (e *Entity) GetLists() ([]string, bool) {
 }
 
 // GetListsMap is like GetLists but returns a lookup map for list IDs.
-func (e *Entity) GetListsMap() (filterlist.LookupMap, bool) {
+func (e *Entity) GetListsMap() (filterlists.LookupMap, bool) {
 	e.getLists()
 
 	if e.ListsMap == nil {
@@ -361,8 +361,8 @@ func mergeStringList(a, b []string) []string {
 	return res
 }
 
-func buildLookupMap(l []string) filterlist.LookupMap {
-	m := make(filterlist.LookupMap, len(l))
+func buildLookupMap(l []string) filterlists.LookupMap {
+	m := make(filterlists.LookupMap, len(l))
 
 	for _, s := range l {
 		m[s] = struct{}{}
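
The buildLookupMap hunk above is the whole of the lookup-map plumbing on the entity side. As a reading aid, here is the pattern in isolation; the concrete declaration of filterlists.LookupMap is assumed to be map[string]struct{}, which this diff does not show, while the make/struct{}{} usage is taken directly from the changed lines.

package filterlists

// LookupMap is assumed to be a plain string set; only its construction and
// consumption appear in this commit, not its declaration.
type LookupMap map[string]struct{}

// buildLookupMap mirrors the helper changed above: it turns a slice of filter
// list IDs into a set for cheap membership checks.
func buildLookupMap(l []string) LookupMap {
	m := make(LookupMap, len(l))
	for _, s := range l {
		m[s] = struct{}{} // presence only; struct{} stores no data
	}
	return m
}

A caller of Entity.GetListsMap would then test membership with something like: if _, ok := listsMap["SOME-LIST-ID"]; ok { ... }
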
File 2 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"encoding/hex"
@@ -59,7 +59,7 @@ func (bf *scopedBloom) getBloomForType(entityType string) (*ring.Ring, error) {
 	case "country":
 		r = bf.country
 	default:
-		return nil, fmt.Errorf("unsupported filterlist entity type %q", entityType)
+		return nil, fmt.Errorf("unsupported filterlists entity type %q", entityType)
 	}
 
 	return r, nil
File 3 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"fmt"
File 4 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"context"
@@ -78,7 +78,7 @@ func isLoaded() bool {
 }
 
 // processListFile opens the latest version of file and decodes it's DSDL
-// content. It calls processEntry for each decoded filterlist entry.
+// content. It calls processEntry for each decoded filterlists entry.
 func processListFile(ctx context.Context, filter *scopedBloom, file *updater.File) error {
 	f, err := os.Open(file.Path())
 	if err != nil {
File 5 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"compress/gzip"
@@ -17,7 +17,7 @@ type listEntry struct {
 	Type string `json:"type"`
 }
 
-// decodeFile decodes a DSDL filterlist file and sends decoded entities to
+// decodeFile decodes a DSDL filterlists file and sends decoded entities to
 // ch. It blocks until all list entries have been consumed or ctx is cancelled.
 func decodeFile(ctx context.Context, r io.Reader, ch chan<- *listEntry) error {
 	compressed, format, err := parseHeader(r)
File 6 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"fmt"
@@ -37,7 +37,7 @@ type Category struct {
 	Description string `json:"description,omitempty"`
 }
 
-// Source defines an external filterlist source.
+// Source defines an external filterlists source.
 type Source struct {
 	// ID is a unique ID for the source. Entities always reference the
 	// sources they have been observed in using this ID. Refer to the
@@ -56,7 +56,7 @@ type Source struct {
 	// to the Type definition for more information and well-known types.
 	Type string `json:"type"`
 
-	// URL points to the filterlist file.
+	// URL points to the filterlists file.
 	URL string `json:"url"`
 
 	// Category holds the unique ID of a category the source belongs to. Since
File 7 of 17:

@@ -1,15 +1,15 @@
-package filterlist
+package filterlists
 
 const (
 	cacheDBPrefix = "cache:intel/filterlists"
 
 	// filterListCacheVersionKey is used to store the highest version
-	// of a filterlist file (base, intermediate or urgent) in the
+	// of a filterlists file (base, intermediate or urgent) in the
 	// cache database. It's used to decide if the cache database and
 	// bloomfilters need to be resetted and rebuilt.
 	filterListCacheVersionKey = cacheDBPrefix + "/version"
 
-	// filterListIndexKey is used to store the filterlist index.
+	// filterListIndexKey is used to store the filterlists index.
 	filterListIndexKey = cacheDBPrefix + "/index"
 
 	// filterListKeyPrefix is the prefix inside that cache database
File 8 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"errors"
@@ -18,7 +18,7 @@ import (
 func lookupBlockLists(entity, value string) ([]string, error) {
 	key := makeListCacheKey(entity, value)
 	if !isLoaded() {
-		log.Warningf("intel/filterlist: not searching for %s because filterlists not loaded", key)
+		log.Warningf("intel/filterlists: not searching for %s because filterlists not loaded", key)
 		// filterLists have not yet been loaded so
 		// there's no point querying into the cache
 		// database.
@@ -32,7 +32,7 @@ func lookupBlockLists(entity, value string) ([]string, error) {
 		return nil, nil
 	}
 
-	log.Debugf("intel/filterlist: searching for entries with %s", key)
+	log.Debugf("intel/filterlists: searching for entries with %s", key)
 	entry, err := getEntityRecordByKey(key)
 	if err != nil {
 		if err == database.ErrNotFound {
File 9 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import "strings"
 
File 10 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 /*
 
@@ -63,7 +63,7 @@ func TestLookupDomain(t *testing.T) {
 }
 
 // testMarkNotLoaded ensures that functions believe
-// filterlist are not yet loaded. It returns a
+// filterlists are not yet loaded. It returns a
 // func that restores the previous state.
 func testMarkNotLoaded() func() {
 	if isLoaded() {
File 11 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"context"
@@ -23,7 +23,7 @@ var (
 )
 
 func init() {
-	module = modules.Register("filterlist", prep, start, nil, "core", "netenv")
+	module = modules.Register("filterlists", prep, start, nil, "core", "netenv")
 }
 
 func prep() error {
File 12 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 /*
 func TestMain(m *testing.M) {
File 13 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"fmt"
File 14 of 17:

@@ -1,4 +1,4 @@
-package filterlist
+package filterlists
 
 import (
 	"context"
@@ -97,11 +97,11 @@ func performUpdate(ctx context.Context) error {
 	}
 
 	// if we processed the base file we need to perform
-	// some cleanup on filterlist entities that have not
+	// some cleanup on filterlists entities that have not
 	// been updated now. Once we are done, start a worker
 	// for that purpose.
 	if cleanupRequired {
-		defer module.StartWorker("filterlist:cleanup", removeAllObsoleteFilterEntries)
+		defer module.StartWorker("filterlists:cleanup", removeAllObsoleteFilterEntries)
 	}
 
 	// try to save the highest version of our files.
@@ -166,7 +166,7 @@ func removeObsoleteFilterEntries(batchSize int) (bool, error) {
 	return cnt < batchSize, nil
 }
 
-// getUpgradableFiles returns a slice of filterlist files
+// getUpgradableFiles returns a slice of filterlists files
 // that should be updated. The files MUST be updated and
 // processed in the returned order!
 func getUpgradableFiles() ([]*updater.File, error) {
File 15 of 17:

@@ -10,5 +10,5 @@ var (
 )
 
 func init() {
-	Module = modules.Register("intel", nil, nil, nil, "geoip", "filterlist")
+	Module = modules.Register("intel", nil, nil, nil, "geoip", "filterlists")
 }
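
Note that the rename covers two distinct identifiers: the Go package name (filterlist to filterlists) and the module name registered with portbase's modules system. The intel module above references the latter by string in its dependency list, so both registration sites must agree. Here is a sketch of the registering side, with the modules.Register call shape taken exactly from the changed lines; the placeholder prep and start functions exist only to make the sketch self-contained.

package filterlists

import "github.com/safing/portbase/modules"

var module *modules.Module

func init() {
	// "filterlists" is the name other modules list as a dependency; the intel
	// module's Register call in the hunk above must use the same string.
	module = modules.Register("filterlists", prep, start, nil, "core", "netenv")
}

// Placeholders so the sketch compiles on its own; the real prep and start
// live elsewhere in the package.
func prep() error  { return nil }
func start() error { return nil }
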
File 16 of 17:

@@ -5,7 +5,7 @@ import (
 	"fmt"
 	"sync"
 
-	"github.com/safing/portmaster/intel/filterlist"
+	"github.com/safing/portmaster/intel/filterlists"
 	"github.com/safing/portmaster/profile/endpoints"
 )
 
@@ -63,7 +63,7 @@ func updateGlobalConfigProfile(ctx context.Context, data interface{}) error {
 	}
 
 	list = cfgOptionFilterLists()
-	cfgFilterLists, err = filterlist.ResolveListIDs(list)
+	cfgFilterLists, err = filterlists.ResolveListIDs(list)
 	if err != nil {
 		lastErr = err
 	}
File 17 of 17:

@@ -14,7 +14,7 @@ import (
 
 	"github.com/safing/portbase/config"
 	"github.com/safing/portbase/database/record"
-	"github.com/safing/portmaster/intel/filterlist"
+	"github.com/safing/portmaster/intel/filterlists"
 	"github.com/safing/portmaster/profile/endpoints"
 )
 
@@ -144,7 +144,7 @@ func (profile *Profile) parseConfig() error {
 
 	list, ok = profile.configPerspective.GetAsStringArray(CfgOptionFilterListKey)
 	if ok {
-		profile.filterListIDs, err = filterlist.ResolveListIDs(list)
+		profile.filterListIDs, err = filterlists.ResolveListIDs(list)
 		if err != nil {
 			lastErr = err
 		}