Mirror of https://github.com/v2fly/domain-list-community.git
Synced 2026-02-20 04:40:41 +07:00

Compare commits: 2026021713...2026021909 (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | db9c0fe466 | |
| | 109a50f048 | |
| | b258a6594e | |
| | 8d5ef88729 | |
data/category-bank-ru (Normal file, 40 lines)
@@ -0,0 +1,40 @@
# Alfa Bank
alfabank.ru

# GazpromBank
gazprombank.ru
gpb.ru

# Mts dengi
dbo-dengi.online
mtsdengi.ru

# PSB Bank
psbank.ru

# RosBank
bankline.ru
rosbank.ru

# RUSSIA Bank
abr.ru

# Russian Agricultural Bank
rshb.ru

# SberBank
sber.ru
sberbank.com
sberbank.ru

# T-Bank (Tinkoff)
cdn-tinkoff.ru
tbank-online.com
tbank.ru

# Tochka bank
tochka-tech.com
tochka.com

# VTB Bank
vtb.ru
@@ -1,2 +1,3 @@
include:avito
include:ozon
include:wildberries
@@ -89,6 +89,7 @@ joox.com
kpacg.com
linetv.tw
megaphone.fm
megogo.net
mikanani.me
moov.hk
odysee.com
@@ -1,3 +1,22 @@
# CDNvideo is a leading CDN provider in Russia and the CIS
cdnvideo.ru
trbcdn.net

# Information about films and series
kinorium.com

# Streaming services for watching TV series and movies
include:kinopoisk
include:okko
include:rutube
include:wink
24h.tv
amediateka.ru
ivi.ru
premier.one
smotreshka.tv
start.ru
tvigle.ru
viju.ru

# Video hosting
include:rutube
@@ -2,6 +2,7 @@
include:category-bank-ir
include:category-bank-jp
include:category-bank-mm
include:category-bank-ru

include:fibank
include:futu
@@ -15,7 +15,13 @@ edu.ru

# Other Federal Resources
cbr.ru # Central Bank of Russia
cikrf.ru # Central Electoral Commission of the Russian Federation
ebs.ru # Unified Biometric System
goskey.ru # GosKey - an electronic signature on a smartphone
izbirkom.ru # Information on ongoing elections and referendums
kremlin.ru # Online representation of the President of Russia
nalog.ru # Federal Tax Service
xn--80ajghhoc2aj1c8b.xn--p1ai # Honest Sign - State Labeling System

#
# Regional sites and Public Services
@@ -149,3 +155,6 @@ chukotka.ru # Chukotka Autonomous Area
jamal.ru # Yamal-Nenets Autonomous Area
surgut.ru # Khanty-Mansi Autonomous Area – Yugra
yamal.ru # Yamal-Nenets Autonomous Area

# Regional health services
zdrav10.ru # Republic of Karelia
@@ -2223,6 +2223,8 @@ heavyfetish.com
hegre.com
heiguab.top
heijidi.life
heiliao.com
heiliao88.com
heise360181.buzz
heise360182.buzz
helixstudios.net
@@ -6157,6 +6159,7 @@ regexp:(^|\.)tqav[1-9][0-9]\.com$
regexp:(^|\.)tt[1-2][0-9]\.tv$
regexp:(^|\.)ttghg[1-9][0-9]\.xyz$
regexp:(^|\.)tttv([1-9][0-9]?|100)\.com$
regexp:(^|\.)twav[1-9]\.xyz$
regexp:(^|\.)twseb([1-9][0-9]?)?\.com$
regexp:(^|\.)uu[a-z][1-9][0-9]?\.com$
regexp:(^|\.)whtdh0[1-3]\.cc$
@@ -1,6 +1,69 @@
# ABC of Taste
av.ru

# Auchan
auchan.ru

# Bristol
bristol.ru

# Children's World
detmir.ru

# Dixie
dixy.ru

# Dodo Pizza
dodois.com
dodois.io
dodopizza.com
dodopizza.ru
dodostatic.net

# Dostaevsky
dostaevsky.ru

# Eurasia
evrasia.rest

# Fast food restaurant chain "Vkusno i tochka"
vkusnoitochka.ru

# Fix Price
fix-price.com

# Lenta
lenta.com
lenta.tech

# Magnet
magnit.ru

# Metro
metro-cc.ru

# Pyaterochka, Perekrestok, Chizhik
include:x5

# Red and White
krasnoeibeloe.ru

# Rostics
rostics.ru
uni.rest
unirest.tech

# Sakura
ilovesakura.ru

# Spar
myspar.ru

# Sushi Wok
sushiwok.ru

# Vinlab
winelab.ru

# VkusVill
vkusvill.ru
@@ -7,28 +7,51 @@ include:category-ecommerce-ru
include:category-entertainment-ru
include:category-gov-ru
include:category-retail-ru
include:category-travel-ru

# Public transportation
include:aviasales
include:mosmetro

include:avito
# Well-known companies
include:mailru-group
include:x5
include:yandex

# Dodo Pizza
dodopizza.ru
dodopizza.com
dodostatic.net
dodois.com
dodois.io
# Bank & Finance & Insurance & Securities
include:category-bank-ru
# Credit History Bureaus
credistory.ru
nbki.ru
# Currency and stock exchanges
moex.com # Moscow Stock Exchange
spvb.ru # Saint-Petersburg Stock Exchange
# Financial marketplace
banki.ru
finuslugi.ru
# Investment
sistema-capital.com
# Mir payment system
mirpayonline.ru
# National Payment Card System
nspk.ru
# Tipping service
netmonet.co
tips.tips

# Rostics
rostics.ru
uni.rest
unirest.tech
# Telecom operators
beeline.ru
dom.ru
megafon.ru
mts.ru
mymts.ru
rostelecom.ru
rt.ru
t2.ru
tele2.ru
yota.ru

trbcdn.net
tbank-online.com
taxsee.com
# Other domains
gazfond-pn.ru # Non-state pension fund GAZFOND pension savings
litres.ru # E-book and audiobook service
meteoinfo.ru # Hydrometeorological Center of Russia
ngenix.net # NGENIX is a Russian provider of acceleration and security services for public web resources
pochta.ru # Russian post
qms.ru # Russian internet speed testing service
rustore.ru # RuStore is a Russian mobile app store for Android
taxsee.com # Taxi for business (self-employed drivers)
data/category-travel-ru (Normal file, 38 lines)
@@ -0,0 +1,38 @@
# Aviasales
include:aviasales

# BlaBlaCar
blablacar.ru

# Bus stations
avtovokzaly.ru

# Ostrovok
ostrovok.ru

# Othello from 2GIS
otello.ru

# Public transport and ticketing
include:mosmetro
full:bilet.nspk.ru
full:metro.spb.ru
full:mos.transport.vtb.ru
full:podorozhnik.spb.ru
full:transport.mos.ru
gorodpay.ru
rrtp.ru
sbertroika.ru

# Russian Railways
rzd-bonus.ru
rzd.ru

# Tutu
tutu.ru

# Yandex Rent
full:arenda.yandex.ru

# Yandex Taxi
full:taxi.yandex.ru
data/kinopoisk (Normal file, 2 lines)
@@ -0,0 +1,2 @@
kinopoisk.ru
kinopoisk-ru.clstorage.net
@@ -5,5 +5,7 @@ include:vk

boosty.to
donationalerts.com
max.ru
memealerts.com
oneme.ru
tamtam.chat
@@ -1,5 +1,6 @@
missav.ai
missav.com
missav.live
missav.uno
missav.vip
missav.ws
@@ -58,8 +58,7 @@ yastat.net
yastatic.net

# Watching movies, included in the Yandex subscription
kinopoisk.ru
kinopoisk-ru.clstorage.net
include:kinopoisk

# Weather service
full:yandex-pogoda.static-storage.net
main.go (222 lines changed)
@@ -22,18 +22,11 @@ var (
exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma")
)

var (
plMap = make(map[string]*ParsedList)
finalMap = make(map[string][]*Entry)
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
)

type Entry struct {
Type string
Value string
Attrs []string
Plain string
Affs []string
}

type Inclusion struct {
@@ -48,6 +41,12 @@ type ParsedList struct {
Entries []*Entry
}

type Processor struct {
plMap map[string]*ParsedList
finalMap map[string][]*Entry
cirIncMap map[string]bool
}

func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
site := &router.GeoSite{
CountryCode: listName,
@@ -90,29 +89,27 @@ func writePlainList(listname string, entries []*Entry) error {
return w.Flush()
}

func parseEntry(line string) (Entry, error) {
var entry Entry
func parseEntry(line string) (*Entry, []string, error) {
entry := new(Entry)
parts := strings.Fields(line)
if len(parts) == 0 {
return entry, fmt.Errorf("empty line")
return entry, nil, fmt.Errorf("empty line")
}

// Parse type and value
v := parts[0]
colonIndex := strings.Index(v, ":")
if colonIndex == -1 {
entry.Type = dlc.RuleTypeDomain // Default type
entry.Value = strings.ToLower(v)
if !validateDomainChars(entry.Value) {
return entry, fmt.Errorf("invalid domain: %q", entry.Value)
typ, val, isTypeSpecified := strings.Cut(parts[0], ":")
typ = strings.ToLower(typ)
if !isTypeSpecified { // Default RuleType
if !validateDomainChars(typ) {
return entry, nil, fmt.Errorf("invalid domain: %q", typ)
}
entry.Type = dlc.RuleTypeDomain
entry.Value = typ
} else {
typ := strings.ToLower(v[:colonIndex])
val := v[colonIndex+1:]
switch typ {
case dlc.RuleTypeRegexp:
if _, err := regexp.Compile(val); err != nil {
return entry, fmt.Errorf("invalid regexp %q: %w", val, err)
return entry, nil, fmt.Errorf("invalid regexp %q: %w", val, err)
}
entry.Type = dlc.RuleTypeRegexp
entry.Value = val
@@ -120,57 +117,60 @@ func parseEntry(line string) (Entry, error) {
entry.Type = dlc.RuleTypeInclude
entry.Value = strings.ToUpper(val)
if !validateSiteName(entry.Value) {
return entry, fmt.Errorf("invalid include list name: %q", entry.Value)
return entry, nil, fmt.Errorf("invalid included list name: %q", entry.Value)
}
case dlc.RuleTypeDomain, dlc.RuleTypeFullDomain, dlc.RuleTypeKeyword:
entry.Type = typ
entry.Value = strings.ToLower(val)
if !validateDomainChars(entry.Value) {
return entry, fmt.Errorf("invalid domain: %q", entry.Value)
return entry, nil, fmt.Errorf("invalid domain: %q", entry.Value)
}
default:
return entry, fmt.Errorf("invalid type: %q", typ)
return entry, nil, fmt.Errorf("invalid type: %q", typ)
}
}

// Parse attributes and affiliations
var affs []string
for _, part := range parts[1:] {
if strings.HasPrefix(part, "@") {
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
switch part[0] {
case '@':
attr := strings.ToLower(part[1:])
if !validateAttrChars(attr) {
return entry, fmt.Errorf("invalid attribute: %q", attr)
return entry, affs, fmt.Errorf("invalid attribute: %q", attr)
}
entry.Attrs = append(entry.Attrs, attr)
} else if strings.HasPrefix(part, "&") {
aff := strings.ToUpper(part[1:]) // Trim affiliation prefix `&` character
case '&':
aff := strings.ToUpper(part[1:])
if !validateSiteName(aff) {
return entry, fmt.Errorf("invalid affiliation: %q", aff)
return entry, affs, fmt.Errorf("invalid affiliation: %q", aff)
}
entry.Affs = append(entry.Affs, aff)
} else {
return entry, fmt.Errorf("invalid attribute/affiliation: %q", part)
affs = append(affs, aff)
default:
return entry, affs, fmt.Errorf("invalid attribute/affiliation: %q", part)
}
}
// Sort attributes
slices.Sort(entry.Attrs)
// Formated plain entry: type:domain.tld:@attr1,@attr2
var plain strings.Builder
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
plain.WriteString(entry.Type)
plain.WriteByte(':')
plain.WriteString(entry.Value)
for i, attr := range entry.Attrs {
if i == 0 {
plain.WriteByte(':')
} else {
plain.WriteByte(',')
}
plain.WriteByte('@')
plain.WriteString(attr)
}
entry.Plain = plain.String()

return entry, nil
if entry.Type != dlc.RuleTypeInclude {
slices.Sort(entry.Attrs) // Sort attributes
// Formated plain entry: type:domain.tld:@attr1,@attr2
var plain strings.Builder
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
plain.WriteString(entry.Type)
plain.WriteByte(':')
plain.WriteString(entry.Value)
for i, attr := range entry.Attrs {
if i == 0 {
plain.WriteByte(':')
} else {
plain.WriteByte(',')
}
plain.WriteByte('@')
plain.WriteString(attr)
}
entry.Plain = plain.String()
}
return entry, affs, nil
}

func validateDomainChars(domain string) bool {
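Between these hunks, the key behavioral change is that parseEntry now splits the first token with strings.Cut and returns the `&` affiliations alongside the entry instead of storing them in Entry.Affs. A minimal, hypothetical sketch of the token convention it parses (an optional `type:` prefix, then `@attr` and `&aff` fields); the names here are illustrative and the repository's validation helpers are omitted:

```go
package main

import (
	"fmt"
	"strings"
)

// splitToken sketches the line convention handled by parseEntry:
// "[type:]value [@attr ...] [&affiliation ...]". Empty input and
// character validation are not handled here; the real code also
// upper-cases include targets.
func splitToken(line string) (typ, val string, attrs, affs []string) {
	parts := strings.Fields(line)
	typ, val, ok := strings.Cut(parts[0], ":")
	if !ok { // no explicit type: treat the whole token as a domain
		typ, val = "domain", parts[0]
	}
	for _, p := range parts[1:] {
		switch p[0] {
		case '@': // attribute
			attrs = append(attrs, strings.ToLower(p[1:]))
		case '&': // affiliation (another list that also receives the entry)
			affs = append(affs, strings.ToUpper(p[1:]))
		}
	}
	return strings.ToLower(typ), strings.ToLower(val), attrs, affs
}

func main() {
	typ, val, attrs, affs := splitToken("full:Example.org @cn &SOMELIST")
	fmt.Println(typ, val, attrs, affs) // full example.org [cn] [SOMELIST]
}
```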
@@ -206,62 +206,54 @@ func validateSiteName(name string) bool {
return true
}

func loadData(path string) ([]*Entry, error) {
func (p *Processor) getOrCreateParsedList(name string) *ParsedList {
pl, exist := p.plMap[name]
if !exist {
pl = &ParsedList{Name: name}
p.plMap[name] = pl
}
return pl
}

func (p *Processor) loadData(listName string, path string) error {
file, err := os.Open(path)
if err != nil {
return nil, err
return err
}
defer file.Close()

var entries []*Entry
pl := p.getOrCreateParsedList(listName)
scanner := bufio.NewScanner(file)
lineIdx := 0
for scanner.Scan() {
line := scanner.Text()
lineIdx++
if idx := strings.Index(line, "#"); idx != -1 {
line = line[:idx] // Remove comments
}
line, _, _ := strings.Cut(scanner.Text(), "#") // Remove comments
line = strings.TrimSpace(line)
if line == "" {
continue
}
entry, err := parseEntry(line)
entry, affs, err := parseEntry(line)
if err != nil {
return entries, fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
return fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
}
entries = append(entries, &entry)
}
return entries, nil
}

func parseList(refName string, refList []*Entry) error {
pl, _ := plMap[refName]
if pl == nil {
pl = &ParsedList{Name: refName}
plMap[refName] = pl
}
for _, entry := range refList {
if entry.Type == dlc.RuleTypeInclude {
if len(entry.Affs) != 0 {
return fmt.Errorf("affiliation is not allowed for include:%q", entry.Value)
}
inc := &Inclusion{Source: entry.Value}
for _, attr := range entry.Attrs {
if strings.HasPrefix(attr, "-") {
inc.BanAttrs = append(inc.BanAttrs, attr[1:]) // Trim attribute prefix `-` character
if attr[0] == '-' {
inc.BanAttrs = append(inc.BanAttrs, attr[1:])
} else {
inc.MustAttrs = append(inc.MustAttrs, attr)
}
}
for _, aff := range affs {
apl := p.getOrCreateParsedList(aff)
apl.Inclusions = append(apl.Inclusions, inc)
}
pl.Inclusions = append(pl.Inclusions, inc)
} else {
for _, aff := range entry.Affs {
apl, _ := plMap[aff]
if apl == nil {
apl = &ParsedList{Name: aff}
plMap[aff] = apl
}
for _, aff := range affs {
apl := p.getOrCreateParsedList(aff)
apl.Entries = append(apl.Entries, entry)
}
pl.Entries = append(pl.Entries, entry)
@@ -296,9 +288,7 @@ func polishList(roughMap map[string]*Entry) []*Entry {
domainsMap := make(map[string]bool)
for _, entry := range roughMap {
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
case dlc.RuleTypeRegexp:
finalList = append(finalList, entry)
case dlc.RuleTypeKeyword:
case dlc.RuleTypeRegexp, dlc.RuleTypeKeyword:
finalList = append(finalList, entry)
case dlc.RuleTypeDomain:
domainsMap[entry.Value] = true
@@ -323,11 +313,11 @@ func polishList(roughMap map[string]*Entry) []*Entry {
pd = "." + pd // So that `domain:example.org` overrides `full:example.org`
}
for {
idx := strings.Index(pd, ".")
if idx == -1 {
var hasParent bool
_, pd, hasParent = strings.Cut(pd, ".") // Go for next parent
if !hasParent {
break
}
pd = pd[idx+1:] // Go for next parent
if domainsMap[pd] {
isRedundant = true
break
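The two polishList hunks above fold the regexp/keyword cases together and swap the manual strings.Index walk over parent domains for strings.Cut. A small standalone sketch of the same redundancy check, assuming a plain set of already-collected `domain:` rules (function and variable names are illustrative):

```go
package main

import (
	"fmt"
	"strings"
)

// coveredByParent reports whether some parent of d (for example
// "cdn.static.example.org" -> "static.example.org" -> "example.org" -> "org")
// is already present in domains, which makes d redundant.
func coveredByParent(d string, domains map[string]bool) bool {
	for {
		var hasParent bool
		_, d, hasParent = strings.Cut(d, ".") // drop the leftmost label
		if !hasParent {
			return false
		}
		if domains[d] {
			return true
		}
	}
}

func main() {
	domains := map[string]bool{"example.org": true}
	fmt.Println(coveredByParent("cdn.static.example.org", domains)) // true
	fmt.Println(coveredByParent("example.com", domains))            // false
}
```

Prefixing a `domain:` candidate with "." before the walk, as the hunk's comment notes, is what lets `domain:example.org` also shadow `full:example.org`.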
@@ -344,36 +334,38 @@
return finalList
}

func resolveList(pl *ParsedList) error {
if _, pldone := finalMap[pl.Name]; pldone {
func (p *Processor) resolveList(plname string) error {
if _, pldone := p.finalMap[plname]; pldone {
return nil
}

if cirIncMap[pl.Name] {
return fmt.Errorf("circular inclusion in: %q", pl.Name)
pl, plexist := p.plMap[plname]
if !plexist {
return fmt.Errorf("list %q not found", plname)
}
cirIncMap[pl.Name] = true
defer delete(cirIncMap, pl.Name)
if p.cirIncMap[plname] {
return fmt.Errorf("circular inclusion in: %q", plname)
}
p.cirIncMap[plname] = true
defer delete(p.cirIncMap, plname)

roughMap := make(map[string]*Entry) // Avoid basic duplicates
for _, dentry := range pl.Entries { // Add direct entries
roughMap[dentry.Plain] = dentry
}
for _, inc := range pl.Inclusions {
incPl, exist := plMap[inc.Source]
if !exist {
return fmt.Errorf("list %q includes a non-existent list: %q", pl.Name, inc.Source)
if _, exist := p.plMap[inc.Source]; !exist {
return fmt.Errorf("list %q includes a non-existent list: %q", plname, inc.Source)
}
if err := resolveList(incPl); err != nil {
if err := p.resolveList(inc.Source); err != nil {
return err
}
for _, ientry := range finalMap[inc.Source] {
for _, ientry := range p.finalMap[inc.Source] {
if isMatchAttrFilters(ientry, inc) { // Add included entries
roughMap[ientry.Plain] = ientry
}
}
}
finalMap[pl.Name] = polishList(roughMap)
p.finalMap[plname] = polishList(roughMap)
return nil
}
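resolveList guards against include cycles with the Processor's cirIncMap: a list name is marked while it is on the resolution stack and unmarked via defer on return. A self-contained sketch of that pattern over a hypothetical include graph (memoization through finalMap is left out, and the list names below are only examples):

```go
package main

import "fmt"

// resolve walks the include graph depth-first; visiting holds the lists
// currently on the stack, so meeting one of them again means a cycle.
func resolve(name string, includes map[string][]string, visiting map[string]bool) error {
	if visiting[name] {
		return fmt.Errorf("circular inclusion in: %q", name)
	}
	visiting[name] = true
	defer delete(visiting, name)

	for _, inc := range includes[name] {
		if err := resolve(inc, includes, visiting); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	includes := map[string][]string{
		"category-ru":        {"category-bank-ru", "category-travel-ru"},
		"category-bank-ru":   {},
		"category-travel-ru": {"category-ru"}, // deliberately cyclic for the demo
	}
	fmt.Println(resolve("category-ru", includes, map[string]bool{}))
}
```

Clearing the mark on return is what distinguishes this on-stack cycle check from a permanent visited set; the permanent record is what finalMap provides in the real code.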
@@ -381,8 +373,8 @@ func run() error {
dir := *dataPath
fmt.Printf("using domain lists data in %q\n", dir)

// Generate refMap
refMap := make(map[string][]*Entry)
// Generate plMap
processor := &Processor{plMap: make(map[string]*ParsedList)}
err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
@@ -394,23 +386,16 @@ func run() error {
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %q", listName)
}
refMap[listName], err = loadData(path)
return err
return processor.loadData(listName, path)
})
if err != nil {
return fmt.Errorf("failed to loadData: %w", err)
}

// Generate plMap
for refName, refList := range refMap {
if err := parseList(refName, refList); err != nil {
return fmt.Errorf("failed to parseList %q: %w", refName, err)
}
}

// Generate finalMap
for plname, pl := range plMap {
if err := resolveList(pl); err != nil {
processor.finalMap = make(map[string][]*Entry, len(processor.plMap))
processor.cirIncMap = make(map[string]bool)
for plname := range processor.plMap {
if err := processor.resolveList(plname); err != nil {
return fmt.Errorf("failed to resolveList %q: %w", plname, err)
}
}
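The run() hunks replace the package-level maps with the Processor built at the top of the walk: filepath.WalkDir feeds Processor.loadData, then finalMap and cirIncMap are allocated and resolveList is called once per loaded list. A rough, hypothetical outline of that wiring, with Entry and ParsedList collapsed to plain strings and both methods stubbed out:

```go
package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
)

// Processor mirrors the struct added in main.go, with the entry types
// simplified for this sketch.
type Processor struct {
	plMap     map[string][]string // list name -> raw entries
	finalMap  map[string][]string // list name -> resolved entries
	cirIncMap map[string]bool     // lists currently being resolved
}

// loadData is a stub; the real method opens path and parses each line.
func (p *Processor) loadData(listName, path string) error {
	p.plMap[listName] = append(p.plMap[listName], "domain:"+listName+".example")
	return nil
}

// resolveList is a stub; the real method follows inclusions recursively.
func (p *Processor) resolveList(name string) error {
	if _, done := p.finalMap[name]; done {
		return nil
	}
	p.finalMap[name] = p.plMap[name]
	return nil
}

func main() {
	p := &Processor{plMap: make(map[string][]string)}
	_ = filepath.WalkDir("data", func(path string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return err
		}
		return p.loadData(filepath.Base(path), path)
	})
	p.finalMap = make(map[string][]string, len(p.plMap))
	p.cirIncMap = make(map[string]bool)
	for name := range p.plMap {
		if err := p.resolveList(name); err != nil {
			fmt.Println("failed to resolve", name, ":", err)
		}
	}
	fmt.Println(len(p.finalMap), "lists resolved")
}
```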
@@ -419,11 +404,10 @@ func run() error {
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}

// Export plaintext list
// Export plaintext lists
for rawEpList := range strings.SplitSeq(*exportLists, ",") {
if epList := strings.TrimSpace(rawEpList); epList != "" {
entries, exist := finalMap[strings.ToUpper(epList)]
entries, exist := processor.finalMap[strings.ToUpper(epList)]
if !exist || len(entries) == 0 {
fmt.Printf("list %q does not exist or is empty\n", epList)
continue
@@ -438,7 +422,7 @@ func run() error {

// Generate dat file
protoList := new(router.GeoSiteList)
for siteName, siteEntries := range finalMap {
for siteName, siteEntries := range processor.finalMap {
site, err := makeProtoList(siteName, siteEntries)
if err != nil {
return fmt.Errorf("failed to makeProtoList %q: %w", siteName, err)