Mirror of https://github.com/v2fly/domain-list-community.git, synced 2026-02-20 21:00:42 +07:00
Compare commits: 2026022008...non-cn (1 commit)

Commit 9ba660347d
@@ -5,10 +5,6 @@ alfabank.ru
gazprombank.ru
gpb.ru

# Mts dengi
dbo-dengi.online
mtsdengi.ru

# PSB Bank
psbank.ru

@@ -32,9 +28,5 @@ cdn-tinkoff.ru
tbank-online.com
tbank.ru

# Tochka bank
tochka-tech.com
tochka.com

# VTB Bank
vtb.ru
@@ -35,7 +35,6 @@ bitmex.com
bitquick.co
bitstamp.net
bittrex.com
blockchain.com
blockfrost.io
btcbox.co.jp
cex.io
data/category-non-domestic-cn (new file, 31 lines)
@@ -0,0 +1,31 @@
# Part of Chinese entities but exclusively serving for non-cn area

include:alibaba @!cn
include:anker @!cn
include:bilibili @!cn
include:boc @!cn
include:bytedance @!cn
include:ccb @!cn
include:chinamobile @!cn
include:chinatelecom @!cn
include:chinaunicom @!cn
include:citic @!cn
include:cmb @!cn
include:ctrip @!cn
include:deepin @!cn
include:dewu @!cn
include:didi @!cn
include:eastmoney @!cn
include:huawei @!cn
include:icbc @!cn
include:ipip @!cn
include:iqiyi @!cn
include:jd @!cn
include:oppo @!cn
include:pingan @!cn
include:sina @!cn
include:tencent @!cn
include:vivo @!cn
include:xd @!cn
include:xiaohongshu @!cn
include:xiaomi @!cn
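For readers unfamiliar with the list format: an `include:<list> @!cn` line pulls in only those entries of the referenced data file that carry the `!cn` attribute, so each line above re-exports just the non-mainland portion of the named list. Below is a minimal, self-contained Go sketch of that filtering step; `filterByAttr` and the sample data are hypothetical illustrations, not the repository's actual code.

```go
package main

import "fmt"

// entry is a simplified stand-in for a parsed list line: a domain plus its attribute tags.
type entry struct {
	value string
	attrs map[string]bool
}

// filterByAttr keeps only the entries tagged with the wanted attribute,
// which is what an "include:<list> @!cn" directive selects from the referenced list.
func filterByAttr(list []entry, attr string) []entry {
	var out []entry
	for _, e := range list {
		if e.attrs[attr] {
			out = append(out, e)
		}
	}
	return out
}

func main() {
	// Toy stand-in for a referenced list: one entry tagged @cn, one tagged @!cn.
	// These values are illustrative, not taken from the real data files.
	referenced := []entry{
		{value: "example.cn", attrs: map[string]bool{"cn": true}},
		{value: "example.com", attrs: map[string]bool{"!cn": true}},
	}
	// include:<list> @!cn keeps only the second entry.
	for _, e := range filterByAttr(referenced, "!cn") {
		fmt.Println(e.value) // example.com
	}
}
```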
@@ -137,7 +137,6 @@ porn
18hmanga.com
18insta.com
18j.tv
18jav.tv
18jms.com
18mh.co
18mh.me

@@ -2224,8 +2223,6 @@ heavyfetish.com
hegre.com
heiguab.top
heijidi.life
heiliao.com
heiliao88.com
heise360181.buzz
heise360182.buzz
helixstudios.net

@@ -2693,7 +2690,6 @@ jasmin.com
jav-angel.net
jav-subtitles.com
jav-vr.net
jav.com.co
jav.dog
jav.gallery
jav.guru

@@ -2796,7 +2792,6 @@ javpub.me
javpush.com
javqd.com
javrank.com
javrate.com
javrave.club
javroot.com
javscat.net

@@ -2821,7 +2816,6 @@ javtrust.com
javtube.cc
javtube.com
javtube.net
javvideoporn.com
javvids.com
javxspot.com
javxxx.me

@@ -3593,7 +3587,6 @@ ninpu.cyou
niuc2.com
niziero.info
njav.tv
njavtv.com
nlsexfilmpjes.com
nlt-media.com
noc.syosetu.com

@@ -5573,6 +5566,9 @@ xgtd3.com
xgtdr.buzz
xh-porn.com
xh.video
xhamster.com
xhamster.desi
xhamster2.com
xhot.pro
xhub.tv
xiangrikui-app.com

@@ -6161,7 +6157,6 @@ regexp:(^|\.)tqav[1-9][0-9]\.com$
regexp:(^|\.)tt[1-2][0-9]\.tv$
regexp:(^|\.)ttghg[1-9][0-9]\.xyz$
regexp:(^|\.)tttv([1-9][0-9]?|100)\.com$
regexp:(^|\.)twav[1-9]\.xyz$
regexp:(^|\.)twseb([1-9][0-9]?)?\.com$
regexp:(^|\.)uu[a-z][1-9][0-9]?\.com$
regexp:(^|\.)whtdh0[1-3]\.cc$
@@ -24,15 +24,10 @@ spvb.ru # Saint-Petersburg Stock Exchange
# Financial marketplace
banki.ru
finuslugi.ru
# Investment
sistema-capital.com
# Mir payment system
mirpayonline.ru
# National Payment Card System
nspk.ru
# Tipping service
netmonet.co
tips.tips

# Telecom operators
beeline.ru
data/cn (1 changed line)
@@ -2,3 +2,4 @@

include:tld-cn
include:geolocation-cn
#include:category-non-domestic-cn
@@ -1,5 +1,8 @@
# This list contains domains that don't have access point in China mainland. This is opposite to geolocation-cn.

# Part of Chinese entities but exclusively serving for non-cn area
include:category-non-domestic-cn

# AI Chat
include:category-ai-!cn
@@ -1,7 +1,5 @@
missav.ai
missav.com
missav.live
missav.uno
missav.vip
missav.ws
missav123.com
@@ -3,8 +3,6 @@ xhamster.desi
xhamster.xxx
xhamster18.com
xhamster18.desi
xhamster19.com
xhamster3.com
xhamsterlive.com
xhcdn.com
@@ -55,7 +55,6 @@ yandexcom.net
yandexmetrica.com
yandexwebcache.org
yastat.net
yastatic-net.ru
yastatic.net

# Watching movies, included in the Yandex subscription
main.go (222 changed lines)
@@ -22,11 +22,18 @@ var (
exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma")
)

var (
plMap = make(map[string]*ParsedList)
finalMap = make(map[string][]*Entry)
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
)

type Entry struct {
Type string
Value string
Attrs []string
Plain string
Affs []string
}

type Inclusion struct {
@@ -41,12 +48,6 @@ type ParsedList struct {
Entries []*Entry
}

type Processor struct {
plMap map[string]*ParsedList
finalMap map[string][]*Entry
cirIncMap map[string]bool
}

func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
site := &router.GeoSite{
CountryCode: listName,
@@ -89,27 +90,29 @@ func writePlainList(listname string, entries []*Entry) error {
return w.Flush()
}

func parseEntry(line string) (*Entry, []string, error) {
entry := new(Entry)
func parseEntry(line string) (Entry, error) {
var entry Entry
parts := strings.Fields(line)
if len(parts) == 0 {
return entry, nil, fmt.Errorf("empty line")
return entry, fmt.Errorf("empty line")
}

// Parse type and value
typ, val, isTypeSpecified := strings.Cut(parts[0], ":")
typ = strings.ToLower(typ)
if !isTypeSpecified { // Default RuleType
if !validateDomainChars(typ) {
return entry, nil, fmt.Errorf("invalid domain: %q", typ)
v := parts[0]
colonIndex := strings.Index(v, ":")
if colonIndex == -1 {
entry.Type = dlc.RuleTypeDomain // Default type
entry.Value = strings.ToLower(v)
if !validateDomainChars(entry.Value) {
return entry, fmt.Errorf("invalid domain: %q", entry.Value)
}
entry.Type = dlc.RuleTypeDomain
entry.Value = typ
} else {
typ := strings.ToLower(v[:colonIndex])
val := v[colonIndex+1:]
switch typ {
case dlc.RuleTypeRegexp:
if _, err := regexp.Compile(val); err != nil {
return entry, nil, fmt.Errorf("invalid regexp %q: %w", val, err)
return entry, fmt.Errorf("invalid regexp %q: %w", val, err)
}
entry.Type = dlc.RuleTypeRegexp
entry.Value = val
@@ -117,60 +120,57 @@ func parseEntry(line string) (*Entry, []string, error) {
entry.Type = dlc.RuleTypeInclude
entry.Value = strings.ToUpper(val)
if !validateSiteName(entry.Value) {
return entry, nil, fmt.Errorf("invalid included list name: %q", entry.Value)
return entry, fmt.Errorf("invalid include list name: %q", entry.Value)
}
case dlc.RuleTypeDomain, dlc.RuleTypeFullDomain, dlc.RuleTypeKeyword:
entry.Type = typ
entry.Value = strings.ToLower(val)
if !validateDomainChars(entry.Value) {
return entry, nil, fmt.Errorf("invalid domain: %q", entry.Value)
return entry, fmt.Errorf("invalid domain: %q", entry.Value)
}
default:
return entry, nil, fmt.Errorf("invalid type: %q", typ)
return entry, fmt.Errorf("invalid type: %q", typ)
}
}

// Parse attributes and affiliations
var affs []string
for _, part := range parts[1:] {
switch part[0] {
case '@':
attr := strings.ToLower(part[1:])
if strings.HasPrefix(part, "@") {
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
if !validateAttrChars(attr) {
return entry, affs, fmt.Errorf("invalid attribute: %q", attr)
return entry, fmt.Errorf("invalid attribute: %q", attr)
}
entry.Attrs = append(entry.Attrs, attr)
case '&':
aff := strings.ToUpper(part[1:])
} else if strings.HasPrefix(part, "&") {
aff := strings.ToUpper(part[1:]) // Trim affiliation prefix `&` character
if !validateSiteName(aff) {
return entry, affs, fmt.Errorf("invalid affiliation: %q", aff)
return entry, fmt.Errorf("invalid affiliation: %q", aff)
}
affs = append(affs, aff)
default:
return entry, affs, fmt.Errorf("invalid attribute/affiliation: %q", part)
entry.Affs = append(entry.Affs, aff)
} else {
return entry, fmt.Errorf("invalid attribute/affiliation: %q", part)
}
}
// Sort attributes
slices.Sort(entry.Attrs)
// Formated plain entry: type:domain.tld:@attr1,@attr2
var plain strings.Builder
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
plain.WriteString(entry.Type)
plain.WriteByte(':')
plain.WriteString(entry.Value)
for i, attr := range entry.Attrs {
if i == 0 {
plain.WriteByte(':')
} else {
plain.WriteByte(',')
}
plain.WriteByte('@')
plain.WriteString(attr)
}
entry.Plain = plain.String()

if entry.Type != dlc.RuleTypeInclude {
slices.Sort(entry.Attrs) // Sort attributes
// Formated plain entry: type:domain.tld:@attr1,@attr2
var plain strings.Builder
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
plain.WriteString(entry.Type)
plain.WriteByte(':')
plain.WriteString(entry.Value)
for i, attr := range entry.Attrs {
if i == 0 {
plain.WriteByte(':')
} else {
plain.WriteByte(',')
}
plain.WriteByte('@')
plain.WriteString(attr)
}
entry.Plain = plain.String()
}
return entry, affs, nil
return entry, nil
}

func validateDomainChars(domain string) bool {
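As the `// Formated plain entry` comment indicates, `parseEntry` canonicalizes each rule into a `Plain` key of the form `type:value:@attr1,@attr2` with attributes sorted, and that key later deduplicates entries in `roughMap`. A small standalone sketch of the same canonical form (the sample values are hypothetical):

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// plainKey mirrors the canonical form built in parseEntry:
// type:value, followed by the sorted attributes as :@a1,@a2.
func plainKey(typ, value string, attrs []string) string {
	slices.Sort(attrs) // sorting makes "@cn @ads" and "@ads @cn" identical
	var b strings.Builder
	b.WriteString(typ)
	b.WriteByte(':')
	b.WriteString(value)
	for i, attr := range attrs {
		if i == 0 {
			b.WriteByte(':')
		} else {
			b.WriteByte(',')
		}
		b.WriteByte('@')
		b.WriteString(attr)
	}
	return b.String()
}

func main() {
	// Two hypothetical source lines that differ only in attribute order
	// collapse to the same key, so only one copy survives deduplication.
	fmt.Println(plainKey("domain", "example.org", []string{"cn", "ads"}))
	fmt.Println(plainKey("domain", "example.org", []string{"ads", "cn"}))
	// Output (both lines): domain:example.org:@ads,@cn
}
```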
@@ -206,54 +206,62 @@ func validateSiteName(name string) bool {
return true
}

func (p *Processor) getOrCreateParsedList(name string) *ParsedList {
pl, exist := p.plMap[name]
if !exist {
pl = &ParsedList{Name: name}
p.plMap[name] = pl
}
return pl
}

func (p *Processor) loadData(listName string, path string) error {
func loadData(path string) ([]*Entry, error) {
file, err := os.Open(path)
if err != nil {
return err
return nil, err
}
defer file.Close()

pl := p.getOrCreateParsedList(listName)
var entries []*Entry
scanner := bufio.NewScanner(file)
lineIdx := 0
for scanner.Scan() {
line := scanner.Text()
lineIdx++
line, _, _ := strings.Cut(scanner.Text(), "#") // Remove comments
if idx := strings.Index(line, "#"); idx != -1 {
line = line[:idx] // Remove comments
}
line = strings.TrimSpace(line)
if line == "" {
continue
}
entry, affs, err := parseEntry(line)
entry, err := parseEntry(line)
if err != nil {
return fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
return entries, fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
}
entries = append(entries, &entry)
}
return entries, nil
}

func parseList(refName string, refList []*Entry) error {
pl, _ := plMap[refName]
if pl == nil {
pl = &ParsedList{Name: refName}
plMap[refName] = pl
}
for _, entry := range refList {
if entry.Type == dlc.RuleTypeInclude {
if len(entry.Affs) != 0 {
return fmt.Errorf("affiliation is not allowed for include:%q", entry.Value)
}
inc := &Inclusion{Source: entry.Value}
for _, attr := range entry.Attrs {
if attr[0] == '-' {
inc.BanAttrs = append(inc.BanAttrs, attr[1:])
if strings.HasPrefix(attr, "-") {
inc.BanAttrs = append(inc.BanAttrs, attr[1:]) // Trim attribute prefix `-` character
} else {
inc.MustAttrs = append(inc.MustAttrs, attr)
}
}
for _, aff := range affs {
apl := p.getOrCreateParsedList(aff)
apl.Inclusions = append(apl.Inclusions, inc)
}
pl.Inclusions = append(pl.Inclusions, inc)
} else {
for _, aff := range affs {
apl := p.getOrCreateParsedList(aff)
for _, aff := range entry.Affs {
apl, _ := plMap[aff]
if apl == nil {
apl = &ParsedList{Name: aff}
plMap[aff] = apl
}
apl.Entries = append(apl.Entries, entry)
}
pl.Entries = append(pl.Entries, entry)
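Besides `@attr` attributes, a data line can carry `&LIST` affiliation tokens; as `parseList` shows, an affiliated entry is appended both to its own list and to every list named by its affiliations. A minimal sketch of that routing, with hypothetical list and domain names rather than real data:

```go
package main

import "fmt"

// entry is a simplified stand-in for a parsed data line: its value plus any
// "&LIST" affiliation tokens found on that line.
type entry struct {
	value string
	affs  []string
}

func main() {
	// Hypothetical source list "EXAMPLE" with two lines:
	//   foo.example &PARTNER
	//   bar.example
	source := []entry{
		{value: "foo.example", affs: []string{"PARTNER"}},
		{value: "bar.example"},
	}

	// Mirror of the parseList loop: every entry lands in its own list, and
	// additionally in each list named by its affiliations.
	lists := map[string][]string{}
	for _, e := range source {
		lists["EXAMPLE"] = append(lists["EXAMPLE"], e.value)
		for _, aff := range e.affs {
			lists[aff] = append(lists[aff], e.value)
		}
	}

	fmt.Println(lists["EXAMPLE"]) // [foo.example bar.example]
	fmt.Println(lists["PARTNER"]) // [foo.example]
}
```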
@@ -288,7 +296,9 @@ func polishList(roughMap map[string]*Entry) []*Entry {
domainsMap := make(map[string]bool)
for _, entry := range roughMap {
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
case dlc.RuleTypeRegexp, dlc.RuleTypeKeyword:
case dlc.RuleTypeRegexp:
finalList = append(finalList, entry)
case dlc.RuleTypeKeyword:
finalList = append(finalList, entry)
case dlc.RuleTypeDomain:
domainsMap[entry.Value] = true
@@ -313,11 +323,11 @@ func polishList(roughMap map[string]*Entry) []*Entry {
pd = "." + pd // So that `domain:example.org` overrides `full:example.org`
}
for {
var hasParent bool
_, pd, hasParent = strings.Cut(pd, ".") // Go for next parent
if !hasParent {
idx := strings.Index(pd, ".")
if idx == -1 {
break
}
pd = pd[idx+1:] // Go for next parent
if domainsMap[pd] {
isRedundant = true
break
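The loop above climbs the parent chain of each candidate (`a.b.example.org`, then `b.example.org`, then `example.org`) and marks the entry redundant once an ancestor is already present as a `domain:` rule; prepending a `.` to `full:` values first lets `domain:example.org` shadow `full:example.org`, as the comment notes. A compact standalone sketch of that redundancy test (the helper name is hypothetical):

```go
package main

import (
	"fmt"
	"strings"
)

// isCoveredByParent reports whether some ancestor of name is present in
// domains, i.e. whether a broader "domain:" rule already covers it.
func isCoveredByParent(name string, domains map[string]bool) bool {
	for {
		idx := strings.Index(name, ".")
		if idx == -1 {
			return false
		}
		name = name[idx+1:] // step to the next parent
		if domains[name] {
			return true
		}
	}
}

func main() {
	domains := map[string]bool{"example.org": true}

	// domain:a.example.org is redundant: example.org already covers it.
	fmt.Println(isCoveredByParent("a.example.org", domains)) // true

	// full:example.org gets a leading "." first, so the initial parent step
	// lands on "example.org" itself and the domain rule shadows the full rule.
	fmt.Println(isCoveredByParent(".example.org", domains)) // true

	// An unrelated domain is kept.
	fmt.Println(isCoveredByParent("a.example.net", domains)) // false
}
```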
@@ -334,38 +344,36 @@
return finalList
}

func (p *Processor) resolveList(plname string) error {
if _, pldone := p.finalMap[plname]; pldone {
func resolveList(pl *ParsedList) error {
if _, pldone := finalMap[pl.Name]; pldone {
return nil
}
pl, plexist := p.plMap[plname]
if !plexist {
return fmt.Errorf("list %q not found", plname)

if cirIncMap[pl.Name] {
return fmt.Errorf("circular inclusion in: %q", pl.Name)
}
if p.cirIncMap[plname] {
return fmt.Errorf("circular inclusion in: %q", plname)
}
p.cirIncMap[plname] = true
defer delete(p.cirIncMap, plname)
cirIncMap[pl.Name] = true
defer delete(cirIncMap, pl.Name)

roughMap := make(map[string]*Entry) // Avoid basic duplicates
for _, dentry := range pl.Entries { // Add direct entries
roughMap[dentry.Plain] = dentry
}
for _, inc := range pl.Inclusions {
if _, exist := p.plMap[inc.Source]; !exist {
return fmt.Errorf("list %q includes a non-existent list: %q", plname, inc.Source)
incPl, exist := plMap[inc.Source]
if !exist {
return fmt.Errorf("list %q includes a non-existent list: %q", pl.Name, inc.Source)
}
if err := p.resolveList(inc.Source); err != nil {
if err := resolveList(incPl); err != nil {
return err
}
for _, ientry := range p.finalMap[inc.Source] {
for _, ientry := range finalMap[inc.Source] {
if isMatchAttrFilters(ientry, inc) { // Add included entries
roughMap[ientry.Plain] = ientry
}
}
}
p.finalMap[plname] = polishList(roughMap)
finalMap[pl.Name] = polishList(roughMap)
return nil
}
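`resolveList` recurses through include chains, memoizing finished lists in `finalMap` and keeping the lists currently being resolved in `cirIncMap`, so a cycle such as A includes B and B includes A is reported instead of looping forever. The same visiting-set pattern in isolation, with hypothetical list names:

```go
package main

import "fmt"

// resolve walks a list's includes depth-first. "visiting" holds the lists on
// the current call stack; re-entering one of them means a circular inclusion.
func resolve(name string, includes map[string][]string, visiting map[string]bool) error {
	if visiting[name] {
		return fmt.Errorf("circular inclusion in: %q", name)
	}
	visiting[name] = true
	defer delete(visiting, name) // leave the stack on return, like the deferred delete above
	for _, inc := range includes[name] {
		if err := resolve(inc, includes, visiting); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	// Hypothetical lists: A includes B, and B includes A again.
	includes := map[string][]string{
		"A": {"B"},
		"B": {"A"},
	}
	fmt.Println(resolve("A", includes, map[string]bool{}))
	// circular inclusion in: "A"
}
```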
@@ -373,8 +381,8 @@ func run() error {
dir := *dataPath
fmt.Printf("using domain lists data in %q\n", dir)

// Generate plMap
processor := &Processor{plMap: make(map[string]*ParsedList)}
// Generate refMap
refMap := make(map[string][]*Entry)
err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
@@ -386,16 +394,23 @@ func run() error {
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %q", listName)
}
return processor.loadData(listName, path)
refMap[listName], err = loadData(path)
return err
})
if err != nil {
return fmt.Errorf("failed to loadData: %w", err)
}

// Generate plMap
for refName, refList := range refMap {
if err := parseList(refName, refList); err != nil {
return fmt.Errorf("failed to parseList %q: %w", refName, err)
}
}

// Generate finalMap
processor.finalMap = make(map[string][]*Entry, len(processor.plMap))
processor.cirIncMap = make(map[string]bool)
for plname := range processor.plMap {
if err := processor.resolveList(plname); err != nil {
for plname, pl := range plMap {
if err := resolveList(pl); err != nil {
return fmt.Errorf("failed to resolveList %q: %w", plname, err)
}
}
@@ -404,10 +419,11 @@ func run() error {
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}
// Export plaintext lists

// Export plaintext list
for rawEpList := range strings.SplitSeq(*exportLists, ",") {
if epList := strings.TrimSpace(rawEpList); epList != "" {
entries, exist := processor.finalMap[strings.ToUpper(epList)]
entries, exist := finalMap[strings.ToUpper(epList)]
if !exist || len(entries) == 0 {
fmt.Printf("list %q does not exist or is empty\n", epList)
continue
@@ -422,7 +438,7 @@ func run() error {

// Generate dat file
protoList := new(router.GeoSiteList)
for siteName, siteEntries := range processor.finalMap {
for siteName, siteEntries := range finalMap {
site, err := makeProtoList(siteName, siteEntries)
if err != nil {
return fmt.Errorf("failed to makeProtoList %q: %w", siteName, err)