Compare commits

...

9 Commits

Author SHA1 Message Date
MkQtS
4c7afec5a9 category-electronic-cn: remove non-cn domains (#3375)
They were added in `espressif` and included in `geolocation-!cn`.
2026-03-20 10:53:04 +08:00
Jarl-Penguin
330c30eb23 category-ip-geo-detect: Add ip.hetzner.com (#3374)
Signed-off-by: Jarl-Penguin <jarlpenguin@outlook.com>
2026-03-20 10:47:36 +08:00
inf
f34f22819e category-dev: add ziglang.org (#3373) 2026-03-20 10:46:09 +08:00
ir0nmand0
baa1409cfb category-entertainment-ru: add beeline.tv (#3372)
Beeline TV (beeline.tv) is a Russian streaming service by VEON (Beeline).
Movies, TV series, and live TV channels for Russian-speaking audience.

Subdomains (covered by domain match): web-prod, rest, images, static, video.
External deps (mediavitrina.ru, vimpelcom.ru) already in category-ru.

Co-authored-by: Dima Dudukin <dima.dudukin.dev@gmail.com>
2026-03-19 18:12:53 +08:00
MkQtS
a22d247c5a qcloud: comment out useless regexp rules (#3371)
Overridden by other domain-type rules, but cannot be optimized automatically.

They are actually useless and only affect performance.
2026-03-19 12:43:29 +08:00
yobarerukoto
d311bbe50b geolocation-cn: add gzyowin.com (#3369) 2026-03-18 19:29:43 +08:00
MkQtS
1db558b165 main.go: support to generate multiple custom dats (#3367)
This allows removing any unwanted lists without modifying the domains
data, and generating multiple custom v2ray dat files in a single
command.

As long as the source data is consistent, any list remaining in the trimmed
dat contains the same rules as the corresponding list in the full dat.

Use the new option `datprofile` to specify the config json file path.
`outputname` will be ignored when `datprofile` is set.

Co-authored-by: database64128 <free122448@hotmail.com>
2026-03-18 18:32:05 +08:00
Konstantin
9ee0757263 Add Tilda domains (#3368)
* add tilda

* tilda: add to category-dev
2026-03-18 18:05:54 +08:00
MkQtS
714a061ba3 main.go: improve codes (#3366)
* main.go: improve codes

* main.go: add parseInclusion

- separate from parseEntry
- do not allow affiliation for inclusion
2026-03-18 15:58:47 +08:00
9 changed files with 269 additions and 114 deletions

6
.gitignore vendored
View File

@@ -4,9 +4,9 @@
/domain-list-community /domain-list-community
/domain-list-community.exe /domain-list-community.exe
# Generated dat file. # Generated dat files.
dlc.dat /*.dat
# Exported plaintext lists. # Exported plaintext lists.
/*.yml
/*.txt /*.txt
/*.yml

View File

@@ -54,6 +54,7 @@ include:stackexchange
include:strikingly include:strikingly
include:termux include:termux
include:thelinuxfoundation include:thelinuxfoundation
include:tilda
include:unity include:unity
include:v8 include:v8
@@ -152,4 +153,5 @@ wireshark.org
x.org x.org
xposed.info xposed.info
yarnpkg.com yarnpkg.com
ziglang.org
zsh.org zsh.org

View File

@@ -17,11 +17,10 @@ bouffalolab.com
cxmt.com cxmt.com
# 乐鑫信息科技 # 乐鑫信息科技
#include:espressif
esp8266.cn esp8266.cn
esp8266.com
esp8266.com.cn esp8266.com.cn
espressif.cn espressif.cn
espressif.com
espressif.com.cn espressif.com.cn
# 华秋电子 # 华秋电子

View File

@@ -11,6 +11,7 @@ include:okko
include:wink include:wink
24h.tv 24h.tv
amediateka.ru amediateka.ru
beeline.tv
ivi.ru ivi.ru
premier.one premier.one
smotreshka.tv smotreshka.tv

View File

@@ -134,6 +134,7 @@ full:checkip.amazonaws.com
full:ipv4-check-perf.radar.cloudflare.com full:ipv4-check-perf.radar.cloudflare.com
full:ipv6-check-perf.radar.cloudflare.com full:ipv6-check-perf.radar.cloudflare.com
geoip.noc.gov.ru geoip.noc.gov.ru
ip.hetzner.com
ip.mail.ru ip.mail.ru
ip.nic.ru ip.nic.ru
ip.tyk.nu ip.tyk.nu

View File

@@ -1625,3 +1625,6 @@ ao-x.ac.cn
# 万集科技 京ICP备18036282号-2 # 万集科技 京ICP备18036282号-2
wanji.net.cn wanji.net.cn
# 广州市雅望互联网服务有限公司
gzyowin.com

View File

@@ -258,13 +258,14 @@ tdnsv14.net
tdnsv15.net tdnsv15.net
# myqcloud inside mainland China # myqcloud inside mainland China
regexp:\.(.+-)?ap-beijing(-.+)?\.myqcloud\.com$ #北京 # overrided by myqcloud.com
regexp:\.(.+-)?ap-nanjing(-.+)?\.myqcloud\.com$ # #regexp:\.(.+-)?ap-beijing(-.+)?\.myqcloud\.com$ #
regexp:\.(.+-)?ap-shanghai(-.+)?\.myqcloud\.com$ #上海 #regexp:\.(.+-)?ap-nanjing(-.+)?\.myqcloud\.com$ #南京
regexp:\.(.+-)?ap-guangzhou(-.+)?\.myqcloud\.com$ #广州 #regexp:\.(.+-)?ap-shanghai(-.+)?\.myqcloud\.com$ #上海
regexp:\.(.+-)?ap-chengdu(-.+)?\.myqcloud\.com$ #成都 #regexp:\.(.+-)?ap-guangzhou(-.+)?\.myqcloud\.com$ #广州
regexp:\.(.+-)?ap-chongqing(-.+)?\.myqcloud\.com$ #重庆 #regexp:\.(.+-)?ap-chengdu(-.+)?\.myqcloud\.com$ #成都
regexp:\.(.+-)?ap-shenzhen(-.+)?\.myqcloud\.com$ #深圳 #regexp:\.(.+-)?ap-chongqing(-.+)?\.myqcloud\.com$ #重庆
#regexp:\.(.+-)?ap-shenzhen(-.+)?\.myqcloud\.com$ #深圳
# COS 使用到的非中国大陆的地域与可用区,参见 https://cloud.tencent.com/document/product/436/6224 # COS 使用到的非中国大陆的地域与可用区,参见 https://cloud.tencent.com/document/product/436/6224
ap-hongkong.myqcloud.com @!cn #中国香港 ap-hongkong.myqcloud.com @!cn #中国香港
@@ -282,13 +283,14 @@ eu-frankfurt.myqcloud.com @!cn #法兰克福
eu-moscow.myqcloud.com @!cn #莫斯科 eu-moscow.myqcloud.com @!cn #莫斯科
# tencentcos inside mainland China # tencentcos inside mainland China
regexp:\.(.+-)?ap-beijing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #北京 # overrided by tencentcos.cn, tencentcos.com, tencentcos.com.cn
regexp:\.(.+-)?ap-nanjing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ # #regexp:\.(.+-)?ap-beijing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #
regexp:\.(.+-)?ap-shanghai(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #上海 #regexp:\.(.+-)?ap-nanjing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #南京
regexp:\.(.+-)?ap-guangzhou(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #广州 #regexp:\.(.+-)?ap-shanghai(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #上海
regexp:\.(.+-)?ap-chengdu(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #成都 #regexp:\.(.+-)?ap-guangzhou(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #广州
regexp:\.(.+-)?ap-chongqing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #重庆 #regexp:\.(.+-)?ap-chengdu(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #成都
regexp:\.(.+-)?ap-shenzhen(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #深圳 #regexp:\.(.+-)?ap-chongqing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #重庆
#regexp:\.(.+-)?ap-shenzhen(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #深圳
# tencentcos outside mainland China # tencentcos outside mainland China
# regexp:.+\.ap-hongkong\.tencentcos\.(cn|com(\.cn)?)$ @!cn #中国香港 # regexp:.+\.ap-hongkong\.tencentcos\.(cn|com(\.cn)?)$ @!cn #中国香港

5
data/tilda Normal file
View File

@@ -0,0 +1,5 @@
tilda.cc
tilda.ru
tilda.ws
tildaapi.com
tildacdn.com

332
main.go
View File

@@ -2,6 +2,7 @@ package main
import ( import (
"bufio" "bufio"
"encoding/json"
"flag" "flag"
"fmt" "fmt"
"os" "os"
@@ -19,6 +20,7 @@ var (
dataPath = flag.String("datapath", "./data", "Path to your custom 'data' directory") dataPath = flag.String("datapath", "./data", "Path to your custom 'data' directory")
outputName = flag.String("outputname", "dlc.dat", "Name of the generated dat file") outputName = flag.String("outputname", "dlc.dat", "Name of the generated dat file")
outputDir = flag.String("outputdir", "./", "Directory to place all generated files") outputDir = flag.String("outputdir", "./", "Directory to place all generated files")
datProfile = flag.String("datprofile", "", "Path of config file used to assemble custom dats")
exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma") exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma")
) )
@@ -47,7 +49,24 @@ type Processor struct {
cirIncMap map[string]bool cirIncMap map[string]bool
} }
func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) { type GeoSites struct {
Sites []*router.GeoSite
SiteIdx map[string]int
}
type DatTask struct {
Name string `json:"name"`
Mode string `json:"mode"`
Lists []string `json:"lists"`
}
const (
ModeAll string = "all"
ModeAllowlist string = "allowlist"
ModeDenylist string = "denylist"
)
func makeProtoList(listName string, entries []*Entry) *router.GeoSite {
site := &router.GeoSite{ site := &router.GeoSite{
CountryCode: listName, CountryCode: listName,
Domain: make([]*router.Domain, 0, len(entries)), Domain: make([]*router.Domain, 0, len(entries)),
@@ -73,7 +92,91 @@ func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
} }
site.Domain = append(site.Domain, pdomain) site.Domain = append(site.Domain, pdomain)
} }
return site, nil return site
}
func loadTasks(path string) ([]DatTask, error) {
f, err := os.Open(path)
if err != nil {
return nil, err
}
defer f.Close()
var tasks []DatTask
dec := json.NewDecoder(f)
if err := dec.Decode(&tasks); err != nil {
return nil, fmt.Errorf("failed to decode json: %w", err)
}
for i, t := range tasks {
if t.Name == "" {
return nil, fmt.Errorf("task[%d]: name is required", i)
}
switch t.Mode {
case ModeAll, ModeAllowlist, ModeDenylist:
default:
return nil, fmt.Errorf("task[%d] %q: invalid mode %q", i, t.Name, t.Mode)
}
}
return tasks, nil
}
func (gs *GeoSites) assembleDat(task DatTask) error {
datFileName := strings.ToLower(filepath.Base(task.Name))
geoSiteList := new(router.GeoSiteList)
switch task.Mode {
case ModeAll:
geoSiteList.Entry = gs.Sites
case ModeAllowlist:
allowedIdxes := make([]int, 0, len(task.Lists))
for _, list := range task.Lists {
if idx, ok := gs.SiteIdx[strings.ToUpper(list)]; ok {
allowedIdxes = append(allowedIdxes, idx)
} else {
return fmt.Errorf("list %q not found for allowlist task", list)
}
}
slices.Sort(allowedIdxes)
allowedlen := len(allowedIdxes)
if allowedlen == 0 {
return fmt.Errorf("allowlist needs at least one valid list")
}
geoSiteList.Entry = make([]*router.GeoSite, allowedlen)
for i, idx := range allowedIdxes {
geoSiteList.Entry[i] = gs.Sites[idx]
}
case ModeDenylist:
deniedMap := make(map[int]bool, len(task.Lists))
for _, list := range task.Lists {
if idx, ok := gs.SiteIdx[strings.ToUpper(list)]; ok {
deniedMap[idx] = true
} else {
fmt.Printf("[Warn] list %q not found in denylist task %q", list, task.Name)
}
}
deniedlen := len(deniedMap)
if deniedlen == 0 {
fmt.Printf("[Warn] nothing to deny in task %q", task.Name)
geoSiteList.Entry = gs.Sites
} else {
geoSiteList.Entry = make([]*router.GeoSite, 0, len(gs.Sites)-deniedlen)
for i, site := range gs.Sites {
if !deniedMap[i] {
geoSiteList.Entry = append(geoSiteList.Entry, site)
}
}
}
}
protoBytes, err := proto.Marshal(geoSiteList)
if err != nil {
return fmt.Errorf("failed to marshal: %w", err)
}
if err := os.WriteFile(filepath.Join(*outputDir, datFileName), protoBytes, 0644); err != nil {
return fmt.Errorf("failed to write file %q: %w", datFileName, err)
}
fmt.Printf("dat %q has been generated successfully\n", datFileName)
return nil
} }
func writePlainList(listname string, entries []*Entry) error { func writePlainList(listname string, entries []*Entry) error {
@@ -89,46 +192,28 @@ func writePlainList(listname string, entries []*Entry) error {
return w.Flush() return w.Flush()
} }
func parseEntry(line string) (*Entry, []string, error) { func parseEntry(typ, rule string) (*Entry, []string, error) {
entry := new(Entry) entry := &Entry{Type: typ}
parts := strings.Fields(line) parts := strings.Fields(rule)
if len(parts) == 0 { if len(parts) == 0 {
return entry, nil, fmt.Errorf("empty line") return entry, nil, fmt.Errorf("empty domain rule")
} }
// Parse value
// Parse type and value switch entry.Type {
typ, val, isTypeSpecified := strings.Cut(parts[0], ":") case dlc.RuleTypeRegexp:
typ = strings.ToLower(typ) if _, err := regexp.Compile(parts[0]); err != nil {
if !isTypeSpecified { // Default RuleType return entry, nil, fmt.Errorf("invalid regexp %q: %w", parts[0], err)
if !validateDomainChars(typ) {
return entry, nil, fmt.Errorf("invalid domain: %q", typ)
} }
entry.Type = dlc.RuleTypeDomain entry.Value = parts[0]
entry.Value = typ case dlc.RuleTypeDomain, dlc.RuleTypeFullDomain, dlc.RuleTypeKeyword:
} else { entry.Value = strings.ToLower(parts[0])
switch typ { if !validateDomainChars(entry.Value) {
case dlc.RuleTypeRegexp: return entry, nil, fmt.Errorf("invalid domain: %q", entry.Value)
if _, err := regexp.Compile(val); err != nil {
return entry, nil, fmt.Errorf("invalid regexp %q: %w", val, err)
}
entry.Type = dlc.RuleTypeRegexp
entry.Value = val
case dlc.RuleTypeInclude:
entry.Type = dlc.RuleTypeInclude
entry.Value = strings.ToUpper(val)
if !validateSiteName(entry.Value) {
return entry, nil, fmt.Errorf("invalid included list name: %q", entry.Value)
}
case dlc.RuleTypeDomain, dlc.RuleTypeFullDomain, dlc.RuleTypeKeyword:
entry.Type = typ
entry.Value = strings.ToLower(val)
if !validateDomainChars(entry.Value) {
return entry, nil, fmt.Errorf("invalid domain: %q", entry.Value)
}
default:
return entry, nil, fmt.Errorf("invalid type: %q", typ)
} }
default:
return entry, nil, fmt.Errorf("unknown rule type: %q", entry.Type)
} }
plen := len(entry.Type) + len(entry.Value) + 1
// Parse attributes and affiliations // Parse attributes and affiliations
var affs []string var affs []string
@@ -140,6 +225,7 @@ func parseEntry(line string) (*Entry, []string, error) {
return entry, affs, fmt.Errorf("invalid attribute: %q", attr) return entry, affs, fmt.Errorf("invalid attribute: %q", attr)
} }
entry.Attrs = append(entry.Attrs, attr) entry.Attrs = append(entry.Attrs, attr)
plen += 2 + len(attr)
case '&': case '&':
aff := strings.ToUpper(part[1:]) aff := strings.ToUpper(part[1:])
if !validateSiteName(aff) { if !validateSiteName(aff) {
@@ -147,33 +233,70 @@ func parseEntry(line string) (*Entry, []string, error) {
} }
affs = append(affs, aff) affs = append(affs, aff)
default: default:
return entry, affs, fmt.Errorf("invalid attribute/affiliation: %q", part) return entry, affs, fmt.Errorf("unknown field: %q", part)
} }
} }
if entry.Type != dlc.RuleTypeInclude { slices.Sort(entry.Attrs) // Sort attributes
slices.Sort(entry.Attrs) // Sort attributes // Formated plain entry: type:domain.tld:@attr1,@attr2
// Formated plain entry: type:domain.tld:@attr1,@attr2 var plain strings.Builder
var plain strings.Builder plain.Grow(plen)
plain.Grow(len(entry.Type) + len(entry.Value) + 10) plain.WriteString(entry.Type)
plain.WriteString(entry.Type) plain.WriteByte(':')
plain.WriteByte(':') plain.WriteString(entry.Value)
plain.WriteString(entry.Value) for i, attr := range entry.Attrs {
for i, attr := range entry.Attrs { if i == 0 {
if i == 0 { plain.WriteByte(':')
plain.WriteByte(':') } else {
} else { plain.WriteByte(',')
plain.WriteByte(',')
}
plain.WriteByte('@')
plain.WriteString(attr)
} }
entry.Plain = plain.String() plain.WriteByte('@')
plain.WriteString(attr)
} }
entry.Plain = plain.String()
return entry, affs, nil return entry, affs, nil
} }
func parseInclusion(rule string) (*Inclusion, error) {
parts := strings.Fields(rule)
if len(parts) == 0 {
return nil, fmt.Errorf("empty inclusion")
}
inc := &Inclusion{Source: strings.ToUpper(parts[0])}
if !validateSiteName(inc.Source) {
return inc, fmt.Errorf("invalid included list name: %q", inc.Source)
}
// Parse attributes
for _, part := range parts[1:] {
switch part[0] {
case '@':
attr := strings.ToLower(part[1:])
if attr[0] == '-' {
battr := attr[1:]
if !validateAttrChars(battr) {
return inc, fmt.Errorf("invalid ban attribute: %q", battr)
}
inc.BanAttrs = append(inc.BanAttrs, battr)
} else {
if !validateAttrChars(attr) {
return inc, fmt.Errorf("invalid must attribute: %q", attr)
}
inc.MustAttrs = append(inc.MustAttrs, attr)
}
case '&':
return inc, fmt.Errorf("affiliation is not allowed for inclusion")
default:
return inc, fmt.Errorf("unknown field: %q", part)
}
}
return inc, nil
}
func validateDomainChars(domain string) bool { func validateDomainChars(domain string) bool {
if domain == "" {
return false
}
for i := range domain { for i := range domain {
c := domain[i] c := domain[i]
if (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '.' || c == '-' { if (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '.' || c == '-' {
@@ -185,9 +308,12 @@ func validateDomainChars(domain string) bool {
} }
func validateAttrChars(attr string) bool { func validateAttrChars(attr string) bool {
if attr == "" {
return false
}
for i := range attr { for i := range attr {
c := attr[i] c := attr[i]
if (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '!' || c == '-' { if (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '!' {
continue continue
} }
return false return false
@@ -196,6 +322,9 @@ func validateAttrChars(attr string) bool {
} }
func validateSiteName(name string) bool { func validateSiteName(name string) bool {
if name == "" {
return false
}
for i := range name { for i := range name {
c := name[i] c := name[i]
if (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '!' || c == '-' { if (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '!' || c == '-' {
@@ -232,26 +361,23 @@ func (p *Processor) loadData(listName string, path string) error {
if line == "" { if line == "" {
continue continue
} }
entry, affs, err := parseEntry(line) typ, rule, isTypeSpecified := strings.Cut(line, ":")
if err != nil { if !isTypeSpecified { // Default RuleType
return fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err) typ, rule = dlc.RuleTypeDomain, typ
} else {
typ = strings.ToLower(typ)
} }
if typ == dlc.RuleTypeInclude {
if entry.Type == dlc.RuleTypeInclude { inc, err := parseInclusion(rule)
inc := &Inclusion{Source: entry.Value} if err != nil {
for _, attr := range entry.Attrs { return fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
if attr[0] == '-' {
inc.BanAttrs = append(inc.BanAttrs, attr[1:])
} else {
inc.MustAttrs = append(inc.MustAttrs, attr)
}
}
for _, aff := range affs {
apl := p.getOrCreateParsedList(aff)
apl.Inclusions = append(apl.Inclusions, inc)
} }
pl.Inclusions = append(pl.Inclusions, inc) pl.Inclusions = append(pl.Inclusions, inc)
} else { } else {
entry, affs, err := parseEntry(typ, rule)
if err != nil {
return fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
}
for _, aff := range affs { for _, aff := range affs {
apl := p.getOrCreateParsedList(aff) apl := p.getOrCreateParsedList(aff)
apl.Entries = append(apl.Entries, entry) apl.Entries = append(apl.Entries, entry)
@@ -259,7 +385,7 @@ func (p *Processor) loadData(listName string, path string) error {
pl.Entries = append(pl.Entries, entry) pl.Entries = append(pl.Entries, entry)
} }
} }
return nil return scanner.Err()
} }
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool { func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
@@ -360,6 +486,9 @@ func (p *Processor) resolveList(plname string) error {
} }
} }
} }
if len(roughMap) == 0 {
return fmt.Errorf("empty list")
}
p.finalMap[plname] = polishList(roughMap) p.finalMap[plname] = polishList(roughMap)
return nil return nil
} }
@@ -387,13 +516,15 @@ func run() error {
return fmt.Errorf("failed to loadData: %w", err) return fmt.Errorf("failed to loadData: %w", err)
} }
// Generate finalMap // Generate finalMap
processor.finalMap = make(map[string][]*Entry, len(processor.plMap)) sitesCount := len(processor.plMap)
processor.finalMap = make(map[string][]*Entry, sitesCount)
processor.cirIncMap = make(map[string]bool) processor.cirIncMap = make(map[string]bool)
for plname := range processor.plMap { for plname := range processor.plMap {
if err := processor.resolveList(plname); err != nil { if err := processor.resolveList(plname); err != nil {
return fmt.Errorf("failed to resolveList %q: %w", plname, err) return fmt.Errorf("failed to resolveList %q: %w", plname, err)
} }
} }
processor.plMap = nil
// Make sure output directory exists // Make sure output directory exists
if err := os.MkdirAll(*outputDir, 0755); err != nil { if err := os.MkdirAll(*outputDir, 0755); err != nil {
@@ -403,47 +534,58 @@ func run() error {
for rawEpList := range strings.SplitSeq(*exportLists, ",") { for rawEpList := range strings.SplitSeq(*exportLists, ",") {
if epList := strings.TrimSpace(rawEpList); epList != "" { if epList := strings.TrimSpace(rawEpList); epList != "" {
entries, exist := processor.finalMap[strings.ToUpper(epList)] entries, exist := processor.finalMap[strings.ToUpper(epList)]
if !exist || len(entries) == 0 { if !exist {
fmt.Printf("list %q does not exist or is empty\n", epList) fmt.Printf("[Warn] list %q does not exist\n", epList)
continue continue
} }
if err := writePlainList(epList, entries); err != nil { if err := writePlainList(epList, entries); err != nil {
fmt.Printf("failed to write list %q: %v\n", epList, err) fmt.Printf("[Error] failed to write list %q: %v\n", epList, err)
continue continue
} }
fmt.Printf("list %q has been generated successfully.\n", epList) fmt.Printf("list %q has been generated successfully\n", epList)
} }
} }
// Generate dat file // Generate proto sites
protoList := new(router.GeoSiteList) gs := &GeoSites{
for siteName, siteEntries := range processor.finalMap { Sites: make([]*router.GeoSite, 0, sitesCount),
site, err := makeProtoList(siteName, siteEntries) SiteIdx: make(map[string]int, sitesCount),
if err != nil {
return fmt.Errorf("failed to makeProtoList %q: %w", siteName, err)
}
protoList.Entry = append(protoList.Entry, site)
} }
// Sort protoList so the marshaled list is reproducible for siteName, siteEntries := range processor.finalMap {
slices.SortFunc(protoList.Entry, func(a, b *router.GeoSite) int { gs.Sites = append(gs.Sites, makeProtoList(siteName, siteEntries))
}
processor = nil
// Sort proto sites so the generated file is reproducible
slices.SortFunc(gs.Sites, func(a, b *router.GeoSite) int {
return strings.Compare(a.CountryCode, b.CountryCode) return strings.Compare(a.CountryCode, b.CountryCode)
}) })
for i := range sitesCount {
gs.SiteIdx[gs.Sites[i].CountryCode] = i
}
protoBytes, err := proto.Marshal(protoList) // Load tasks and generate dat files
if err != nil { var tasks []DatTask
return fmt.Errorf("failed to marshal: %w", err) if *datProfile == "" {
tasks = []DatTask{{Name: *outputName, Mode: ModeAll}}
} else {
var err error
tasks, err = loadTasks(*datProfile)
if err != nil {
return fmt.Errorf("failed to loadTasks %q: %v", *datProfile, err)
}
} }
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil { for _, task := range tasks {
return fmt.Errorf("failed to write output: %w", err) if err := gs.assembleDat(task); err != nil {
fmt.Printf("[Error] failed to assembleDat %q: %v", task.Name, err)
}
} }
fmt.Printf("%q has been generated successfully.\n", *outputName)
return nil return nil
} }
func main() { func main() {
flag.Parse() flag.Parse()
if err := run(); err != nil { if err := run(); err != nil {
fmt.Printf("Fatal error: %v\n", err) fmt.Printf("[Fatal] critical error: %v\n", err)
os.Exit(1) os.Exit(1)
} }
} }