Compare commits


4 Commits

Author SHA1 Message Date
MkQtS
488ee0334e cloudflare: add more domains (#3169) 2026-01-12 13:32:25 +08:00
encryptedsni.com
from https://blog.cloudflare.com/encrypt-that-sni-firefox-edition/

cloudflarechallenge.com
from https://blog.cloudflare.com/introducing-cryptographic-attestation-of-personhood/

cloudflareresearch.com
from https://blog.cloudflare.com/experiment-with-pq/

browser.run
cfdata.org
cloudflarebrowser.com
cloudflarecp.com
from https://developers.cloudflare.com/cloudflare-one/traffic-policies/global-policies/

cloudflare-terms-of-service-abuse.com
cloudflare.dev
cloudflaresupport.com
cloudflareworkers.com
from public information
深鸣
1b63c69d76 Add more non-cn domains (#3167) 2026-01-12 13:04:45 +08:00
深鸣
6dee1594d4 Add more cn domains (#3166) 2026-01-12 12:59:26 +08:00
深鸣
49cbfc60e5 Add more ad domains (#3168) 2026-01-12 12:23:58 +08:00
12 changed files with 315 additions and 308 deletions

View File

@@ -202,3 +202,7 @@ zhugeio.com
# 车来了
atrace.chelaile.net.cn
logs.chelaile.net.cn
# 航旅纵横
analytics.umetrip.com
sensors.umetrip.com.cn

View File

@@ -1,43 +1,37 @@
# 远程桌面/远程组网
## 连连控
asklink.com
## EasyTier
easytier.cn
## Oray
oray.com
oray.net
orayer.com
orayimg.com
sunlogin.net
## 叮当猫脚本管理系统 懒人精灵远程调试
privateapi.xyz
## ToDesk
todesk.cn
todesk.com
## 连连控
asklink.com
## 叮当猫脚本管理系统 懒人精灵远程调试
privateapi.xyz
# xEdge干将互联
## xEdge干将互联
include:xedge
# 文档协作
## Tower 团队协作
tower.im
## 秀米编辑器
tritoninfo.net
xiumi.us
xiumius.com
## 语雀
nlark.com
yuque.com
## 石墨文档
shimo.im
shimonote.com
smgv.cn
## 语雀
nlark.com
yuque.com
## Tower 团队协作
tower.im
## 秀米编辑器
tritoninfo.net
xiumi.us
xiumius.com
## 吾道
woodo.cn

View File

@@ -140,6 +140,7 @@ shellcheck.net
shields.io
sqlite.org
sublimetext.com
tampermonkey.net
termius.com
unpkg.com
videojs.com

View File

@@ -104,6 +104,9 @@ bag.itunes.apple.com
bookeeper.itunes.apple.com
# Entertainment oriented media
# This section references the "Source Considerations" page on the Chinese Wikipedia:
# - https://zh.wikipedia.org/wiki/WikiProject:电子游戏/来源考量
# - https://zh.wikipedia.org/wiki/WikiProject:ACG/來源考量
## 4Gamer.net
4gamer.net
## 4Gamers
@@ -116,20 +119,28 @@ appget.com
appmedia.jp
## Automaton
automaton-media.com
## 橙心社
cxacg.com
## 电faminicogamer
denfaminicogamer.jp
## ASCII Media Works
dengekionline.com
## E-ROAD
eroge-road.com
## esports.gg
esports.gg
## ファミ通
famitsu.com
## GameApps.HK
gameapps.hk
## 遊戲基地
gamebase.com.tw
## IID, Inc.
gamebusiness.jp
gamespark.jp
inside-games.jp
## Game Focus
gamefocus.co.kr
## GameMeca
gamemeca.com
## Gameover有機網
@@ -141,6 +152,8 @@ onlinegamer.jp
greatgame.asia
## HobbiGame
hobbigame.com
## ゲーム文化保存研究所
igcc.jp
## IGN
ign.com
ignimg.com
@@ -152,18 +165,30 @@ mediaclip.jp
maedahiroyuki.com
## MANTANWEB
mantan-web.jp
## モエデジ
moedigi.com
## Moepedia
moepedia.net
## マイナビニュース
mynavi.jp
## Openbook阅读通
openbook.org.tw
## QooApp
qoo-app.com
## Real Sound
realsound.jp
## れポたま!
repotama.com
## Saiga NAK
saiganak.com
## SQOOL
sqool.net
## The Games Daily
tgdaily.co.kr
## Thisisgame
thisisgame.com
## 玩具人
toy-people.com
## Wanuxi
wanuxi.com
## よろず〜

View File

@@ -109,6 +109,9 @@ zhulang.com
zongheng.com
# 娱乐资讯媒体
# 该部分参考了中文维基百科的「来源考量」:
# - https://zh.wikipedia.org/wiki/WikiProject:电子游戏/来源考量
# - https://zh.wikipedia.org/wiki/WikiProject:ACG/來源考量
include:tgbus
include:vgtime
@@ -143,11 +146,15 @@ ign.com.cn
nadianshi.com
## 游戏日报
news.yxrb.net
## 手谈姬
shoutanjjj.com
## 游戏机实用技术
ucg.cn
## 游戏茶馆
youxichaguan.com
## 游戏葡萄
youxiputao.com
## 游戏陀螺
youxituoluo.com
## 游研社
yystv.cn

View File

@@ -112,6 +112,7 @@ hkej.com
hkgpao.com
hongkongfp.com
inmediahk.net
inquirer.net
inside.com.tw
itmedia.co.jp
jfengtime.com
@@ -120,6 +121,7 @@ limedia.tw
localpresshk.com
ltsports.com.tw
macaodaily.com
maidonanews.jp
mdnkids.com
mirrormedia.com.tw
mirrormedia.mg

View File

@@ -2,33 +2,44 @@ include:cloudflare-cn
include:cloudflare-ipfs
argotunnel.com
browser.run
cfargotunnel.com
cfdata.org
cfl.re
cloudflare-dns.com
cloudflare-ech.com
cloudflare-esni.com
cloudflare-gateway.com
cloudflare-quic.com
cloudflare-terms-of-service-abuse.com
cloudflare.com
cloudflare.dev
cloudflare.net
cloudflare.tv
cloudflareaccess.com
cloudflareapps.com
cloudflarebolt.com
cloudflarebrowser.com
cloudflarechallenge.com
cloudflareclient.com
cloudflarecp.com
cloudflareinsights.com
cloudflareok.com
cloudflarepartners.com
cloudflareportal.com
cloudflarepreview.com
cloudflareregistrar.com
cloudflareresearch.com
cloudflareresolve.com
cloudflaressl.com
cloudflarestatus.com
cloudflarestorage.com
cloudflarestream.com
cloudflaresupport.com
cloudflaretest.com
cloudflarewarp.com
cloudflareworkers.com
encryptedsni.com
every1dns.net
imagedelivery.net
isbgpsafeyet.com

View File

@@ -230,6 +230,8 @@ xiamenair.com # 厦门航空
12306.cn
95306.cn
ccrgt.com
## 北京市政交通一卡通
bmac.com.cn
## 车来了
chelaile.net.cn
## 跨境巴士

View File

@@ -1,3 +1,4 @@
c.sayhi.360.cn @ads
fenxi.360.cn @ads
fenxi.com @ads
lianmeng.360.cn @ads

View File

@@ -79,3 +79,14 @@ fymallqa3.com
fymallqa4.com
fymallqa7.com
fymallqa9.com
tdnsdl1.cn
tdnsdl1.com.cn
tdnsdl2.cn
tdnsdl2.com.cn
tdnsdl3.cn
tdnsdl3.com.cn
tdnsdl4.cn
tdnsdl4.com.cn
tdnsdl5.cn
tdnsdl5.com.cn

View File

@@ -1,6 +1,7 @@
ad.browser.qq.com @ads
ad.qq.com @ads
ad.qun.qq.com @ads
ad.tencentmusic.com @ads
ad.weixin.qq.com @ads
adfilter.imtt.qq.com @ads
adnet.qq.com @ads
@@ -42,6 +43,7 @@ pmir.3g.qq.com @ads
push.qq.com @ads
qqdata.ab.qq.com @ads
report.qqweb.qq.com @ads
report.tencentmusic.com @ads
rmonitor.qq.com @ads
sngmta.qq.com @ads
stat.y.qq.com @ads

main.go
View File

@@ -29,31 +29,10 @@ const (
RuleTypeInclude string = "include"
)
var (
TypeChecker = regexp.MustCompile(`^(domain|full|keyword|regexp|include)$`)
ValueChecker = regexp.MustCompile(`^[a-z0-9!\.-]+$`)
AttrChecker = regexp.MustCompile(`^[a-z0-9!-]+$`)
SiteChecker = regexp.MustCompile(`^[A-Z0-9!-]+$`)
)
var (
refMap = make(map[string]*List)
plMap = make(map[string]*ParsedList)
finalMap = make(map[string][]Entry)
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
)
type Entry struct {
Type string
Value string
Attrs []string
Affs []string
}
type Inclusion struct {
Source string
MustAttrs []string
BanAttrs []string
Attrs []*router.Domain_Attribute
}
type List struct {
@@ -62,116 +41,150 @@ type List struct {
}
type ParsedList struct {
Name string
Inclusions []Inclusion
Entry []Entry
Name string
Inclusion map[string]bool
Entry []Entry
}
func makeProtoList(listName string, entries *[]Entry) (*router.GeoSite, error) {
site := &router.GeoSite{
CountryCode: listName,
Domain: make([]*router.Domain, 0, len(*entries)),
}
for _, entry := range *entries {
pdomain := &router.Domain{Value: entry.Value}
for _, attr := range entry.Attrs {
pdomain.Attribute = append(pdomain.Attribute, &router.Domain_Attribute{
Key: attr,
TypedValue: &router.Domain_Attribute_BoolValue{BoolValue: true},
})
func (l *ParsedList) toPlainText(listName string) error {
var entryBytes []byte
for _, entry := range l.Entry {
var attrString string
if entry.Attrs != nil {
for _, attr := range entry.Attrs {
attrString += "@" + attr.GetKey() + ","
}
attrString = strings.TrimRight(":"+attrString, ",")
}
// Entry output format is: type:domain.tld:@attr1,@attr2
entryBytes = append(entryBytes, []byte(entry.Type+":"+entry.Value+attrString+"\n")...)
}
if err := os.WriteFile(filepath.Join(*outputDir, listName+".txt"), entryBytes, 0644); err != nil {
return err
}
return nil
}
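// For illustration, assuming RuleTypeDomain is the literal "domain": a data-file
// line such as "c.sayhi.360.cn @ads" would be written by toPlainText as
// "domain:c.sayhi.360.cn:@ads", while an attribute-free entry such as
// "cloudflare.com" would come out as "domain:cloudflare.com".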
func (l *ParsedList) toProto() (*router.GeoSite, error) {
site := &router.GeoSite{
CountryCode: l.Name,
}
for _, entry := range l.Entry {
switch entry.Type {
case RuleTypeDomain:
pdomain.Type = router.Domain_RootDomain
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_RootDomain,
Value: entry.Value,
Attribute: entry.Attrs,
})
case RuleTypeRegexp:
pdomain.Type = router.Domain_Regex
// check regexp validity to avoid runtime error
_, err := regexp.Compile(entry.Value)
if err != nil {
return nil, fmt.Errorf("invalid regexp in list %s: %s", l.Name, entry.Value)
}
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_Regex,
Value: entry.Value,
Attribute: entry.Attrs,
})
case RuleTypeKeyword:
pdomain.Type = router.Domain_Plain
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_Plain,
Value: entry.Value,
Attribute: entry.Attrs,
})
case RuleTypeFullDomain:
pdomain.Type = router.Domain_Full
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_Full,
Value: entry.Value,
Attribute: entry.Attrs,
})
default:
return nil, fmt.Errorf("unknown domain type: %s", entry.Type)
}
site.Domain = append(site.Domain, pdomain)
}
return site, nil
}
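// For illustration, with a made-up pattern: a list entry written as
// "regexp:^ad\." would be compiled here first and, if valid, emitted as a
// router.Domain of type router.Domain_Regex; a pattern that fails
// regexp.Compile stops the build with the "invalid regexp in list" error
// instead of surfacing later at runtime.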
func writePlainList(exportedName string) error {
targetList, exist := finalMap[strings.ToUpper(exportedName)]
if !exist || len(targetList) == 0 {
return fmt.Errorf("'%s' list does not exist or is empty.", exportedName)
}
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(exportedName) + ".txt"))
if err != nil {
return err
}
defer file.Close()
w := bufio.NewWriter(file)
for _, entry := range targetList {
// Entry output format is: type:domain.tld:@attr1,@attr2
var attrString string
if entry.Attrs != nil {
attrString = ":@" + strings.Join(entry.Attrs, ",@")
func exportPlainTextList(list []string, refName string, pl *ParsedList) {
for _, listName := range list {
if strings.EqualFold(refName, listName) {
if err := pl.toPlainText(strings.ToLower(refName)); err != nil {
fmt.Println("Failed:", err)
continue
}
fmt.Printf("'%s' has been generated successfully.\n", listName)
}
fmt.Fprintln(w, entry.Type + ":" + entry.Value + attrString)
}
return w.Flush()
}
func parseEntry(line string) (Entry, error) {
var entry Entry
parts := strings.Fields(line)
func removeComment(line string) string {
idx := strings.Index(line, "#")
if idx == -1 {
return line
}
return strings.TrimSpace(line[:idx])
}
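// For illustration: removeComment handles the inline comments seen in the data
// files above, e.g. "xiamenair.com # 厦门航空" is reduced to "xiamenair.com",
// and a comment-only line becomes the empty string and is skipped by the caller.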
// Parse type and value
rawTypeVal := parts[0]
kv := strings.Split(rawTypeVal, ":")
func parseDomain(domain string, entry *Entry) error {
kv := strings.Split(domain, ":")
if len(kv) == 1 {
entry.Type = RuleTypeDomain // Default type
entry.Value = strings.ToLower(rawTypeVal)
} else if len(kv) == 2 {
entry.Type = RuleTypeDomain
entry.Value = strings.ToLower(kv[0])
return nil
}
if len(kv) == 2 {
entry.Type = strings.ToLower(kv[0])
if entry.Type == RuleTypeRegexp {
if strings.EqualFold(entry.Type, RuleTypeRegexp) {
entry.Value = kv[1]
} else {
entry.Value = strings.ToLower(kv[1])
}
} else {
return entry, fmt.Errorf("invalid format: %s", line)
}
// Check type and value
if !TypeChecker.MatchString(entry.Type) {
return entry, fmt.Errorf("invalid type: %s", entry.Type)
}
if entry.Type == RuleTypeRegexp {
if _, err := regexp.Compile(entry.Value); err != nil {
return entry, fmt.Errorf("invalid regexp: %s", entry.Value)
}
} else if !ValueChecker.MatchString(entry.Value) {
return entry, fmt.Errorf("invalid value: %s", entry.Value)
return nil
}
// Parse/Check attributes and affiliations
for _, part := range parts[1:] {
if strings.HasPrefix(part, "@") {
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
if !AttrChecker.MatchString(attr) {
return entry, fmt.Errorf("invalid attribute key: %s", attr)
}
entry.Attrs = append(entry.Attrs, attr)
} else if strings.HasPrefix(part, "&") {
aff := strings.ToUpper(part[1:]) // Trim affiliation prefix `&` character
if !SiteChecker.MatchString(aff) {
return entry, fmt.Errorf("invalid affiliation key: %s", aff)
}
entry.Affs = append(entry.Affs, aff)
} else {
return entry, fmt.Errorf("invalid attribute/affiliation: %s", part)
}
return fmt.Errorf("invalid format: %s", domain)
}
func parseAttribute(attr string) (*router.Domain_Attribute, error) {
var attribute router.Domain_Attribute
if len(attr) == 0 || attr[0] != '@' {
return &attribute, fmt.Errorf("invalid attribute: %s", attr)
}
attribute.Key = strings.ToLower(attr[1:]) // Trim attribute prefix `@` character
attribute.TypedValue = &router.Domain_Attribute_BoolValue{BoolValue: true}
return &attribute, nil
}
func parseEntry(line string) (Entry, error) {
line = strings.TrimSpace(line)
parts := strings.Split(line, " ")
var entry Entry
if len(parts) == 0 {
return entry, fmt.Errorf("empty entry")
}
if err := parseDomain(parts[0], &entry); err != nil {
return entry, err
}
for i := 1; i < len(parts); i++ {
attr, err := parseAttribute(parts[i])
if err != nil {
return entry, err
}
entry.Attrs = append(entry.Attrs, attr)
}
// Sort attributes
sort.Slice(entry.Attrs, func(i, j int) bool {
return entry.Attrs[i] < entry.Attrs[j]
})
return entry, nil
}
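// For illustration, using lines from the diffs above: parseEntry("ad.qq.com @ads")
// would yield an Entry with Type "domain", Value "ad.qq.com" and one boolean
// attribute keyed "ads", while parseEntry("include:cloudflare-cn") would yield
// Type "include", Value "cloudflare-cn" and no attributes.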
@@ -183,20 +196,14 @@ func Load(path string) (*List, error) {
}
defer file.Close()
listName := strings.ToUpper(filepath.Base(path))
if !SiteChecker.MatchString(listName) {
return nil, fmt.Errorf("invalid list name: %s", listName)
list := &List{
Name: strings.ToUpper(filepath.Base(path)),
}
list := &List{Name: listName}
scanner := bufio.NewScanner(file)
for scanner.Scan() {
line := scanner.Text()
// Remove comments
if idx := strings.Index(line, "#"); idx != -1 {
line = line[:idx]
}
line = strings.TrimSpace(line)
if line == "" {
line := strings.TrimSpace(scanner.Text())
line = removeComment(line)
if len(line) == 0 {
continue
}
entry, err := parseEntry(line)
@@ -209,158 +216,99 @@ func Load(path string) (*List, error) {
return list, nil
}
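// Hypothetical usage of Load, with a file name taken from the diffs above:
//
//     list, err := Load(filepath.Join(dir, "cloudflare"))
//     // list.Name == "CLOUDFLARE"; blank and comment-only lines are dropped
//     // and every remaining line has been run through parseEntry.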
func ParseList(refList *List) error {
pl := plMap[refList.Name]
if pl == nil {
pl = &ParsedList{Name: refList.Name}
plMap[refList.Name] = pl
func isMatchAttr(Attrs []*router.Domain_Attribute, includeKey string) bool {
isMatch := false
mustMatch := true
matchName := includeKey
if strings.HasPrefix(includeKey, "!") {
isMatch = true
mustMatch = false
matchName = strings.TrimLeft(includeKey, "!")
}
for _, entry := range refList.Entry {
if entry.Type == RuleTypeInclude {
if len(entry.Affs) != 0 {
return fmt.Errorf("affiliation is not allowed for include:%s", entry.Value)
for _, Attr := range Attrs {
attrName := Attr.Key
if mustMatch {
if matchName == attrName {
isMatch = true
break
}
inc := Inclusion{Source: strings.ToUpper(entry.Value)}
for _, attr := range entry.Attrs {
if strings.HasPrefix(attr, "-") {
inc.BanAttrs = append(inc.BanAttrs, attr[1:]) // Trim attribute prefix `-` character
} else {
inc.MustAttrs = append(inc.MustAttrs, attr)
}
}
pl.Inclusions = append(pl.Inclusions, inc)
} else {
if len(entry.Affs) != 0 {
for _, aff := range entry.Affs {
apl := plMap[aff]
if apl == nil {
apl = &ParsedList{Name: aff}
plMap[aff] = apl
}
apl.Entry = append(apl.Entry, entry)
}
}
pl.Entry = append(pl.Entry, entry)
}
}
return nil
}
func polishList(rl *[]Entry) []Entry {
// Remove basic duplicates
pendingList := make([]Entry, 0, len(*rl)) // Exactly same entries removed
entry2String := func(e Entry) string { // Attributes already sorted
return e.Type + ":" + e.Value + "@" + strings.Join(e.Attrs, "@")
}
bscDupMap := make(map[string]bool)
for _, entry := range *rl {
if estring := entry2String(entry); !bscDupMap[estring] {
bscDupMap[estring] = true
pendingList = append(pendingList, entry)
}
}
finalList := make([]Entry, 0, len(pendingList))
queuingList := make([]Entry, 0, len(pendingList)) // Domain/full entries without attr
domainsMap := make(map[string]bool)
for _, entry := range pendingList {
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
case RuleTypeRegexp:
finalList = append(finalList, entry)
case RuleTypeKeyword:
finalList = append(finalList, entry)
case RuleTypeDomain:
domainsMap[entry.Value] = true
if len(entry.Attrs) != 0 {
finalList = append(finalList, entry)
} else {
queuingList = append(queuingList, entry)
}
case RuleTypeFullDomain:
if len(entry.Attrs) != 0 {
finalList = append(finalList, entry)
} else {
queuingList = append(queuingList, entry)
}
}
}
// Remove redundant subdomains for full/domain without attr
for _, qentry := range queuingList {
parts := strings.Split(qentry.Value, ".")
isRedundant := false
for i := 1; i < len(parts) - 1 ; i++ {
// Not check parent for level2 "name.tld" domain / tld will not become a parent
parentdomain := strings.Join(parts[i:], ".")
if domainsMap[parentdomain] {
isRedundant = true
if matchName == attrName {
isMatch = false
break
}
}
if !isRedundant {
finalList = append(finalList, qentry)
}
}
// Sort final entries
sort.Slice(finalList, func(i, j int) bool {
if finalList[i].Type != finalList[j].Type {
return finalList[i].Type < finalList[j].Type
}
if finalList[i].Value != finalList[j].Value {
return finalList[i].Value < finalList[j].Value
}
// Ideally, the comparison here will not be triggered by source data
return strings.Join(finalList[i].Attrs, ",") < strings.Join(finalList[j].Attrs, ",")
})
return finalList
return isMatch
}
func ResolveList(pl *ParsedList) error {
if _, pldone := finalMap[pl.Name]; pldone { return nil }
if cirIncMap[pl.Name] {
return fmt.Errorf("circular inclusion in: %s", pl.Name)
func createIncludeAttrEntrys(list *List, matchAttr *router.Domain_Attribute) []Entry {
newEntryList := make([]Entry, 0, len(list.Entry))
matchName := matchAttr.Key
for _, entry := range list.Entry {
matched := isMatchAttr(entry.Attrs, matchName)
if matched {
newEntryList = append(newEntryList, entry)
}
}
cirIncMap[pl.Name] = true
defer delete(cirIncMap, pl.Name)
return newEntryList
}
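// For illustration, with hypothetical list names: an include written as
// "include:tencent @ads" keeps only the referenced entries that carry @ads,
// while a negated key such as "include:tencent @!ads" keeps only the entries
// that do not carry @ads (isMatchAttr flips the match when the key starts with "!").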
isMatchAttrFilters := func(entry Entry, incFilter Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 { return true }
if len(entry.Attrs) == 0 { return len(incFilter.MustAttrs) == 0 }
attrMap := make(map[string]bool)
for _, attr := range entry.Attrs {
attrMap[attr] = true
}
for _, m := range incFilter.MustAttrs {
if !attrMap[m] { return false }
}
for _, b := range incFilter.BanAttrs {
if attrMap[b] { return false }
}
return true
func ParseList(list *List, ref map[string]*List) (*ParsedList, error) {
pl := &ParsedList{
Name: list.Name,
Inclusion: make(map[string]bool),
}
entryList := list.Entry
for {
newEntryList := make([]Entry, 0, len(entryList))
hasInclude := false
for _, entry := range entryList {
if entry.Type == RuleTypeInclude {
refName := strings.ToUpper(entry.Value)
if entry.Attrs != nil {
for _, attr := range entry.Attrs {
InclusionName := strings.ToUpper(refName + "@" + attr.Key)
if pl.Inclusion[InclusionName] {
continue
}
pl.Inclusion[InclusionName] = true
var roughList []Entry
roughList = append(roughList, pl.Entry...)
for _, inc := range pl.Inclusions {
incPl, exist := plMap[inc.Source]
if !exist {
return fmt.Errorf("list '%s' includes a non-existent list: '%s'", pl.Name, inc.Source)
}
if err := ResolveList(incPl); err != nil {
return err
}
for _, ientry := range finalMap[inc.Source] {
if isMatchAttrFilters(ientry, inc) {
roughList = append(roughList, ientry)
refList := ref[refName]
if refList == nil {
return nil, fmt.Errorf("list not found: %s", entry.Value)
}
attrEntrys := createIncludeAttrEntrys(refList, attr)
if len(attrEntrys) != 0 {
newEntryList = append(newEntryList, attrEntrys...)
}
}
} else {
InclusionName := refName
if pl.Inclusion[InclusionName] {
continue
}
pl.Inclusion[InclusionName] = true
refList := ref[refName]
if refList == nil {
return nil, fmt.Errorf("list not found: %s", entry.Value)
}
newEntryList = append(newEntryList, refList.Entry...)
}
hasInclude = true
} else {
newEntryList = append(newEntryList, entry)
}
}
entryList = newEntryList
if !hasInclude {
break
}
}
finalMap[pl.Name] = polishList(&roughList)
return nil
pl.Entry = entryList
return pl, nil
}
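// For illustration, a hypothetical call using the list names seen above:
//
//     pl, err := ParseList(ref["CLOUDFLARE"], ref)
//
// would replace the "include:cloudflare-cn" and "include:cloudflare-ipfs"
// entries with the entries of those lists, looping until no include entries
// remain; pl.Inclusion records each list (or list@attr) already expanded so
// the same reference is not pulled in twice.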
func main() {
@@ -369,7 +317,7 @@ func main() {
dir := *dataPath
fmt.Println("Use domain lists in", dir)
// Generate refMap
ref := make(map[string]*List)
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
@@ -381,7 +329,7 @@ func main() {
if err != nil {
return err
}
refMap[list.Name] = list
ref[list.Name] = list
return nil
})
if err != nil {
@@ -389,22 +337,6 @@ func main() {
os.Exit(1)
}
// Generate plMap
for _, refList := range refMap {
if err := ParseList(refList); err != nil {
fmt.Println("Failed to ParseList:", err)
os.Exit(1)
}
}
// Generate finalMap
for _, pl := range plMap {
if err := ResolveList(pl); err != nil {
fmt.Println("Failed to ResolveList:", err)
os.Exit(1)
}
}
// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
@@ -413,28 +345,43 @@ func main() {
}
}
// Export plaintext list
if *exportLists != "" {
exportedListSlice := strings.Split(*exportLists, ",")
for _, exportedList := range exportedListSlice {
if err := writePlainList(exportedList); err != nil {
fmt.Println("Failed to write list:", err)
continue
}
fmt.Printf("list: '%s' has been generated successfully.\n", exportedList)
}
}
// Generate dat file
protoList := new(router.GeoSiteList)
for siteName, siteEntries := range finalMap {
site, err := makeProtoList(siteName, &siteEntries)
var existList []string
for refName, list := range ref {
pl, err := ParseList(list, ref)
if err != nil {
fmt.Println("Failed:", err)
os.Exit(1)
}
site, err := pl.toProto()
if err != nil {
fmt.Println("Failed:", err)
os.Exit(1)
}
protoList.Entry = append(protoList.Entry, site)
// Flatten and export plaintext list
if *exportLists != "" {
if existList != nil {
exportPlainTextList(existList, refName, pl)
} else {
exportedListSlice := strings.Split(*exportLists, ",")
for _, exportedListName := range exportedListSlice {
fileName := filepath.Join(dir, exportedListName)
_, err := os.Stat(fileName)
if err == nil || os.IsExist(err) {
existList = append(existList, exportedListName)
} else {
fmt.Printf("'%s' list does not exist in '%s' directory.\n", exportedListName, dir)
}
}
if existList != nil {
exportPlainTextList(existList, refName, pl)
}
}
}
}
// Sort protoList so the marshaled list is reproducible
sort.SliceStable(protoList.Entry, func(i, j int) bool {
return protoList.Entry[i].CountryCode < protoList.Entry[j].CountryCode
@@ -442,11 +389,11 @@ func main() {
protoBytes, err := proto.Marshal(protoList)
if err != nil {
fmt.Println("Failed to marshal:", err)
fmt.Println("Failed:", err)
os.Exit(1)
}
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
fmt.Println("Failed to write output:", err)
fmt.Println("Failed:", err)
os.Exit(1)
} else {
fmt.Println(*outputName, "has been generated successfully.")