Mirror of https://github.com/v2fly/domain-list-community.git
Synced 2026-02-05 21:43:14 +07:00

Compare commits: 2026013011...master (16 commits)
| SHA1 |
|---|
| b20cf00e07 |
| 027b8b3409 |
| 535dc789b9 |
| 311b281000 |
| bfb35d7b68 |
| daf4c10d0c |
| a188c2c058 |
| 947556aa16 |
| 44de14725e |
| c638ec66f0 |
| 4c8b1438f8 |
| 3399285ea9 |
| 62346cf6b7 |
| 8dee321846 |
| b117cf851f |
| 0b6606758d |
@@ -31,11 +31,21 @@ type DomainList struct {
}

func (d *DomainRule) domain2String() string {
dstring := d.Type + ":" + d.Value
if len(d.Attrs) != 0 {
dstring += ":@" + strings.Join(d.Attrs, ",@")
var dstr strings.Builder
dstr.Grow(len(d.Type) + len(d.Value) + 10)
dstr.WriteString(d.Type)
dstr.WriteByte(':')
dstr.WriteString(d.Value)
for i, attr := range d.Attrs {
if i == 0 {
dstr.WriteByte(':')
} else {
dstr.WriteByte(',')
}
dstr.WriteByte('@')
dstr.WriteString(attr)
}
return dstring
return dstr.String()
}

func loadGeosite(path string) ([]DomainList, map[string]*DomainList, error) {
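The hunk above interleaves the old string-concatenation code with its strings.Builder replacement; the same pattern is applied to parseEntry in main.go further down. Consolidated from the added lines (a sketch, assuming the DomainRule fields shown in the diff), the new serializer that produces the plain form `type:value:@attr1,@attr2` reads:

```go
// Consolidated view of the strings.Builder version added above.
func (d *DomainRule) domain2String() string {
	var dstr strings.Builder
	dstr.Grow(len(d.Type) + len(d.Value) + 10) // pre-size for the common case to avoid regrowth
	dstr.WriteString(d.Type)
	dstr.WriteByte(':')
	dstr.WriteString(d.Value)
	for i, attr := range d.Attrs {
		if i == 0 {
			dstr.WriteByte(':') // separator before the first attribute
		} else {
			dstr.WriteByte(',')
		}
		dstr.WriteByte('@')
		dstr.WriteString(attr)
	}
	return dstr.String()
}
```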
@@ -82,10 +92,10 @@ func loadGeosite(path string) ([]DomainList, map[string]*DomainList, error) {
func exportSite(name string, domainListByName map[string]*DomainList) error {
domainList, ok := domainListByName[strings.ToUpper(name)]
if !ok {
return fmt.Errorf("list '%s' does not exist", name)
return fmt.Errorf("list %q does not exist", name)
}
if len(domainList.Rules) == 0 {
return fmt.Errorf("list '%s' is empty", name)
return fmt.Errorf("list %q is empty", name)
}
file, err := os.Create(filepath.Join(*outputDir, name+".yml"))
if err != nil {
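Several error strings in this change switch from manual quoting with '%s' to the %q verb, which quotes and escapes the value using Go syntax. A minimal, self-contained illustration:

```go
package main

import "fmt"

func main() {
	name := `my "special" list`
	// Manual quotes break as soon as the value itself contains quotes:
	fmt.Println(fmt.Errorf("list '%s' does not exist", name)) // list 'my "special" list' does not exist
	// %q quotes and escapes automatically:
	fmt.Println(fmt.Errorf("list %q does not exist", name)) // list "my \"special\" list" does not exist
}
```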
@@ -119,22 +129,16 @@ func exportAll(filename string, domainLists []DomainList) error {
return w.Flush()
}

func main() {
flag.Parse()

// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
fmt.Println("Failed to create output directory:", mkErr)
os.Exit(1)
}
func run() error {
// Make sure output directory exists
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}

fmt.Printf("Loading %s...\n", *inputData)
fmt.Printf("loading source data %q...\n", *inputData)
domainLists, domainListByName, err := loadGeosite(*inputData)
if err != nil {
fmt.Println("Failed to loadGeosite:", err)
os.Exit(1)
return fmt.Errorf("failed to loadGeosite: %w", err)
}

var exportListSlice []string
@@ -150,15 +154,24 @@ func main() {
for _, eplistname := range exportListSlice {
if strings.EqualFold(eplistname, "_all_") {
if err := exportAll(filepath.Base(*inputData)+"_plain.yml", domainLists); err != nil {
fmt.Println("Failed to exportAll:", err)
fmt.Printf("failed to exportAll: %v\n", err)
continue
}
} else {
if err := exportSite(eplistname, domainListByName); err != nil {
fmt.Println("Failed to exportSite:", err)
fmt.Printf("failed to exportSite: %v\n", err)
continue
}
}
fmt.Printf("list: '%s' has been exported successfully.\n", eplistname)
fmt.Printf("list: %q has been exported successfully.\n", eplistname)
}
return nil
}

func main() {
flag.Parse()
if err := run(); err != nil {
fmt.Printf("Fatal error: %v\n", err)
os.Exit(1)
}
}

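This change (and the matching one in main.go below) folds the program body into a run() error function: failure paths return wrapped errors (%w) instead of calling os.Exit deep inside the pipeline, the os.Stat pre-check is dropped because os.MkdirAll is already a no-op for an existing directory, and main keeps the single exit point. A minimal sketch of the pattern, with a placeholder doWork step standing in for the real pipeline:

```go
package main

import (
	"flag"
	"fmt"
	"os"
)

// doWork is a placeholder for the real pipeline (load, resolve, export).
func doWork() error {
	return fmt.Errorf("failed to loadGeosite: %w", os.ErrNotExist)
}

func run() error {
	// Each step returns a wrapped error instead of calling os.Exit itself,
	// so deferred cleanups in the callees still run and errors.Is/As keep working.
	if err := doWork(); err != nil {
		return err
	}
	return nil
}

func main() {
	flag.Parse()
	if err := run(); err != nil {
		fmt.Printf("Fatal error: %v\n", err)
		os.Exit(1)
	}
}
```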
@@ -31,6 +31,7 @@ binanceapi.com
binanceru.net
bnbstatic.com
bntrace.com
bsappapi.com
nftstatic.com

# saas

@@ -64,7 +64,6 @@ adservice.sigmob.cn
adtechus.com
adtrue.com
adxprtz.com
assets.growingio.com
cdn.advertserve.com
cdn.banclip.com
cfts1tifqr.com

@@ -3,6 +3,7 @@ include:category-ads

include:adjust
include:clearbit
include:growingio
include:ogury
include:openx
include:pubmatic

@@ -4,6 +4,7 @@ include:cerebras
include:comfy
include:cursor
include:elevenlabs
include:github-copilot
include:google-deepmind
include:groq
include:huggingface

@@ -6,12 +6,14 @@ include:qiniu
include:upai
include:wangsu

## 创世云
# 创世云
chuangcache.com
chuangcdn.com
## FUNCDN
# 大风云CDN
dfyun.com.cn
# FUNCDN
funcdn.com
## 北京知道创宇信息技术股份有限公司
# 北京知道创宇信息技术股份有限公司
jiashule.com
jiasule.com
yunaq.com

@@ -4,6 +4,8 @@
asklink.com
## EasyTier
easytier.cn
## 飞衡HTTP
feihengip.com
## Oray
oray.com
oray.net

@@ -48,6 +48,7 @@ include:kakao
include:kaspersky
include:lg
include:logitech
include:louisvuitton
include:mailru-group
include:meta
include:microsoft

@@ -20,6 +20,7 @@ include:tencent-dev
include:ubuntukylin
include:unitychina

aardio.com
jinrishici.com
openvela.com
tipdm.org

@@ -71,6 +71,8 @@ baicizhan.com
baicizhan.org
bczcdn.com
bczeducation.cn
# 毕业之家科研服务平台
biyehome.net
# Burning Vocabulary
burningvocabulary.cn
burningvocabulary.com

@@ -6,3 +6,7 @@ include:tianyancha
qichamao.com
qyyjt.cn
x315.com

# 信查查
xcc.cn
xinchacha.com

@@ -54,6 +54,7 @@ include:pixiv
include:plutotv
include:pocketcasts
include:primevideo
include:radiko
include:roku
include:showtimeanytime
include:sling

@@ -50,6 +50,8 @@ yeshen.com
51zmt.top
# 广东南方新媒体
aisee.tv
# 动画巡礼
anitabi.cn
# 暴风影音
baofeng.com
baofeng.net

@@ -78,6 +78,8 @@ freebuf.com
geekpark.net
# 光明网
gmw.com
# 硅谷网
guigu.org
# 和讯
hexun.com
# 河南广播电视台/大象网
@@ -134,6 +136,9 @@ xinhuanet.com
xinhuaxmt.com
# 维科网
ofweek.com
# PChome电脑之家
pchome.net
pchpic.net
# PConline 太平洋科技
3conline.com
pconline.com.cn

@@ -1,26 +1,29 @@
# This list contains social media platforms inside China mainland.

include:coolapk
include:douban
include:gracg
include:hupu
include:meipian
include:okjike
include:sina @-!cn
include:xiaohongshu
include:yy
include:zhihu

tieba.baidu.com
tieba.com

# 杭州蛋蛋语音科技有限公司
dandan818.com
dandanvoice.com

# 脉脉
maimai.cn
taou.com

# 知识星球
zsxq.com
# This list contains social media platforms inside China mainland.

include:coolapk
include:douban
include:gracg
include:hupu
include:meipian
include:okjike
include:sina @-!cn
include:xiaohongshu
include:yy
include:zhihu

tieba.baidu.com
tieba.com

# 杭州蛋蛋语音科技有限公司
dandan818.com
dandanvoice.com

# 饭否
fanfou.com

# 脉脉
maimai.cn
taou.com

# 知识星球
zsxq.com

@@ -4,6 +4,9 @@ mbalib.com
sec-wiki.com
shidianbaike.com

# 叉子周 手机博物馆
chaz.fun

# huijiwiki
huijistatic.com
huijiwiki.com

data/dji (1 line changed)

@@ -2,6 +2,7 @@ dji.com
dji.ink
dji.net
djicdn.com
djigate.com
djiits.com
djiops.com
djiservice.org

@@ -271,6 +271,8 @@ ldoceonline.com
immersivetranslate.com # 沉浸式翻译 (国际版)
## OriginLab (Graphing for Science and Engineering)
originlab.com
## OsmAnd
osmand.net

# Software development
include:category-dev

@@ -23,6 +23,7 @@ include:category-social-media-cn

# Advertisment & Analytics
include:getui
include:growingio
include:jiguang

# 神策数据
@@ -663,7 +664,6 @@ ycrx360.com
9ht.com
9xu.com
a9vg.com
aardio.com # 皖ICP备09012014号
acetaffy.club # 粤ICP备2022042304号
adxvip.com
afzhan.com
@@ -719,7 +719,6 @@ bio-equip.com
biodiscover.com
bishijie.com
bitecoin.com
biyehome.net
bjcathay.com
bobo.com
bojianger.com
@@ -743,7 +742,6 @@ chachaba.com
changba.com
chaojituzi.net
chashebao.com
chaz.fun # 粤ICP备2022001828号-2
chazhengla.com
chazidian.com
che168.com
@@ -879,7 +877,6 @@ fanli.com
fangxiaoer.com
fanxian.com
fastapi.net
feihengip.com # 粤ICP备2023115330号-1
feihuo.com
feiniaomy.com
fengniao.com
@@ -903,7 +900,6 @@ gdrc.com
geektool.top # 极客Tool 蜀ICP备2024086015号-2
gezida.com
gfan.com
giocdn.com
globrand.com
gm86.com
gmz88.com
@@ -914,7 +910,6 @@ gongxiangcj.com
goosail.com
goufw.com
greenxiazai.com
growingio.com
gtags.net
guabu.com
guaiguai.com
@@ -922,7 +917,6 @@ guanaitong.com
guanhaobio.com
guanyierp.com # 沪ICP备14043335号-8
gucheng.com
guigu.org
guoxinmac.com
gupzs.com
gushiwen.org
@@ -1178,7 +1172,6 @@ p5w.net
paipaibang.com
paopaoche.net
pc6.com
pchome.net
pcpop.com
peccn.com
pgzs.com

@@ -1,4 +1,5 @@
include:github-ads
include:github-copilot
include:npmjs

atom.io
@@ -14,7 +15,6 @@ github.dev
github.io
githubapp.com
githubassets.com
githubcopilot.com
githubhackathon.com
githubnext.com
githubpreview.dev

data/github-copilot (new file, 1 line)

@@ -0,0 +1 @@
githubcopilot.com
data/growingio (new file, 7 lines)

@@ -0,0 +1,7 @@
# 北京易数科技
datayi.cn
gio.ren
giocdn.com
growin.cn
growingio.cn
growingio.com
@@ -6,4 +6,9 @@ gfw.ovh # sub domains mirror
mos-gorsud.co # kinopub domain to generate a mirror site through gfw.ovh

# kinopub CDN servers
cdn-service.space
cdn2cdn.com
cdn2site.com
pushbr.com # poster images CDN

regexp:(\w+)-static-[0-9]+\.cdntogo\.net$

data/louisvuitton (new file, 5 lines)

@@ -0,0 +1,5 @@
louisvuitton.cn @cn
louisvuitton.com
lvcampaign.com @cn

full:tp.louisvuitton.com @cn
@@ -60,6 +60,7 @@ full:default.exp-tas.com
full:developer.microsoft.com
full:download.visualstudio.microsoft.com
full:dtlgalleryint.cloudapp.net
full:packages.microsoft.com
full:poshtestgallery.cloudapp.net
full:psg-int-centralus.cloudapp.net
full:psg-int-eastus.cloudapp.net

@@ -1 +1,2 @@
sekai.colorfulpalette.org
pjsekai.sega.jp

@@ -44,6 +44,7 @@ dnsv1.com.cn
dothework.cn
ectencent.cn
ectencent.com.cn
edgeone.cool
edgeonedy1.com
essurl.com
exmailgz.com

data/radiko (new file, 5 lines)

@@ -0,0 +1,5 @@
# radiko official access and streaming domains

radiko-cf.com
radiko.jp
smartstream.ne.jp
@@ -24,6 +24,7 @@ pardot.com
quotable.com
radian6.com
relateiq.com
salesforce-setup.com
salesforce.com
salesforce.org
salesforceiq.com

main.go (189 lines changed)
@@ -23,7 +23,6 @@ var (
)

var (
refMap = make(map[string][]*Entry)
plMap = make(map[string]*ParsedList)
finalMap = make(map[string][]*Entry)
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
@@ -78,18 +77,14 @@ func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
return site, nil
}

func writePlainList(exportedName string) error {
targetList, exist := finalMap[strings.ToUpper(exportedName)]
if !exist || len(targetList) == 0 {
return fmt.Errorf("list %q does not exist or is empty.", exportedName)
}
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(exportedName)+".txt"))
func writePlainList(listname string, entries []*Entry) error {
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(listname)+".txt"))
if err != nil {
return err
}
defer file.Close()
w := bufio.NewWriter(file)
for _, entry := range targetList {
for _, entry := range entries {
fmt.Fprintln(w, entry.Plain)
}
return w.Flush()
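writePlainList no longer reaches into the global finalMap: it now takes the entries to write as a parameter, and the existence/emptiness check moves to the caller (see the export loop further down). Consolidated from the added lines above (a sketch assuming the Entry type with a Plain field and the outputDir flag defined elsewhere in main.go):

```go
// Consolidated new version of writePlainList, per the added lines above.
// Entry.Plain holds the pre-rendered "type:value:@attrs" form.
func writePlainList(listname string, entries []*Entry) error {
	file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(listname)+".txt"))
	if err != nil {
		return err
	}
	defer file.Close()

	w := bufio.NewWriter(file)
	for _, entry := range entries {
		fmt.Fprintln(w, entry.Plain)
	}
	return w.Flush() // flush buffered output before the deferred Close
}
```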
@@ -99,7 +94,7 @@ func parseEntry(line string) (Entry, error) {
var entry Entry
parts := strings.Fields(line)
if len(parts) == 0 {
return entry, fmt.Errorf("empty line: %q", line)
return entry, fmt.Errorf("empty line")
}

// Parse type and value
@@ -138,7 +133,7 @@ func parseEntry(line string) (Entry, error) {
}
}

// Parse/Check attributes and affiliations
// Parse attributes and affiliations
for _, part := range parts[1:] {
if strings.HasPrefix(part, "@") {
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
@@ -159,10 +154,21 @@ func parseEntry(line string) (Entry, error) {
// Sort attributes
slices.Sort(entry.Attrs)
// Formated plain entry: type:domain.tld:@attr1,@attr2
entry.Plain = entry.Type + ":" + entry.Value
if len(entry.Attrs) != 0 {
entry.Plain = entry.Plain + ":@" + strings.Join(entry.Attrs, ",@")
var plain strings.Builder
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
plain.WriteString(entry.Type)
plain.WriteByte(':')
plain.WriteString(entry.Value)
for i, attr := range entry.Attrs {
if i == 0 {
plain.WriteByte(':')
} else {
plain.WriteByte(',')
}
plain.WriteByte('@')
plain.WriteString(attr)
}
entry.Plain = plain.String()

return entry, nil
}
@@ -200,25 +206,21 @@ func validateSiteName(name string) bool {
return true
}

func loadData(path string) error {
func loadData(path string) ([]*Entry, error) {
file, err := os.Open(path)
if err != nil {
return err
return nil, err
}
defer file.Close()

listName := strings.ToUpper(filepath.Base(path))
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %s", listName)
}
var entries []*Entry
scanner := bufio.NewScanner(file)
lineIdx := 0
for scanner.Scan() {
line := scanner.Text()
lineIdx++
// Remove comments
if idx := strings.Index(line, "#"); idx != -1 {
line = line[:idx]
line = line[:idx] // Remove comments
}
line = strings.TrimSpace(line)
if line == "" {
@@ -226,11 +228,11 @@ func loadData(path string) error {
}
entry, err := parseEntry(line)
if err != nil {
return fmt.Errorf("error in %s at line %d: %v", path, lineIdx, err)
return entries, fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
}
refMap[listName] = append(refMap[listName], &entry)
entries = append(entries, &entry)
}
return nil
return entries, nil
}

func parseList(refName string, refList []*Entry) error {
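loadData previously appended into the package-level refMap keyed by list name; after this change it simply returns the entries parsed from one file, and the caller owns the map (the list-name validation also moves to the caller, see the WalkDir hunk below). Consolidated, the new loader looks roughly like this, assuming the Entry type and parseEntry helper from this file:

```go
// Consolidated new version of loadData, per the added lines above.
func loadData(path string) ([]*Entry, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	var entries []*Entry
	scanner := bufio.NewScanner(file)
	lineIdx := 0
	for scanner.Scan() {
		line := scanner.Text()
		lineIdx++
		if idx := strings.Index(line, "#"); idx != -1 {
			line = line[:idx] // Remove comments
		}
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}
		entry, err := parseEntry(line)
		if err != nil {
			return entries, fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
		}
		entries = append(entries, &entry)
	}
	return entries, nil
}
```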
@@ -242,7 +244,7 @@ func parseList(refName string, refList []*Entry) error {
for _, entry := range refList {
if entry.Type == dlc.RuleTypeInclude {
if len(entry.Affs) != 0 {
return fmt.Errorf("affiliation is not allowed for include:%s", entry.Value)
return fmt.Errorf("affiliation is not allowed for include:%q", entry.Value)
}
inc := &Inclusion{Source: entry.Value}
for _, attr := range entry.Attrs {
@@ -268,11 +270,31 @@ func parseList(refName string, refList []*Entry) error {
return nil
}

func polishList(roughMap *map[string]*Entry) []*Entry {
finalList := make([]*Entry, 0, len(*roughMap))
queuingList := make([]*Entry, 0, len(*roughMap)) // Domain/full entries without attr
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
return true
}
if len(entry.Attrs) == 0 {
return len(incFilter.MustAttrs) == 0
}
for _, m := range incFilter.MustAttrs {
if !slices.Contains(entry.Attrs, m) {
return false
}
}
for _, b := range incFilter.BanAttrs {
if slices.Contains(entry.Attrs, b) {
return false
}
}
return true
}

func polishList(roughMap map[string]*Entry) []*Entry {
finalList := make([]*Entry, 0, len(roughMap))
queuingList := make([]*Entry, 0, len(roughMap)) // Domain/full entries without attr
domainsMap := make(map[string]bool)
for _, entry := range *roughMap {
for _, entry := range roughMap {
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
case dlc.RuleTypeRegexp:
finalList = append(finalList, entry)
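Two things change here: the attribute filter is promoted from a closure inside resolveList to a package-level isMatchAttrFilters, and polishList now takes the map by value (a Go map value already refers to the same underlying storage, so the *map[string]*Entry indirection added nothing). A small self-contained demo of the filter semantics, using hypothetical minimal stand-ins for the Entry and Inclusion types from this file:

```go
package main

import (
	"fmt"
	"slices"
)

// Minimal stand-ins for the main.go types, reduced to the fields the filter reads.
type Entry struct {
	Type, Value string
	Attrs       []string
}

type Inclusion struct {
	Source    string
	MustAttrs []string // attributes an entry must carry to be included
	BanAttrs  []string // attributes that exclude an entry
}

// Copied from the new package-level function in the hunk above.
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
	if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
		return true
	}
	if len(entry.Attrs) == 0 {
		return len(incFilter.MustAttrs) == 0
	}
	for _, m := range incFilter.MustAttrs {
		if !slices.Contains(entry.Attrs, m) {
			return false
		}
	}
	for _, b := range incFilter.BanAttrs {
		if slices.Contains(entry.Attrs, b) {
			return false
		}
	}
	return true
}

func main() {
	inc := &Inclusion{Source: "example", BanAttrs: []string{"cn"}}
	withCN := &Entry{Type: "domain", Value: "example.cn", Attrs: []string{"cn"}}
	plain := &Entry{Type: "domain", Value: "example.com"}
	fmt.Println(isMatchAttrFilters(withCN, inc)) // false: carries a banned attribute
	fmt.Println(isMatchAttrFilters(plain, inc))  // true: no attributes, nothing required
}
```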
@@ -306,9 +328,6 @@ func polishList(roughMap *map[string]*Entry) []*Entry {
break
}
pd = pd[idx+1:] // Go for next parent
if !strings.Contains(pd, ".") {
break
} // Not allow tld to be a parent
if domainsMap[pd] {
isRedundant = true
break
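This hunk removes the "don't let a bare TLD count as a parent" guard from the redundancy walk: a domain entry is discarded when any of its parent domains is already kept, and stopping before the last label apparently changed nothing in practice since a lone TLD never ends up in domainsMap. A hypothetical standalone version of the same walk (isRedundantDomain and its loop are reconstructed for illustration, not copied verbatim from main.go):

```go
// Hypothetical sketch of the parent-domain redundancy test used by polishList:
// "www.example.com" is redundant if "example.com" (or any longer parent suffix)
// is already present in domainsMap.
func isRedundantDomain(domainsMap map[string]bool, value string) bool {
	pd := value
	for {
		idx := strings.Index(pd, ".")
		if idx == -1 {
			break // no more parent labels to strip
		}
		pd = pd[idx+1:] // Go for next parent
		if domainsMap[pd] {
			return true
		}
	}
	return false
}
```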
@@ -331,32 +350,11 @@ func resolveList(pl *ParsedList) error {
}

if cirIncMap[pl.Name] {
return fmt.Errorf("circular inclusion in: %s", pl.Name)
return fmt.Errorf("circular inclusion in: %q", pl.Name)
}
cirIncMap[pl.Name] = true
defer delete(cirIncMap, pl.Name)

isMatchAttrFilters := func(entry *Entry, incFilter *Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
return true
}
if len(entry.Attrs) == 0 {
return len(incFilter.MustAttrs) == 0
}

for _, m := range incFilter.MustAttrs {
if !slices.Contains(entry.Attrs, m) {
return false
}
}
for _, b := range incFilter.BanAttrs {
if slices.Contains(entry.Attrs, b) {
return false
}
}
return true
}

roughMap := make(map[string]*Entry) // Avoid basic duplicates
for _, dentry := range pl.Entries { // Add direct entries
roughMap[dentry.Plain] = dentry
@@ -375,80 +373,75 @@ func resolveList(pl *ParsedList) error {
}
}
}
finalMap[pl.Name] = polishList(&roughMap)
finalMap[pl.Name] = polishList(roughMap)
return nil
}

func main() {
flag.Parse()

func run() error {
dir := *dataPath
fmt.Println("Use domain lists in", dir)
fmt.Printf("using domain lists data in %q\n", dir)

// Generate refMap
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
refMap := make(map[string][]*Entry)
err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if info.IsDir() {
if d.IsDir() {
return nil
}
if err := loadData(path); err != nil {
return err
listName := strings.ToUpper(filepath.Base(path))
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %q", listName)
}
return nil
refMap[listName], err = loadData(path)
return err
})
if err != nil {
fmt.Println("Failed to loadData:", err)
os.Exit(1)
return fmt.Errorf("failed to loadData: %w", err)
}

// Generate plMap
for refName, refList := range refMap {
if err := parseList(refName, refList); err != nil {
fmt.Println("Failed to parseList:", err)
os.Exit(1)
return fmt.Errorf("failed to parseList %q: %w", refName, err)
}
}

// Generate finalMap
for _, pl := range plMap {
for plname, pl := range plMap {
if err := resolveList(pl); err != nil {
fmt.Println("Failed to resolveList:", err)
os.Exit(1)
return fmt.Errorf("failed to resolveList %q: %w", plname, err)
}
}

// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
fmt.Println("Failed to create output directory:", mkErr)
os.Exit(1)
}
// Make sure output directory exists
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}

// Export plaintext list
var exportListSlice []string
for raw := range strings.SplitSeq(*exportLists, ",") {
if trimmed := strings.TrimSpace(raw); trimmed != "" {
exportListSlice = append(exportListSlice, trimmed)
for rawEpList := range strings.SplitSeq(*exportLists, ",") {
if epList := strings.TrimSpace(rawEpList); epList != "" {
entries, exist := finalMap[strings.ToUpper(epList)]
if !exist || len(entries) == 0 {
fmt.Printf("list %q does not exist or is empty\n", epList)
continue
}
if err := writePlainList(epList, entries); err != nil {
fmt.Printf("failed to write list %q: %v\n", epList, err)
continue
}
fmt.Printf("list %q has been generated successfully.\n", epList)
}
}
for _, exportList := range exportListSlice {
if err := writePlainList(exportList); err != nil {
fmt.Println("Failed to write list:", err)
continue
}
fmt.Printf("list %q has been generated successfully.\n", exportList)
}

// Generate dat file
protoList := new(router.GeoSiteList)
for siteName, siteEntries := range finalMap {
site, err := makeProtoList(siteName, siteEntries)
if err != nil {
fmt.Println("Failed to makeProtoList:", err)
os.Exit(1)
return fmt.Errorf("failed to makeProtoList %q: %w", siteName, err)
}
protoList.Entry = append(protoList.Entry, site)
}
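The walk over the data directory moves from filepath.Walk to filepath.WalkDir, which hands the callback a DirEntry instead of an os.FileInfo and so avoids a Stat call per file; list-name validation and the per-file load now happen inside the callback, and refMap becomes a local. A small runnable sketch of the WalkDir shape (the "./data" path and the printout are placeholders for illustration):

```go
package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
	"strings"
)

func main() {
	// WalkDir passes an fs.DirEntry, so directory checks become d.IsDir()
	// without an extra os.Stat per entry. "./data" is a placeholder path.
	err := filepath.WalkDir("./data", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err // propagate walk errors instead of exiting deep inside
		}
		if d.IsDir() {
			return nil
		}
		listName := strings.ToUpper(filepath.Base(path))
		fmt.Println("would load list", listName, "from", path)
		return nil
	})
	if err != nil {
		fmt.Println("walk failed:", err)
	}
}
```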
@@ -459,13 +452,19 @@ func main() {

protoBytes, err := proto.Marshal(protoList)
if err != nil {
fmt.Println("Failed to marshal:", err)
os.Exit(1)
return fmt.Errorf("failed to marshal: %w", err)
}
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
fmt.Println("Failed to write output:", err)
return fmt.Errorf("failed to write output: %w", err)
}
fmt.Printf("%q has been generated successfully.\n", *outputName)
return nil
}

func main() {
flag.Parse()
if err := run(); err != nil {
fmt.Printf("Fatal error: %v\n", err)
os.Exit(1)
} else {
fmt.Println(*outputName, "has been generated successfully.")
}
}
