Mirror of https://github.com/v2fly/domain-list-community.git, synced 2026-02-06 05:53:13 +07:00
Compare commits: 15 commits (2026011404 ... 2026012013)
| Author | SHA1 | Date |
|---|---|---|
| | 912c689da3 | |
| | d1addde6f7 | |
| | ec95fedc45 | |
| | d50e2e1ad7 | |
| | ab42940731 | |
| | efd57f30ee | |
| | 3ee190ac78 | |
| | fa279bdd79 | |
| | b18f5e3049 | |
| | 5411cefcaa | |
| | d84e864ce8 | |
| | 49444d78b7 | |
| | dad8e15cd0 | |
| | e6e731a616 | |
| | 8c0b190c3f | |
@@ -11,6 +11,14 @@ This project is not opinionated. In other words, it does NOT endorse, claim or i
- **dlc.dat**: [https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat)
- **dlc.dat.sha256sum**: [https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat.sha256sum](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat.sha256sum)

## Notice

Rules with the `@!cn` attribute have been removed from the cn lists. `geosite:geolocation-cn@!cn` is no longer available.

Check [#390](https://github.com/v2fly/domain-list-community/issues/390), [#3119](https://github.com/v2fly/domain-list-community/pull/3119) and [#3198](https://github.com/v2fly/domain-list-community/pull/3198) for more information.

Please report if you have any problems or questions.

## Usage example

Each file in the `data` directory can be used as a rule in this format: `geosite:filename`.
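To illustrate how `geosite:filename` is resolved at build time (a sketch, not part of this changeset): the generator below writes every data file into `dlc.dat` as a `GeoSite` entry whose `CountryCode` is the upper-cased file name. The snippet assumes the protobuf runtime is `google.golang.org/protobuf/proto`, which the main.go diff uses but does not show the import for, and that a `dlc.dat` sits in the working directory.

```go
// Sketch: look up one list ("geosite:google") inside a generated dlc.dat.
package main

import (
	"fmt"
	"os"
	"strings"

	router "github.com/v2fly/v2ray-core/v5/app/router/routercommon"
	"google.golang.org/protobuf/proto" // assumed import; the diff only shows proto.Marshal
)

func main() {
	raw, err := os.ReadFile("dlc.dat")
	if err != nil {
		panic(err)
	}
	geoSiteList := new(router.GeoSiteList)
	if err := proto.Unmarshal(raw, geoSiteList); err != nil {
		panic(err)
	}
	// "geosite:google" refers to the GeoSite whose CountryCode is the
	// upper-cased data file name, i.e. "GOOGLE".
	want := strings.ToUpper("google")
	for _, site := range geoSiteList.Entry {
		if site.CountryCode == want {
			fmt.Printf("geosite:%s contains %d domain rules\n", strings.ToLower(want), len(site.Domain))
		}
	}
}
```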
@@ -1,3 +1,5 @@
annas-archive.in
annas-archive.li
annas-archive.org
annas-archive.pm
annas-archive.se

@@ -43,7 +43,6 @@ include:pocoiq-ads
include:pubmatic-ads
include:qihoo360-ads
include:segment-ads
include:sensorsdata-ads
include:sina-ads
include:sohu-ads
include:spotify-ads
@@ -194,6 +193,9 @@ reachmax.cn
# 热云数据
reyun.com

# 神策数据
static.sensorsdata.cn

# 诸葛io
zhugeapi.com
zhugeapi.net

@@ -7,10 +7,12 @@ include:elevenlabs
include:google-deepmind
include:groq
include:huggingface
include:liveperson
include:openai
include:perplexity
include:poe
include:xai
include:youmind

# CodeRabbit
coderabbit.ai
@@ -1,9 +1,9 @@
include:boc
include:ccb
include:citic
include:cmb
include:boc @-!cn
include:ccb @-!cn
include:citic @-!cn
include:cmb @-!cn
include:hsbc-cn
include:icbc
include:icbc @-!cn
include:unionpay

abchina.com

@@ -4,7 +4,7 @@ include:apipost
include:baltamatica
include:cnblogs
include:csdn
include:deepin
include:deepin @-!cn
include:gitee
include:goproxy
include:huawei-dev
@@ -188,3 +188,6 @@ emby1.69yun69.com

# 云梯
yunti.online

# 守候网络
server2.cn2gias.uk
@@ -6,7 +6,7 @@ include:aamgame
include:acfun
include:acplay
include:bestv
include:bilibili
include:bilibili @-!cn
include:ciweimao
include:dedao
include:douyin
@@ -18,7 +18,7 @@ include:gamersky
include:gitv
include:hunantv
include:huya
include:iqiyi
include:iqiyi @-!cn
include:ku6
include:kuaikan
include:kuaishou

@@ -12,6 +12,7 @@ include:itiger
include:longbridge
include:n26
include:schwab
include:standardchartered
include:wise

fxcorporate.com

@@ -24,6 +24,7 @@ include:pandanet
include:pinkcore
include:playstation
include:projectsekai
include:pubg
include:purikonejp
include:riot
include:roblox

@@ -9,7 +9,7 @@ include:mihoyo-cn
include:tencent-games
include:tiancity
include:vrzwk
include:xd
include:xd @-!cn
include:yokaverse

# 北京奇客创想科技有限公司
@@ -160,6 +160,7 @@ tnntoday.com
tvbs.com.tw
tvmost.com.hk
twgreatnews.com
twreporter.org
unwire.hk
upmedia.mg
vjmedia.com.hk

@@ -52,6 +52,7 @@ hwshu.com # 瀚文民国书库
hytung.cn # 瀚堂典藏古籍
incopat.com # incoPat 专利数据库
lawyee.org # 北大法意网 中国法律资料库
libvideo.com # 知识视界 武汉缘来文化
neohytung.com # 瀚堂近代报刊
nmrdata.com # 微谱数据
nssd.cn # 国家哲学社会科学学术期刊数据库
@@ -67,5 +68,3 @@ unihan.com.cn # 书同文
wenxin-ge.com # 文心阁古籍全文数据库
wind.com.cn # Wind 资讯金融
yiigle.com # 中华医学期刊全文数据库

full:www.libvideo.com # 武汉缘来文化-知识视界
@@ -6,7 +6,7 @@ include:gracg
include:hupu
include:meipian
include:okjike
include:sina
include:sina @-!cn
include:xiaohongshu
include:yy
include:zhihu

@@ -46,16 +46,20 @@ ntd.com
ntd.tv
ntdca.com
ntdimg.com
ntdtv-dc.com
ntdtv.ca
ntdtv.co.il
ntdtv.co.kr
ntdtv.com
ntdtv.com.tw
ntdtv.fr
ntdtv.jp
ntdtv.kr
ntdtv.org
ntdtv.ru
ntdtv-dc.com
ntdtv.se
ntdtvla.com
ntdvideo.tw
ntdvn.com
persianepochtimes.com
renminbao.com
@@ -244,29 +244,31 @@ include:zeplin
include:zoho
include:zoom

biliplus.com # BiliPlus

# Graphing for Science and Engineering
originlab.com

# Online LaTeX Editor
cloudlatex.io
overleaf.com

# Translator & Dictionary
include:linguee

collinsdictionary.com
ldoceonline.com
immersivetranslate.com # 沉浸式翻译 (国际版)

# Aurora Open Source Software (https://gitlab.com/AuroraOSS)
## Aurora Open Source Software (https://gitlab.com/AuroraOSS)
auroraoss.com

# CookiePro, provides cookies and tracking
## BiliPlus
biliplus.com
## CataBoom
cataboom.com
## Consent Management Platforms / Cookie service
consentpro.com
cookiepro.com
cookielaw.org
onetrust.com
osano.com
usercentrics.eu
## Greasy Fork
greasyfork.org
## Online LaTeX Editor
cloudlatex.io
overleaf.com
## Translator & Dictionary
include:linguee
collinsdictionary.com
ldoceonline.com
immersivetranslate.com # 沉浸式翻译 (国际版)
## OriginLab (Graphing for Science and Engineering)
originlab.com

# Software development
include:category-dev
@@ -284,9 +286,6 @@ include:rarbg
dmhy.org
rutor.info

# User scripts
greasyfork.org

# VPN services
include:category-vpnservices
@@ -26,6 +26,9 @@ include:getui
include:jiguang
include:umeng

# 神策数据
sensorsdata.cn

# category-httpdns-cn is mainly for advertising purpose
include:category-httpdns-cn

@@ -35,9 +38,9 @@ include:category-httpdns-cn
# Bank & Finance & Insurance & Securities
include:category-bank-cn
include:category-securities-cn
include:eastmoney
include:eastmoney @-!cn
include:everbright
include:pingan
include:pingan @-!cn
include:taikang

## 航财通·校园付
@@ -89,11 +92,11 @@ pkoplink.com

# E-commerce
include:58tongcheng
include:ctrip
include:ctrip @-!cn
include:dangdang
include:dewu
include:dewu @-!cn
include:dongjiao
include:jd
include:jd @-!cn
include:lianjia
include:meituan
include:miaomiaozhe
@@ -408,19 +411,19 @@ zhaopin.cn
# Tech companies & Organizations
include:aisiku # 北京艾斯酷科技有限公司
include:akiko # 秋子酱科技
include:alibaba
include:alibaba @-!cn
include:baidu
include:beisen
include:bluepoch
include:bytedance
include:didi
include:bytedance @-!cn
include:didi @-!cn
include:dingdatech # 叮哒出行(杭州金通互联科技有限公司)
include:dji
include:gree
include:haier
include:hikvision
include:honor
include:huawei
include:huawei @-!cn
include:hupun # 杭州湖畔网络技术有限公司
include:iflytek
include:ishumei # 北京数美时代科技有限公司
@@ -432,14 +435,14 @@ include:meizu
include:midea
include:narwal # 云鲸科技
include:netease
include:oppo
include:oppo @-!cn
include:qihoo360
include:sumkoo # 北京尚古创新科技有限公司
include:tcl
include:tencent
include:tencent @-!cn
include:tongfang
include:vivo
include:xiaomi
include:vivo @-!cn
include:xiaomi @-!cn
include:xunlei
include:youquan # 祐全科技
include:yuanbei # 上海圆贝信息科技有限公司
@@ -476,13 +479,14 @@ xsbapp.cn

# Telecommunication
include:chinabroadnet
include:chinamobile
include:chinatelecom
include:chinamobile @-!cn
include:chinatelecom @-!cn
include:chinatower
include:chinaunicom
include:chinaunicom @-!cn

# 在线工具
include:ipip # IPIP ip地理位置数据库
## IPIP ip地理位置数据库
include:ipip @-!cn

chaziyu.com # 滇ICP备2024035496号
fofa.info # Fofa网站测绘(华顺信安)
@@ -1,23 +1,23 @@
bisheng.cn @cn
bishengcompiler.cn @cn
devui.design @cn
gneec.com @cn
gneec.com.cn @cn
gneec3.com @cn
gneec4.com @cn
gneec7.com @cn
harmonyos.com @cn
hiascend.cn @cn
hiascend.com @cn
hiclc.com @cn
hikunpeng.cn @cn
hikunpeng.com @cn
hikunpeng.com.cn @cn
hikunpeng.net @cn
hisilicon.com @cn
hisilicon.com.cn @cn
huaweiapaas.com @cn
mindspore.cn @cn
owsgo.com @cn
teleows.com @cn
saasops.tech @cn
bisheng.cn
bishengcompiler.cn
devui.design
gneec.com
gneec.com.cn
gneec3.com
gneec4.com
gneec7.com
harmonyos.com
hiascend.cn
hiascend.com
hiclc.com
hikunpeng.cn
hikunpeng.com
hikunpeng.com.cn
hikunpeng.net
hisilicon.com
hisilicon.com.cn
huaweiapaas.com
mindspore.cn
owsgo.com
saasops.tech
teleows.com

@@ -1,2 +1,2 @@
kechuang.org
full:kc.kexinshe.com @cn
kexinshe.com

@@ -1,4 +1,4 @@
kurogames.com @cn
kurogames.com

# Wuthering Waves
aki-game.com @cn
aki-game.com
2 data/liveperson Normal file
@@ -0,0 +1,2 @@
liveperson.net
lpsnmedia.net

5 data/pubg Normal file
@@ -0,0 +1,5 @@
kraftonde.com
playbattlegrounds.com
pubg.com

full:pubg1.battleye.com
@@ -1 +0,0 @@
static.sensorsdata.cn @ads

4 data/standardchartered Normal file
@@ -0,0 +1,4 @@
sc.com
standardchartered.com
full:standchartbank.sc.omtrdc.net
full:standchartbank.tt.omtrdc.net
@@ -11,4 +11,4 @@ tencentcloud.com
tjstats.com
wegamedeveloper.com
weixinbridge.com
weui.io @cn
weui.io

@@ -107,6 +107,7 @@ volcfcdnrd.com
volcfcdnsc.com
volcfxgjrtm.com
volcgroup.com
volcgslb-mlt.com
volcgslb.com
volcgtm.com
volciad.com

@@ -1,5 +1,6 @@
include:xiaomi-ads
include:xiaomi-ai
include:xiaomi-iot

mgslb.com
mi-idc.com

5 data/xiaomi-iot Normal file
@@ -0,0 +1,5 @@
# Xiaomi IoT Services
account.xiaomi.com
cn-ha.mqtt.io.mi.com
ha.api.io.mi.com
miot-spec.org
@@ -1,2 +1,5 @@
xv-ru.com
xvideos-ar.com
xvideos-cdn.com
xvideos-india.com
xvideos.com

4 data/youmind Normal file
@@ -0,0 +1,4 @@
# Youmind
youmind.ai
youmind.com
youmind.site
532 main.go
@@ -7,7 +7,7 @@ import (
"os"
"path/filepath"
"regexp"
"sort"
"slices"
"strings"

router "github.com/v2fly/v2ray-core/v5/app/router/routercommon"
@@ -29,286 +29,307 @@ const (
RuleTypeInclude string = "include"
)

var (
TypeChecker = regexp.MustCompile(`^(domain|full|keyword|regexp|include)$`)
ValueChecker = regexp.MustCompile(`^[a-z0-9!\.-]+$`)
AttrChecker = regexp.MustCompile(`^[a-z0-9!-]+$`)
SiteChecker = regexp.MustCompile(`^[A-Z0-9!-]+$`)
)

var (
refMap = make(map[string][]*Entry)
plMap = make(map[string]*ParsedList)
finalMap = make(map[string][]*Entry)
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
)

type Entry struct {
Type string
Value string
Attrs []*router.Domain_Attribute
Attrs []string
Plain string
Affs []string
}

type List struct {
Name string
Entry []Entry
type Inclusion struct {
Source string
MustAttrs []string
BanAttrs []string
}

type ParsedList struct {
Name string
Inclusion map[string]bool
Entry []Entry
Name string
Inclusions []*Inclusion
Entries []*Entry
}

func (l *ParsedList) toPlainText(listName string) error {
var entryBytes []byte
for _, entry := range l.Entry {
var attrString string
if entry.Attrs != nil {
for _, attr := range entry.Attrs {
attrString += "@" + attr.GetKey() + ","
}
attrString = strings.TrimRight(":"+attrString, ",")
}
// Entry output format is: type:domain.tld:@attr1,@attr2
entryBytes = append(entryBytes, []byte(entry.Type+":"+entry.Value+attrString+"\n")...)
}
if err := os.WriteFile(filepath.Join(*outputDir, listName+".txt"), entryBytes, 0644); err != nil {
return err
}
return nil
}

func (l *ParsedList) toProto() (*router.GeoSite, error) {
func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
site := &router.GeoSite{
CountryCode: l.Name,
CountryCode: listName,
Domain: make([]*router.Domain, 0, len(entries)),
}
for _, entry := range l.Entry {
for _, entry := range entries {
pdomain := &router.Domain{Value: entry.Value}
for _, attr := range entry.Attrs {
pdomain.Attribute = append(pdomain.Attribute, &router.Domain_Attribute{
Key: attr,
TypedValue: &router.Domain_Attribute_BoolValue{BoolValue: true},
})
}

switch entry.Type {
case RuleTypeDomain:
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_RootDomain,
Value: entry.Value,
Attribute: entry.Attrs,
})

pdomain.Type = router.Domain_RootDomain
case RuleTypeRegexp:
// check regexp validity to avoid runtime error
_, err := regexp.Compile(entry.Value)
if err != nil {
return nil, fmt.Errorf("invalid regexp in list %s: %s", l.Name, entry.Value)
}
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_Regex,
Value: entry.Value,
Attribute: entry.Attrs,
})

pdomain.Type = router.Domain_Regex
case RuleTypeKeyword:
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_Plain,
Value: entry.Value,
Attribute: entry.Attrs,
})

pdomain.Type = router.Domain_Plain
case RuleTypeFullDomain:
site.Domain = append(site.Domain, &router.Domain{
Type: router.Domain_Full,
Value: entry.Value,
Attribute: entry.Attrs,
})

default:
return nil, fmt.Errorf("unknown domain type: %s", entry.Type)
pdomain.Type = router.Domain_Full
}
site.Domain = append(site.Domain, pdomain)
}
return site, nil
}

func exportPlainTextList(list []string, refName string, pl *ParsedList) {
for _, listName := range list {
if strings.EqualFold(refName, listName) {
if err := pl.toPlainText(strings.ToLower(refName)); err != nil {
fmt.Println("Failed:", err)
continue
}
fmt.Printf("'%s' has been generated successfully.\n", listName)
}
func writePlainList(exportedName string) error {
targetList, exist := finalMap[strings.ToUpper(exportedName)]
if !exist || len(targetList) == 0 {
return fmt.Errorf("'%s' list does not exist or is empty.", exportedName)
}
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(exportedName) + ".txt"))
if err != nil {
return err
}
defer file.Close()
w := bufio.NewWriter(file)
for _, entry := range targetList {
fmt.Fprintln(w, entry.Plain)
}
return w.Flush()
}

func removeComment(line string) string {
idx := strings.Index(line, "#")
if idx == -1 {
return line
}
return strings.TrimSpace(line[:idx])
}
func parseEntry(line string) (Entry, error) {
var entry Entry
parts := strings.Fields(line)

func parseDomain(domain string, entry *Entry) error {
kv := strings.Split(domain, ":")
// Parse type and value
rawTypeVal := parts[0]
kv := strings.Split(rawTypeVal, ":")
if len(kv) == 1 {
entry.Type = RuleTypeDomain
entry.Value = strings.ToLower(kv[0])
return nil
}

if len(kv) == 2 {
entry.Type = RuleTypeDomain // Default type
entry.Value = strings.ToLower(rawTypeVal)
} else if len(kv) == 2 {
entry.Type = strings.ToLower(kv[0])

if strings.EqualFold(entry.Type, RuleTypeRegexp) {
if entry.Type == RuleTypeRegexp {
entry.Value = kv[1]
} else {
entry.Value = strings.ToLower(kv[1])
}

return nil
} else {
return entry, fmt.Errorf("invalid format: %s", line)
}

return fmt.Errorf("invalid format: %s", domain)
}

func parseAttribute(attr string) (*router.Domain_Attribute, error) {
var attribute router.Domain_Attribute
if len(attr) == 0 || attr[0] != '@' {
return &attribute, fmt.Errorf("invalid attribute: %s", attr)
// Check type and value
if !TypeChecker.MatchString(entry.Type) {
return entry, fmt.Errorf("invalid type: %s", entry.Type)
}

attribute.Key = strings.ToLower(attr[1:]) // Trim attribute prefix `@` character
attribute.TypedValue = &router.Domain_Attribute_BoolValue{BoolValue: true}
return &attribute, nil
}

func parseEntry(line string) (Entry, error) {
line = strings.TrimSpace(line)
parts := strings.Split(line, " ")

var entry Entry
if len(parts) == 0 {
return entry, fmt.Errorf("empty entry")
}

if err := parseDomain(parts[0], &entry); err != nil {
return entry, err
}

for i := 1; i < len(parts); i++ {
attr, err := parseAttribute(parts[i])
if err != nil {
return entry, err
if entry.Type == RuleTypeRegexp {
if _, err := regexp.Compile(entry.Value); err != nil {
return entry, fmt.Errorf("invalid regexp: %s", entry.Value)
}
entry.Attrs = append(entry.Attrs, attr)
} else if !ValueChecker.MatchString(entry.Value) {
return entry, fmt.Errorf("invalid value: %s", entry.Value)
}

// Parse/Check attributes and affiliations
for _, part := range parts[1:] {
if strings.HasPrefix(part, "@") {
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
if !AttrChecker.MatchString(attr) {
return entry, fmt.Errorf("invalid attribute key: %s", attr)
}
entry.Attrs = append(entry.Attrs, attr)
} else if strings.HasPrefix(part, "&") {
aff := strings.ToUpper(part[1:]) // Trim affiliation prefix `&` character
if !SiteChecker.MatchString(aff) {
return entry, fmt.Errorf("invalid affiliation key: %s", aff)
}
entry.Affs = append(entry.Affs, aff)
} else {
return entry, fmt.Errorf("invalid attribute/affiliation: %s", part)
}
}
// Sort attributes
slices.Sort(entry.Attrs)
// Formated plain entry: type:domain.tld:@attr1,@attr2
entry.Plain = entry.Type + ":" + entry.Value
if len(entry.Attrs) != 0 {
entry.Plain = entry.Plain + ":@" + strings.Join(entry.Attrs, ",@")
}

return entry, nil
}

func Load(path string) (*List, error) {
func loadData(path string) error {
file, err := os.Open(path)
if err != nil {
return nil, err
return err
}
defer file.Close()

list := &List{
Name: strings.ToUpper(filepath.Base(path)),
listName := strings.ToUpper(filepath.Base(path))
if !SiteChecker.MatchString(listName) {
return fmt.Errorf("invalid list name: %s", listName)
}
scanner := bufio.NewScanner(file)
lineIdx := 0
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
line = removeComment(line)
if len(line) == 0 {
line := scanner.Text()
lineIdx++
// Remove comments
if idx := strings.Index(line, "#"); idx != -1 {
line = line[:idx]
}
line = strings.TrimSpace(line)
if line == "" {
continue
}
entry, err := parseEntry(line)
if err != nil {
return nil, err
return fmt.Errorf("error in %s at line %d: %v", path, lineIdx, err)
}
list.Entry = append(list.Entry, entry)
refMap[listName] = append(refMap[listName], &entry)
}

return list, nil
return nil
}

func isMatchAttr(Attrs []*router.Domain_Attribute, includeKey string) bool {
isMatch := false
mustMatch := true
matchName := includeKey
if strings.HasPrefix(includeKey, "!") {
isMatch = true
mustMatch = false
matchName = strings.TrimLeft(includeKey, "!")
func parseList(refName string, refList []*Entry) error {
pl, _ := plMap[refName]
if pl == nil {
pl = &ParsedList{Name: refName}
plMap[refName] = pl
}

for _, Attr := range Attrs {
attrName := Attr.Key
if mustMatch {
if matchName == attrName {
isMatch = true
break
for _, entry := range refList {
if entry.Type == RuleTypeInclude {
if len(entry.Affs) != 0 {
return fmt.Errorf("affiliation is not allowed for include:%s", entry.Value)
}
} else {
if matchName == attrName {
isMatch = false
break
}
}
}
return isMatch
}

func createIncludeAttrEntrys(list *List, matchAttr *router.Domain_Attribute) []Entry {
newEntryList := make([]Entry, 0, len(list.Entry))
matchName := matchAttr.Key
for _, entry := range list.Entry {
matched := isMatchAttr(entry.Attrs, matchName)
if matched {
newEntryList = append(newEntryList, entry)
}
}
return newEntryList
}

func ParseList(list *List, ref map[string]*List) (*ParsedList, error) {
pl := &ParsedList{
Name: list.Name,
Inclusion: make(map[string]bool),
}
entryList := list.Entry
for {
newEntryList := make([]Entry, 0, len(entryList))
hasInclude := false
for _, entry := range entryList {
if entry.Type == RuleTypeInclude {
refName := strings.ToUpper(entry.Value)
if entry.Attrs != nil {
for _, attr := range entry.Attrs {
InclusionName := strings.ToUpper(refName + "@" + attr.Key)
if pl.Inclusion[InclusionName] {
continue
}
pl.Inclusion[InclusionName] = true

refList := ref[refName]
if refList == nil {
return nil, fmt.Errorf("list not found: %s", entry.Value)
}
attrEntrys := createIncludeAttrEntrys(refList, attr)
if len(attrEntrys) != 0 {
newEntryList = append(newEntryList, attrEntrys...)
}
}
inc := &Inclusion{Source: strings.ToUpper(entry.Value)}
for _, attr := range entry.Attrs {
if strings.HasPrefix(attr, "-") {
inc.BanAttrs = append(inc.BanAttrs, attr[1:]) // Trim attribute prefix `-` character
} else {
InclusionName := refName
if pl.Inclusion[InclusionName] {
continue
}
pl.Inclusion[InclusionName] = true
refList := ref[refName]
if refList == nil {
return nil, fmt.Errorf("list not found: %s", entry.Value)
}
newEntryList = append(newEntryList, refList.Entry...)
inc.MustAttrs = append(inc.MustAttrs, attr)
}
hasInclude = true
} else {
newEntryList = append(newEntryList, entry)
}
}
entryList = newEntryList
if !hasInclude {
break
pl.Inclusions = append(pl.Inclusions, inc)
} else {
for _, aff := range entry.Affs {
apl, _ := plMap[aff]
if apl == nil {
apl = &ParsedList{Name: aff}
plMap[aff] = apl
}
apl.Entries = append(apl.Entries, entry)
}
pl.Entries = append(pl.Entries, entry)
}
}
pl.Entry = entryList
return nil
}

return pl, nil
func polishList(roughMap *map[string]*Entry) []*Entry {
finalList := make([]*Entry, 0, len(*roughMap))
queuingList := make([]*Entry, 0, len(*roughMap)) // Domain/full entries without attr
domainsMap := make(map[string]bool)
for _, entry := range *roughMap {
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
case RuleTypeRegexp:
finalList = append(finalList, entry)
case RuleTypeKeyword:
finalList = append(finalList, entry)
case RuleTypeDomain:
domainsMap[entry.Value] = true
if len(entry.Attrs) != 0 {
finalList = append(finalList, entry)
} else {
queuingList = append(queuingList, entry)
}
case RuleTypeFullDomain:
if len(entry.Attrs) != 0 {
finalList = append(finalList, entry)
} else {
queuingList = append(queuingList, entry)
}
}
}
// Remove redundant subdomains for full/domain without attr
for _, qentry := range queuingList {
isRedundant := false
pd := qentry.Value // Parent domain
for {
idx := strings.Index(pd, ".")
if idx == -1 { break }
pd = pd[idx+1:] // Go for next parent
if !strings.Contains(pd, ".") { break } // Not allow tld to be a parent
if domainsMap[pd] {
isRedundant = true
break
}
}
if !isRedundant {
finalList = append(finalList, qentry)
}
}
// Sort final entries
slices.SortFunc(finalList, func(a, b *Entry) int {
return strings.Compare(a.Plain, b.Plain)
})
return finalList
}

func resolveList(pl *ParsedList) error {
if _, pldone := finalMap[pl.Name]; pldone { return nil }

if cirIncMap[pl.Name] {
return fmt.Errorf("circular inclusion in: %s", pl.Name)
}
cirIncMap[pl.Name] = true
defer delete(cirIncMap, pl.Name)

isMatchAttrFilters := func(entry *Entry, incFilter *Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 { return true }
if len(entry.Attrs) == 0 { return len(incFilter.MustAttrs) == 0 }

for _, m := range incFilter.MustAttrs {
if !slices.Contains(entry.Attrs, m) { return false }
}
for _, b := range incFilter.BanAttrs {
if slices.Contains(entry.Attrs, b) { return false }
}
return true
}

roughMap := make(map[string]*Entry) // Avoid basic duplicates
for _, dentry := range pl.Entries { // Add direct entries
roughMap[dentry.Plain] = dentry
}
for _, inc := range pl.Inclusions {
incPl, exist := plMap[inc.Source]
if !exist {
return fmt.Errorf("list '%s' includes a non-existent list: '%s'", pl.Name, inc.Source)
}
if err := resolveList(incPl); err != nil {
return err
}
for _, ientry := range finalMap[inc.Source] {
if isMatchAttrFilters(ientry, inc) { // Add included entries
roughMap[ientry.Plain] = ientry
}
}
}
finalMap[pl.Name] = polishList(&roughMap)
return nil
}

func main() {
@@ -317,7 +338,7 @@ func main() {
dir := *dataPath
fmt.Println("Use domain lists in", dir)

ref := make(map[string]*List)
// Generate refMap
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
@@ -325,18 +346,32 @@ func main() {
if info.IsDir() {
return nil
}
list, err := Load(path)
if err != nil {
if err := loadData(path); err != nil {
return err
}
ref[list.Name] = list
return nil
})
if err != nil {
fmt.Println("Failed:", err)
fmt.Println("Failed to loadData:", err)
os.Exit(1)
}

// Generate plMap
for refName, refList := range refMap {
if err := parseList(refName, refList); err != nil {
fmt.Println("Failed to parseList:", err)
os.Exit(1)
}
}

// Generate finalMap
for _, pl := range plMap {
if err := resolveList(pl); err != nil {
fmt.Println("Failed to resolveList:", err)
os.Exit(1)
}
}

// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
@@ -345,55 +380,40 @@ func main() {
}
}

protoList := new(router.GeoSiteList)
var existList []string
for refName, list := range ref {
pl, err := ParseList(list, ref)
if err != nil {
fmt.Println("Failed:", err)
os.Exit(1)
// Export plaintext list
if *exportLists != "" {
exportedListSlice := strings.Split(*exportLists, ",")
for _, exportedList := range exportedListSlice {
if err := writePlainList(exportedList); err != nil {
fmt.Println("Failed to write list:", err)
continue
}
fmt.Printf("list: '%s' has been generated successfully.\n", exportedList)
}
site, err := pl.toProto()
}

// Generate dat file
protoList := new(router.GeoSiteList)
for siteName, siteEntries := range finalMap {
site, err := makeProtoList(siteName, siteEntries)
if err != nil {
fmt.Println("Failed:", err)
os.Exit(1)
}
protoList.Entry = append(protoList.Entry, site)

// Flatten and export plaintext list
if *exportLists != "" {
if existList != nil {
exportPlainTextList(existList, refName, pl)
} else {
exportedListSlice := strings.Split(*exportLists, ",")
for _, exportedListName := range exportedListSlice {
fileName := filepath.Join(dir, exportedListName)
_, err := os.Stat(fileName)
if err == nil || os.IsExist(err) {
existList = append(existList, exportedListName)
} else {
fmt.Printf("'%s' list does not exist in '%s' directory.\n", exportedListName, dir)
}
}
if existList != nil {
exportPlainTextList(existList, refName, pl)
}
}
}
}

// Sort protoList so the marshaled list is reproducible
sort.SliceStable(protoList.Entry, func(i, j int) bool {
return protoList.Entry[i].CountryCode < protoList.Entry[j].CountryCode
slices.SortFunc(protoList.Entry, func(a, b *router.GeoSite) int {
return strings.Compare(a.CountryCode, b.CountryCode)
})

protoBytes, err := proto.Marshal(protoList)
if err != nil {
fmt.Println("Failed:", err)
fmt.Println("Failed to marshal:", err)
os.Exit(1)
}
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
fmt.Println("Failed:", err)
fmt.Println("Failed to write output:", err)
os.Exit(1)
} else {
fmt.Println(*outputName, "has been generated successfully.")
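To illustrate the include-filter semantics introduced above (not part of the commit itself): an inclusion written as `include:name @attr` keeps only entries of `name` that carry `@attr`, while `include:name @-attr` drops entries that carry `@attr`, which is what the data-file changes such as `include:bilibili @-!cn` rely on. The sketch below restates the `isMatchAttrFilters` logic from `resolveList` as a standalone program; the sample entries are made up for illustration.

```go
// Standalone restatement of the include-filter check used in resolveList.
package main

import (
	"fmt"
	"slices"
)

type entry struct {
	value string
	attrs []string
}

type inclusion struct {
	mustAttrs []string // from "@attr"
	banAttrs  []string // from "@-attr"
}

func matches(e entry, inc inclusion) bool {
	if len(inc.mustAttrs) == 0 && len(inc.banAttrs) == 0 {
		return true // unfiltered include keeps everything
	}
	if len(e.attrs) == 0 {
		return len(inc.mustAttrs) == 0 // attr-less entries pass only if nothing is required
	}
	for _, m := range inc.mustAttrs {
		if !slices.Contains(e.attrs, m) {
			return false
		}
	}
	for _, b := range inc.banAttrs {
		if slices.Contains(e.attrs, b) {
			return false
		}
	}
	return true
}

func main() {
	// Hypothetical sample entries, not taken from the real data files.
	list := []entry{
		{value: "example-cn.com"},                              // no attrs
		{value: "example-global.com", attrs: []string{"!cn"}},  // tagged @!cn
	}
	// "include:example @-!cn": keep everything except entries tagged @!cn.
	banCN := inclusion{banAttrs: []string{"!cn"}}
	for _, e := range list {
		fmt.Println(e.value, matches(e, banCN)) // example-cn.com true, example-global.com false
	}
}
```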