Compare commits


10 Commits

Author SHA1 Message Date
MkQtS
b20cf00e07 Add more cn domains (#3249)
* add growingio

* category-cdn-cn: add dfyun.com.cn

* category-collaborate-cn: add feihengip.com

* category-dev-cn: add aardio.com

* category-education-cn: add biyehome.net

* category-enterprise-query-platform-cn: add xinchacha domains

* category-media-cn: add more domains

* category-social-media-cn: add fanfou.com

* category-wiki-cn: add chaz.fun
2026-02-05 21:32:06 +08:00
jinqiang zhang
027b8b3409 dji: add djigate.com (#3248) 2026-02-05 20:20:39 +08:00
xd DG
535dc789b9 Add geosite:radiko (#3247)
* Add geosite:radiko

* Sort domains and include radiko in category-entertainment

---------

Co-authored-by: terada46 <mizukiloveu@gmail.com>
2026-02-05 17:30:18 +08:00
MkQtS
311b281000 improve codes (#3246) 2026-02-04 15:03:04 +08:00
秋野かえで
bfb35d7b68 split githubcopilot.com to github-copilot (#3245) 2026-02-04 14:34:55 +08:00
深鸣
daf4c10d0c category-entertainment-cn: add anitabi.cn (#3244) 2026-02-04 13:58:38 +08:00
深鸣
a188c2c058 geolocation-!cn: add osmand.net (#3243) 2026-02-04 13:57:46 +08:00
MkQtS
947556aa16 Improve codes (#3242)
* main.go: improve code

* main.go: move refMap from global variable to local

* main.go: allow tld to be a parent domain

* datdump: improve code
2026-02-03 22:38:18 +08:00
susaninz
44de14725e kinopub: add cdn2cdn.com, cdn2site.com, pushbr.com (#3240)
These CDN domains are used by Kinopub for:
- cdn2cdn.com: video streaming CDN
- cdn2site.com: video streaming CDN
- pushbr.com: poster/thumbnail images

Discovered via network traffic analysis on the Kinopub web app.
Without these domains proxied, poster images fail to load.

---------

Co-authored-by: Ivan Slezkin <ivanslezkin@Mac.lan>
2026-02-03 19:04:53 +08:00
sergeevms
c638ec66f0 salesforce: add salesforce-setup.com (#3239) 2026-02-02 23:31:35 +08:00
24 changed files with 208 additions and 156 deletions

View File

@@ -31,11 +31,21 @@ type DomainList struct {
}
func (d *DomainRule) domain2String() string {
dstring := d.Type + ":" + d.Value
if len(d.Attrs) != 0 {
dstring += ":@" + strings.Join(d.Attrs, ",@")
var dstr strings.Builder
dstr.Grow(len(d.Type) + len(d.Value) + 10)
dstr.WriteString(d.Type)
dstr.WriteByte(':')
dstr.WriteString(d.Value)
for i, attr := range d.Attrs {
if i == 0 {
dstr.WriteByte(':')
} else {
dstr.WriteByte(',')
}
dstr.WriteByte('@')
dstr.WriteString(attr)
}
return dstring
return dstr.String()
}
func loadGeosite(path string) ([]DomainList, map[string]*DomainList, error) {
@@ -82,10 +92,10 @@ func loadGeosite(path string) ([]DomainList, map[string]*DomainList, error) {
func exportSite(name string, domainListByName map[string]*DomainList) error {
domainList, ok := domainListByName[strings.ToUpper(name)]
if !ok {
return fmt.Errorf("list '%s' does not exist", name)
return fmt.Errorf("list %q does not exist", name)
}
if len(domainList.Rules) == 0 {
return fmt.Errorf("list '%s' is empty", name)
return fmt.Errorf("list %q is empty", name)
}
file, err := os.Create(filepath.Join(*outputDir, name+".yml"))
if err != nil {
@@ -119,22 +129,16 @@ func exportAll(filename string, domainLists []DomainList) error {
return w.Flush()
}
func main() {
flag.Parse()
// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
fmt.Println("Failed to create output directory:", mkErr)
os.Exit(1)
}
func run() error {
// Make sure output directory exists
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}
fmt.Printf("Loading %s...\n", *inputData)
fmt.Printf("loading source data %q...\n", *inputData)
domainLists, domainListByName, err := loadGeosite(*inputData)
if err != nil {
fmt.Println("Failed to loadGeosite:", err)
os.Exit(1)
return fmt.Errorf("failed to loadGeosite: %w", err)
}
var exportListSlice []string
@@ -150,15 +154,24 @@ func main() {
for _, eplistname := range exportListSlice {
if strings.EqualFold(eplistname, "_all_") {
if err := exportAll(filepath.Base(*inputData)+"_plain.yml", domainLists); err != nil {
fmt.Println("Failed to exportAll:", err)
fmt.Printf("failed to exportAll: %v\n", err)
continue
}
} else {
if err := exportSite(eplistname, domainListByName); err != nil {
fmt.Println("Failed to exportSite:", err)
fmt.Printf("failed to exportSite: %v\n", err)
continue
}
}
fmt.Printf("list: '%s' has been exported successfully.\n", eplistname)
fmt.Printf("list: %q has been exported successfully.\n", eplistname)
}
return nil
}
func main() {
flag.Parse()
if err := run(); err != nil {
fmt.Printf("Fatal error: %v\n", err)
os.Exit(1)
}
}
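
For reference, a minimal self-contained sketch of the builder-based serialization introduced in the hunk above. The trimmed DomainRule struct and the sample values are assumptions made only for illustration; the output shape (type:value:@attr1,@attr2) is the same one the old concatenation code produced.

package main

import (
	"fmt"
	"strings"
)

// DomainRule is trimmed to the fields domain2String touches; the struct in
// the repository carries more fields (assumption for this sketch).
type DomainRule struct {
	Type  string
	Value string
	Attrs []string
}

func (d *DomainRule) domain2String() string {
	var dstr strings.Builder
	dstr.Grow(len(d.Type) + len(d.Value) + 10)
	dstr.WriteString(d.Type)
	dstr.WriteByte(':')
	dstr.WriteString(d.Value)
	for i, attr := range d.Attrs {
		if i == 0 {
			dstr.WriteByte(':')
		} else {
			dstr.WriteByte(',')
		}
		dstr.WriteByte('@')
		dstr.WriteString(attr)
	}
	return dstr.String()
}

func main() {
	// Hypothetical rule, only to show the resulting plain-text form.
	r := DomainRule{Type: "domain", Value: "example.com", Attrs: []string{"ads", "cn"}}
	fmt.Println(r.domain2String()) // domain:example.com:@ads,@cn
}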

View File

@@ -64,7 +64,6 @@ adservice.sigmob.cn
adtechus.com
adtrue.com
adxprtz.com
assets.growingio.com
cdn.advertserve.com
cdn.banclip.com
cfts1tifqr.com

View File

@@ -3,6 +3,7 @@ include:category-ads
include:adjust
include:clearbit
include:growingio
include:ogury
include:openx
include:pubmatic

View File

@@ -4,6 +4,7 @@ include:cerebras
include:comfy
include:cursor
include:elevenlabs
include:github-copilot
include:google-deepmind
include:groq
include:huggingface

View File

@@ -6,12 +6,14 @@ include:qiniu
include:upai
include:wangsu
## 创世云
# 创世云
chuangcache.com
chuangcdn.com
## FUNCDN
# 大风云CDN
dfyun.com.cn
# FUNCDN
funcdn.com
## 北京知道创宇信息技术股份有限公司
# 北京知道创宇信息技术股份有限公司
jiashule.com
jiasule.com
yunaq.com

View File

@@ -4,6 +4,8 @@
asklink.com
## EasyTier
easytier.cn
## 飞衡HTTP
feihengip.com
## Oray
oray.com
oray.net

View File

@@ -20,6 +20,7 @@ include:tencent-dev
include:ubuntukylin
include:unitychina
aardio.com
jinrishici.com
openvela.com
tipdm.org

View File

@@ -71,6 +71,8 @@ baicizhan.com
baicizhan.org
bczcdn.com
bczeducation.cn
# 毕业之家科研服务平台
biyehome.net
# Burning Vocabulary
burningvocabulary.cn
burningvocabulary.com

View File

@@ -6,3 +6,7 @@ include:tianyancha
qichamao.com
qyyjt.cn
x315.com
# 信查查
xcc.cn
xinchacha.com

View File

@@ -54,6 +54,7 @@ include:pixiv
include:plutotv
include:pocketcasts
include:primevideo
include:radiko
include:roku
include:showtimeanytime
include:sling

View File

@@ -50,6 +50,8 @@ yeshen.com
51zmt.top
# 广东南方新媒体
aisee.tv
# 动画巡礼
anitabi.cn
# 暴风影音
baofeng.com
baofeng.net

View File

@@ -78,6 +78,8 @@ freebuf.com
geekpark.net
# 光明网
gmw.com
# 硅谷网
guigu.org
# 和讯
hexun.com
# 河南广播电视台/大象网
@@ -134,6 +136,9 @@ xinhuanet.com
xinhuaxmt.com
# 维科网
ofweek.com
# PChome电脑之家
pchome.net
pchpic.net
# PConline 太平洋科技
3conline.com
pconline.com.cn

View File

@@ -1,26 +1,29 @@
# This list contains social media platforms inside China mainland.
include:coolapk
include:douban
include:gracg
include:hupu
include:meipian
include:okjike
include:sina @-!cn
include:xiaohongshu
include:yy
include:zhihu
tieba.baidu.com
tieba.com
# 杭州蛋蛋语音科技有限公司
dandan818.com
dandanvoice.com
# 脉脉
maimai.cn
taou.com
# 知识星球
zsxq.com
# This list contains social media platforms inside China mainland.
include:coolapk
include:douban
include:gracg
include:hupu
include:meipian
include:okjike
include:sina @-!cn
include:xiaohongshu
include:yy
include:zhihu
tieba.baidu.com
tieba.com
# 杭州蛋蛋语音科技有限公司
dandan818.com
dandanvoice.com
# 饭否
fanfou.com
# 脉脉
maimai.cn
taou.com
# 知识星球
zsxq.com

View File

@@ -4,6 +4,9 @@ mbalib.com
sec-wiki.com
shidianbaike.com
# 叉子周 手机博物馆
chaz.fun
# huijiwiki
huijistatic.com
huijiwiki.com

View File

@@ -2,6 +2,7 @@ dji.com
dji.ink
dji.net
djicdn.com
djigate.com
djiits.com
djiops.com
djiservice.org

View File

@@ -271,6 +271,8 @@ ldoceonline.com
immersivetranslate.com # 沉浸式翻译 (国际版)
## OriginLab (Graphing for Science and Engineering)
originlab.com
## OsmAnd
osmand.net
# Software development
include:category-dev

View File

@@ -23,6 +23,7 @@ include:category-social-media-cn
# Advertisment & Analytics
include:getui
include:growingio
include:jiguang
# 神策数据
@@ -663,7 +664,6 @@ ycrx360.com
9ht.com
9xu.com
a9vg.com
aardio.com # 皖ICP备09012014号
acetaffy.club # 粤ICP备2022042304号
adxvip.com
afzhan.com
@@ -719,7 +719,6 @@ bio-equip.com
biodiscover.com
bishijie.com
bitecoin.com
biyehome.net
bjcathay.com
bobo.com
bojianger.com
@@ -743,7 +742,6 @@ chachaba.com
changba.com
chaojituzi.net
chashebao.com
chaz.fun # 粤ICP备2022001828号-2
chazhengla.com
chazidian.com
che168.com
@@ -879,7 +877,6 @@ fanli.com
fangxiaoer.com
fanxian.com
fastapi.net
feihengip.com # 粤ICP备2023115330号-1
feihuo.com
feiniaomy.com
fengniao.com
@@ -903,7 +900,6 @@ gdrc.com
geektool.top # 极客Tool 蜀ICP备2024086015号-2
gezida.com
gfan.com
giocdn.com
globrand.com
gm86.com
gmz88.com
@@ -914,7 +910,6 @@ gongxiangcj.com
goosail.com
goufw.com
greenxiazai.com
growingio.com
gtags.net
guabu.com
guaiguai.com
@@ -922,7 +917,6 @@ guanaitong.com
guanhaobio.com
guanyierp.com # 沪ICP备14043335号-8
gucheng.com
guigu.org
guoxinmac.com
gupzs.com
gushiwen.org
@@ -1178,7 +1172,6 @@ p5w.net
paipaibang.com
paopaoche.net
pc6.com
pchome.net
pcpop.com
peccn.com
pgzs.com

View File

@@ -1,4 +1,5 @@
include:github-ads
include:github-copilot
include:npmjs
atom.io
@@ -14,7 +15,6 @@ github.dev
github.io
githubapp.com
githubassets.com
githubcopilot.com
githubhackathon.com
githubnext.com
githubpreview.dev

data/github-copilot Normal file (1 line changed)
View File

@@ -0,0 +1 @@
githubcopilot.com

data/growingio Normal file (7 lines changed)
View File

@@ -0,0 +1,7 @@
# 北京易数科技
datayi.cn
gio.ren
giocdn.com
growin.cn
growingio.cn
growingio.com

View File

@@ -1,4 +1,3 @@
cdn-service.space
kino.pub
kinopub.online
@@ -7,4 +6,9 @@ gfw.ovh # sub domains mirror
mos-gorsud.co # kinopub domain to generate a mirror site through gfw.ovh
# kinopub CDN servers
cdn-service.space
cdn2cdn.com
cdn2site.com
pushbr.com # poster images CDN
regexp:(\w+)-static-[0-9]+\.cdntogo\.net$
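
For illustration only, a small sketch of what the regexp rule above matches when evaluated as a Go regular expression; the hostnames below are invented examples, not domains observed in Kinopub traffic.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as the kinopub regexp rule above.
	re := regexp.MustCompile(`(\w+)-static-[0-9]+\.cdntogo\.net$`)

	// Hypothetical hostnames, for illustration only.
	for _, host := range []string{
		"video-static-12.cdntogo.net", // matches
		"static.cdntogo.net",          // does not match
	} {
		fmt.Println(host, re.MatchString(host))
	}
}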

data/radiko Normal file (5 lines changed)
View File

@@ -0,0 +1,5 @@
# radiko official access and streaming domains
radiko-cf.com
radiko.jp
smartstream.ne.jp

View File

@@ -24,6 +24,7 @@ pardot.com
quotable.com
radian6.com
relateiq.com
salesforce-setup.com
salesforce.com
salesforce.org
salesforceiq.com

main.go (189 lines changed)
View File

@@ -23,7 +23,6 @@ var (
)
var (
refMap = make(map[string][]*Entry)
plMap = make(map[string]*ParsedList)
finalMap = make(map[string][]*Entry)
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
@@ -78,18 +77,14 @@ func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
return site, nil
}
func writePlainList(exportedName string) error {
targetList, exist := finalMap[strings.ToUpper(exportedName)]
if !exist || len(targetList) == 0 {
return fmt.Errorf("list %q does not exist or is empty.", exportedName)
}
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(exportedName)+".txt"))
func writePlainList(listname string, entries []*Entry) error {
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(listname)+".txt"))
if err != nil {
return err
}
defer file.Close()
w := bufio.NewWriter(file)
for _, entry := range targetList {
for _, entry := range entries {
fmt.Fprintln(w, entry.Plain)
}
return w.Flush()
@@ -99,7 +94,7 @@ func parseEntry(line string) (Entry, error) {
var entry Entry
parts := strings.Fields(line)
if len(parts) == 0 {
return entry, fmt.Errorf("empty line: %q", line)
return entry, fmt.Errorf("empty line")
}
// Parse type and value
@@ -138,7 +133,7 @@ func parseEntry(line string) (Entry, error) {
}
}
// Parse/Check attributes and affiliations
// Parse attributes and affiliations
for _, part := range parts[1:] {
if strings.HasPrefix(part, "@") {
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
@@ -159,10 +154,21 @@ func parseEntry(line string) (Entry, error) {
// Sort attributes
slices.Sort(entry.Attrs)
// Formated plain entry: type:domain.tld:@attr1,@attr2
entry.Plain = entry.Type + ":" + entry.Value
if len(entry.Attrs) != 0 {
entry.Plain = entry.Plain + ":@" + strings.Join(entry.Attrs, ",@")
var plain strings.Builder
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
plain.WriteString(entry.Type)
plain.WriteByte(':')
plain.WriteString(entry.Value)
for i, attr := range entry.Attrs {
if i == 0 {
plain.WriteByte(':')
} else {
plain.WriteByte(',')
}
plain.WriteByte('@')
plain.WriteString(attr)
}
entry.Plain = plain.String()
return entry, nil
}
@@ -200,25 +206,21 @@ func validateSiteName(name string) bool {
return true
}
func loadData(path string) error {
func loadData(path string) ([]*Entry, error) {
file, err := os.Open(path)
if err != nil {
return err
return nil, err
}
defer file.Close()
listName := strings.ToUpper(filepath.Base(path))
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %s", listName)
}
var entries []*Entry
scanner := bufio.NewScanner(file)
lineIdx := 0
for scanner.Scan() {
line := scanner.Text()
lineIdx++
// Remove comments
if idx := strings.Index(line, "#"); idx != -1 {
line = line[:idx]
line = line[:idx] // Remove comments
}
line = strings.TrimSpace(line)
if line == "" {
@@ -226,11 +228,11 @@ func loadData(path string) error {
}
entry, err := parseEntry(line)
if err != nil {
return fmt.Errorf("error in %s at line %d: %v", path, lineIdx, err)
return entries, fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
}
refMap[listName] = append(refMap[listName], &entry)
entries = append(entries, &entry)
}
return nil
return entries, nil
}
func parseList(refName string, refList []*Entry) error {
@@ -242,7 +244,7 @@ func parseList(refName string, refList []*Entry) error {
for _, entry := range refList {
if entry.Type == dlc.RuleTypeInclude {
if len(entry.Affs) != 0 {
return fmt.Errorf("affiliation is not allowed for include:%s", entry.Value)
return fmt.Errorf("affiliation is not allowed for include:%q", entry.Value)
}
inc := &Inclusion{Source: entry.Value}
for _, attr := range entry.Attrs {
@@ -268,11 +270,31 @@ func parseList(refName string, refList []*Entry) error {
return nil
}
func polishList(roughMap *map[string]*Entry) []*Entry {
finalList := make([]*Entry, 0, len(*roughMap))
queuingList := make([]*Entry, 0, len(*roughMap)) // Domain/full entries without attr
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
return true
}
if len(entry.Attrs) == 0 {
return len(incFilter.MustAttrs) == 0
}
for _, m := range incFilter.MustAttrs {
if !slices.Contains(entry.Attrs, m) {
return false
}
}
for _, b := range incFilter.BanAttrs {
if slices.Contains(entry.Attrs, b) {
return false
}
}
return true
}
func polishList(roughMap map[string]*Entry) []*Entry {
finalList := make([]*Entry, 0, len(roughMap))
queuingList := make([]*Entry, 0, len(roughMap)) // Domain/full entries without attr
domainsMap := make(map[string]bool)
for _, entry := range *roughMap {
for _, entry := range roughMap {
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
case dlc.RuleTypeRegexp:
finalList = append(finalList, entry)
@@ -306,9 +328,6 @@ func polishList(roughMap *map[string]*Entry) []*Entry {
break
}
pd = pd[idx+1:] // Go for next parent
if !strings.Contains(pd, ".") {
break
} // Not allow tld to be a parent
if domainsMap[pd] {
isRedundant = true
break
@@ -331,32 +350,11 @@ func resolveList(pl *ParsedList) error {
}
if cirIncMap[pl.Name] {
return fmt.Errorf("circular inclusion in: %s", pl.Name)
return fmt.Errorf("circular inclusion in: %q", pl.Name)
}
cirIncMap[pl.Name] = true
defer delete(cirIncMap, pl.Name)
isMatchAttrFilters := func(entry *Entry, incFilter *Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
return true
}
if len(entry.Attrs) == 0 {
return len(incFilter.MustAttrs) == 0
}
for _, m := range incFilter.MustAttrs {
if !slices.Contains(entry.Attrs, m) {
return false
}
}
for _, b := range incFilter.BanAttrs {
if slices.Contains(entry.Attrs, b) {
return false
}
}
return true
}
roughMap := make(map[string]*Entry) // Avoid basic duplicates
for _, dentry := range pl.Entries { // Add direct entries
roughMap[dentry.Plain] = dentry
@@ -375,80 +373,75 @@ func resolveList(pl *ParsedList) error {
}
}
}
finalMap[pl.Name] = polishList(&roughMap)
finalMap[pl.Name] = polishList(roughMap)
return nil
}
func main() {
flag.Parse()
func run() error {
dir := *dataPath
fmt.Println("Use domain lists in", dir)
fmt.Printf("using domain lists data in %q\n", dir)
// Generate refMap
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
refMap := make(map[string][]*Entry)
err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if info.IsDir() {
if d.IsDir() {
return nil
}
if err := loadData(path); err != nil {
return err
listName := strings.ToUpper(filepath.Base(path))
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %q", listName)
}
return nil
refMap[listName], err = loadData(path)
return err
})
if err != nil {
fmt.Println("Failed to loadData:", err)
os.Exit(1)
return fmt.Errorf("failed to loadData: %w", err)
}
// Generate plMap
for refName, refList := range refMap {
if err := parseList(refName, refList); err != nil {
fmt.Println("Failed to parseList:", err)
os.Exit(1)
return fmt.Errorf("failed to parseList %q: %w", refName, err)
}
}
// Generate finalMap
for _, pl := range plMap {
for plname, pl := range plMap {
if err := resolveList(pl); err != nil {
fmt.Println("Failed to resolveList:", err)
os.Exit(1)
return fmt.Errorf("failed to resolveList %q: %w", plname, err)
}
}
// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
fmt.Println("Failed to create output directory:", mkErr)
os.Exit(1)
}
// Make sure output directory exists
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}
// Export plaintext list
var exportListSlice []string
for raw := range strings.SplitSeq(*exportLists, ",") {
if trimmed := strings.TrimSpace(raw); trimmed != "" {
exportListSlice = append(exportListSlice, trimmed)
for rawEpList := range strings.SplitSeq(*exportLists, ",") {
if epList := strings.TrimSpace(rawEpList); epList != "" {
entries, exist := finalMap[strings.ToUpper(epList)]
if !exist || len(entries) == 0 {
fmt.Printf("list %q does not exist or is empty\n", epList)
continue
}
if err := writePlainList(epList, entries); err != nil {
fmt.Printf("failed to write list %q: %v\n", epList, err)
continue
}
fmt.Printf("list %q has been generated successfully.\n", epList)
}
}
for _, exportList := range exportListSlice {
if err := writePlainList(exportList); err != nil {
fmt.Println("Failed to write list:", err)
continue
}
fmt.Printf("list %q has been generated successfully.\n", exportList)
}
// Generate dat file
protoList := new(router.GeoSiteList)
for siteName, siteEntries := range finalMap {
site, err := makeProtoList(siteName, siteEntries)
if err != nil {
fmt.Println("Failed to makeProtoList:", err)
os.Exit(1)
return fmt.Errorf("failed to makeProtoList %q: %w", siteName, err)
}
protoList.Entry = append(protoList.Entry, site)
}
@@ -459,13 +452,19 @@ func main() {
protoBytes, err := proto.Marshal(protoList)
if err != nil {
fmt.Println("Failed to marshal:", err)
os.Exit(1)
return fmt.Errorf("failed to marshal: %w", err)
}
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
fmt.Println("Failed to write output:", err)
return fmt.Errorf("failed to write output: %w", err)
}
fmt.Printf("%q has been generated successfully.\n", *outputName)
return nil
}
func main() {
flag.Parse()
if err := run(); err != nil {
fmt.Printf("Fatal error: %v\n", err)
os.Exit(1)
} else {
fmt.Println(*outputName, "has been generated successfully.")
}
}
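
For illustration, a minimal self-contained sketch of the attribute filtering applied to include rules (for example include:sina @-!cn in category-social-media-cn above), modeled on the isMatchAttrFilters helper shown in the main.go diff. The trimmed Entry and Inclusion structs and the reading of @-attr as a banned attribute are assumptions for this sketch, not definitions taken from the repository.

package main

import (
	"fmt"
	"slices"
)

// Entry and Inclusion are reduced to the fields this sketch needs; the real
// structs in main.go carry more fields (assumption for illustration).
type Entry struct {
	Attrs []string
}

type Inclusion struct {
	MustAttrs []string // attributes required by an include rule
	BanAttrs  []string // attributes excluded by an include rule
}

// Same logic as the isMatchAttrFilters helper in the diff above.
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
	if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
		return true
	}
	if len(entry.Attrs) == 0 {
		return len(incFilter.MustAttrs) == 0
	}
	for _, m := range incFilter.MustAttrs {
		if !slices.Contains(entry.Attrs, m) {
			return false
		}
	}
	for _, b := range incFilter.BanAttrs {
		if slices.Contains(entry.Attrs, b) {
			return false
		}
	}
	return true
}

func main() {
	// Rough analogue of include:sina @-!cn: drop entries carrying the "!cn" attribute.
	filter := &Inclusion{BanAttrs: []string{"!cn"}}
	fmt.Println(isMatchAttrFilters(&Entry{Attrs: []string{"!cn"}}, filter)) // false
	fmt.Println(isMatchAttrFilters(&Entry{}, filter))                      // true
}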