Compare commits

...

11 Commits

Author SHA1 Message Date
MkQtS
947556aa16 Improve codes (#3242)
* main.go: improve code

* main.go: move refMap from global variable to local

* main.go: allow tld to be a parent domain

* datdump: improve code
2026-02-03 22:38:18 +08:00
susaninz
44de14725e kinopub: add cdn2cdn.com, cdn2site.com, pushbr.com (#3240)
These CDN domains are used by Kinopub for:
- cdn2cdn.com: video streaming CDN
- cdn2site.com: video streaming CDN
- pushbr.com: poster/thumbnail images

Discovered via network traffic analysis on the Kinopub web app.
Without these domains proxied, poster images fail to load.

---------

Co-authored-by: Ivan Slezkin <ivanslezkin@Mac.lan>
2026-02-03 19:04:53 +08:00
sergeevms
c638ec66f0 salesforce: add salesforce-setup.com (#3239) 2026-02-02 23:31:35 +08:00
susaninz
4c8b1438f8 kinopub: add cdn-service.space (#3220)
This domain is used by the Kinopub Android TV app for version checking.
Without it, the app hangs on startup when accessed from regions where
this domain is blocked.

Discovered during network traffic analysis on 2026-01-27.
2026-02-02 23:15:54 +08:00
Emik
3399285ea9 add pjsekai.sega.jp to projectsekai (#3236) 2026-02-01 21:35:51 +08:00
⑨bingyin
62346cf6b7 Add bsappapi.com to Binance (#3235) 2026-02-01 21:30:20 +08:00
jinqiang zhang
8dee321846 qcloud: add edgeone.cool (#3237) 2026-02-01 21:28:10 +08:00
fernvenue
b117cf851f Add packages.microsoft.com to microsoft-dev. (#3234) 2026-02-01 11:58:25 +08:00
jinqiang zhang
0b6606758d add louisvuitton (#3233) 2026-01-31 18:04:21 +08:00
Blackteahamburger
fcf9c67d83 category-education-cn: add zjzs.net (#3232) 2026-01-30 19:20:41 +08:00
MkQtS
56e0b47c73 Clean up ad lists (#3231)
* category-ads-all: include adjust

* category-ads-all: include clearbit

* category-ads-all: include ogury

* category-ads-all: include openx

* category-ads-all: include pubmatic

and remove pubmatic-ads

* category-ads-all: include segment

* category-ads-all: include supersonic

* geolocation-cn: remove the inclusion of umeng

it's included in alibaba

* add unitychina

* remove unity-ads

use unity@ads or unitychina@ads instead
2026-01-30 12:10:37 +08:00
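
The "allow tld to be a parent domain" item in commit #3242 refers to the deduplication walk in main.go (diffed further down): each domain is climbed through its parent labels and dropped once a parent is already listed, and the guard that used to stop the walk before bare TLDs is removed. A minimal sketch of the resulting behaviour, assuming an illustrative helper name and sample data that are not from the repo:

package main

import (
	"fmt"
	"strings"
)

// isRedundant reports whether domain is already covered by a shorter entry
// in existing, walking parent labels all the way up to the bare TLD.
func isRedundant(domain string, existing map[string]bool) bool {
	pd := domain
	for {
		idx := strings.Index(pd, ".")
		if idx == -1 {
			return false // no parent label left
		}
		pd = pd[idx+1:] // next parent: "a.b.cn" -> "b.cn" -> "cn"
		if existing[pd] {
			return true
		}
	}
}

func main() {
	existing := map[string]bool{"cn": true}
	// With the TLD guard removed, "example.cn" is now considered redundant
	// because the bare TLD "cn" is itself a listed entry.
	fmt.Println(isRedundant("example.cn", existing)) // true
}
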
25 changed files with 151 additions and 132 deletions

View File

@@ -31,11 +31,13 @@ type DomainList struct {
}
func (d *DomainRule) domain2String() string {
dstring := d.Type + ":" + d.Value
var dstr strings.Builder
dstr.Grow(len(d.Type) + len(d.Value) + 10)
fmt.Fprintf(&dstr, "%s:%s", d.Type, d.Value)
if len(d.Attrs) != 0 {
dstring += ":@" + strings.Join(d.Attrs, ",@")
fmt.Fprintf(&dstr, ":@%s", strings.Join(d.Attrs, ",@"))
}
return dstring
return dstr.String()
}
func loadGeosite(path string) ([]DomainList, map[string]*DomainList, error) {
@@ -82,10 +84,10 @@ func loadGeosite(path string) ([]DomainList, map[string]*DomainList, error) {
func exportSite(name string, domainListByName map[string]*DomainList) error {
domainList, ok := domainListByName[strings.ToUpper(name)]
if !ok {
return fmt.Errorf("list '%s' does not exist", name)
return fmt.Errorf("list %q does not exist", name)
}
if len(domainList.Rules) == 0 {
return fmt.Errorf("list '%s' is empty", name)
return fmt.Errorf("list %q is empty", name)
}
file, err := os.Create(filepath.Join(*outputDir, name+".yml"))
if err != nil {
@@ -119,22 +121,16 @@ func exportAll(filename string, domainLists []DomainList) error {
return w.Flush()
}
func main() {
flag.Parse()
// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
fmt.Println("Failed to create output directory:", mkErr)
os.Exit(1)
}
func run() error {
// Make sure output directory exists
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}
fmt.Printf("Loading %s...\n", *inputData)
fmt.Printf("loading source data %q...\n", *inputData)
domainLists, domainListByName, err := loadGeosite(*inputData)
if err != nil {
fmt.Println("Failed to loadGeosite:", err)
os.Exit(1)
return fmt.Errorf("failed to loadGeosite: %w", err)
}
var exportListSlice []string
@@ -150,15 +146,24 @@ func main() {
for _, eplistname := range exportListSlice {
if strings.EqualFold(eplistname, "_all_") {
if err := exportAll(filepath.Base(*inputData)+"_plain.yml", domainLists); err != nil {
fmt.Println("Failed to exportAll:", err)
fmt.Printf("failed to exportAll: %v\n", err)
continue
}
} else {
if err := exportSite(eplistname, domainListByName); err != nil {
fmt.Println("Failed to exportSite:", err)
fmt.Printf("failed to exportSite: %v\n", err)
continue
}
}
fmt.Printf("list: '%s' has been exported successfully.\n", eplistname)
fmt.Printf("list: %q has been exported successfully.\n", eplistname)
}
return nil
}
func main() {
flag.Parse()
if err := run(); err != nil {
fmt.Printf("Fatal error: %v\n", err)
os.Exit(1)
}
}
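
Both the datdump tool above and main.go further down switch from plain string concatenation to a pre-grown strings.Builder when rendering the plain entry format type:domain.tld:@attr1,@attr2. A self-contained sketch of that pattern, with the struct trimmed to just the fields involved (the real DomainRule/Entry types carry more):

package main

import (
	"fmt"
	"strings"
)

// rule keeps only the fields the formatter needs.
type rule struct {
	Type  string
	Value string
	Attrs []string
}

// plain renders "type:domain.tld:@attr1,@attr2", growing the builder once
// up front instead of reallocating on every concatenation.
func (r rule) plain() string {
	var b strings.Builder
	b.Grow(len(r.Type) + len(r.Value) + 10)
	fmt.Fprintf(&b, "%s:%s", r.Type, r.Value)
	if len(r.Attrs) != 0 {
		fmt.Fprintf(&b, ":@%s", strings.Join(r.Attrs, ",@"))
	}
	return b.String()
}

func main() {
	r := rule{Type: "domain", Value: "example.com", Attrs: []string{"ads", "cn"}}
	fmt.Println(r.plain()) // domain:example.com:@ads,@cn
}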

View File

@@ -1,4 +1,4 @@
adjust.com @ads
adjust.net.in @ads
adjust.io @ads
adjust.net.in @ads
adjust.world @ads

View File

@@ -31,6 +31,7 @@ binanceapi.com
binanceru.net
bnbstatic.com
bntrace.com
bsappapi.com
nftstatic.com
# saas

View File

@@ -1,7 +1,6 @@
# This file contains domains that clearly serving ads
include:acfun-ads
include:adjust-ads
include:adobe-ads
include:alibaba-ads
include:amazon-ads
@@ -10,7 +9,6 @@ include:baidu-ads
include:bytedance-ads
include:category-ads-ir
include:cctv @ads
include:clearbit-ads
include:disney @ads
include:dmm-ads
include:duolingo-ads
@@ -26,28 +24,24 @@ include:letv-ads
include:meta-ads
include:microsoft-ads
include:netease-ads
include:ogury-ads
include:ookla-speedtest-ads
include:openai @ads
include:openx-ads
include:picacg @ads
include:pikpak @ads
include:pixiv @ads
include:pubmatic-ads
include:qihoo360-ads
include:samsung @ads
include:segment-ads
include:sina-ads
include:snap @ads
include:sohu-ads
include:spotify-ads
include:supersonic-ads
include:television-ads
include:tencent-ads
include:tendcloud @ads
include:twitter @ads
include:umeng-ads
include:unity-ads
include:unity @ads
include:unitychina @ads
include:xhamster-ads
include:xiaomi-ads
include:ximalaya-ads

View File

@@ -1,6 +1,13 @@
# This file contains domains of all ads providers, including both the domains that serves ads, and the domains of providers themselves.
include:category-ads
include:adjust
include:clearbit
include:ogury
include:openx
include:pubmatic
include:segment
include:supersonic
include:taboola
1rx.io @ads
@@ -21,7 +28,6 @@ lijit.com @ads
mfadsrvr.com @ads
mgid.com @ads
ns1p.net @ads
pubmatic.com @ads
sigmob.com @ads
spotxchange.com @ads
unimhk.com @ads

View File

@@ -48,6 +48,7 @@ include:kakao
include:kaspersky
include:lg
include:logitech
include:louisvuitton
include:mailru-group
include:meta
include:microsoft

View File

@@ -18,6 +18,7 @@ include:segmentfault
include:sxl
include:tencent-dev
include:ubuntukylin
include:unitychina
jinrishici.com
openvela.com

View File

@@ -142,3 +142,5 @@ ystbds.com
zhan.com
# 智慧树
zhihuishu.com
# 浙江省教育考试院
zjzs.net

View File

@@ -24,7 +24,6 @@ include:category-social-media-cn
# Advertisment & Analytics
include:getui
include:jiguang
include:umeng
# 神策数据
sensorsdata.cn

View File

@@ -6,4 +6,9 @@ gfw.ovh # sub domains mirror
mos-gorsud.co # kinopub domain to generate a mirror site through gfw.ovh
# kinopub CDN servers
cdn-service.space
cdn2cdn.com
cdn2site.com
pushbr.com # poster images CDN
regexp:(\w+)-static-[0-9]+\.cdntogo\.net$
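
The regexp rule above can be sanity-checked with Go's regexp package; the hostnames below are invented purely for illustration:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as the kinopub rule above.
	re := regexp.MustCompile(`(\w+)-static-[0-9]+\.cdntogo\.net$`)

	for _, host := range []string{
		"video-static-12.cdntogo.net", // matches
		"pushbr.com",                  // does not match
	} {
		fmt.Printf("%-30s %v\n", host, re.MatchString(host))
	}
}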

data/louisvuitton (new file, 5 lines)
View File

@@ -0,0 +1,5 @@
louisvuitton.cn @cn
louisvuitton.com
lvcampaign.com @cn
full:tp.louisvuitton.com @cn

View File

@@ -60,6 +60,7 @@ full:default.exp-tas.com
full:developer.microsoft.com
full:download.visualstudio.microsoft.com
full:dtlgalleryint.cloudapp.net
full:packages.microsoft.com
full:poshtestgallery.cloudapp.net
full:psg-int-centralus.cloudapp.net
full:psg-int-eastus.cloudapp.net

View File

@@ -1,3 +1,3 @@
ogury.co @ads
ogury.com @ads
presage.io @ads
ogury.co @ads

View File

@@ -1 +1,2 @@
sekai.colorfulpalette.org
pjsekai.sega.jp

View File

@@ -2,5 +2,3 @@
pubmatic.com
pubmatic.co.jp
include:pubmatic-ads

View File

@@ -1 +0,0 @@
ads.pubmatic.com @ads

View File

@@ -44,6 +44,7 @@ dnsv1.com.cn
dothework.cn
ectencent.cn
ectencent.com.cn
edgeone.cool
edgeonedy1.com
essurl.com
exmailgz.com

View File

@@ -24,6 +24,7 @@ pardot.com
quotable.com
radian6.com
relateiq.com
salesforce-setup.com
salesforce.com
salesforce.org
salesforceiq.com

View File

@@ -1,4 +1,5 @@
ssacdn.com @ads
supersonic.com @ads
supersonicads.com @ads
ssacdn.com @ads
supersonicads-a.akamaihd.net @ads

View File

@@ -1,4 +1,6 @@
unity.com
unity3d.com
include:unity-ads
# Ads/tracking
iads.unity3d.com @ads
unityads.unity3d.com @ads

View File

@@ -1,6 +1,11 @@
# 优三缔 / 优美缔 / 团结引擎
u3d.cn
unity.cn
unitychina.cn
# Ads/tracking
ads.unitychina.cn @ads
splash-ads.cdn.unity.cn @ads
splash-ads.unitychina.cn @ads
unityads.unity.cn @ads
unityads.unity3d.com @ads
unityads.unitychina.cn @ads

main.go (179 lines changed)
View File

@@ -23,7 +23,6 @@ var (
)
var (
refMap = make(map[string][]*Entry)
plMap = make(map[string]*ParsedList)
finalMap = make(map[string][]*Entry)
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
@@ -78,18 +77,14 @@ func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
return site, nil
}
func writePlainList(exportedName string) error {
targetList, exist := finalMap[strings.ToUpper(exportedName)]
if !exist || len(targetList) == 0 {
return fmt.Errorf("list %q does not exist or is empty.", exportedName)
}
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(exportedName)+".txt"))
func writePlainList(listname string, entries []*Entry) error {
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(listname)+".txt"))
if err != nil {
return err
}
defer file.Close()
w := bufio.NewWriter(file)
for _, entry := range targetList {
for _, entry := range entries {
fmt.Fprintln(w, entry.Plain)
}
return w.Flush()
@@ -99,7 +94,7 @@ func parseEntry(line string) (Entry, error) {
var entry Entry
parts := strings.Fields(line)
if len(parts) == 0 {
return entry, fmt.Errorf("empty line: %q", line)
return entry, fmt.Errorf("empty line")
}
// Parse type and value
@@ -138,7 +133,7 @@ func parseEntry(line string) (Entry, error) {
}
}
// Parse/Check attributes and affiliations
// Parse attributes and affiliations
for _, part := range parts[1:] {
if strings.HasPrefix(part, "@") {
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
@@ -159,10 +154,13 @@ func parseEntry(line string) (Entry, error) {
// Sort attributes
slices.Sort(entry.Attrs)
// Formated plain entry: type:domain.tld:@attr1,@attr2
entry.Plain = entry.Type + ":" + entry.Value
var plain strings.Builder
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
fmt.Fprintf(&plain, "%s:%s", entry.Type, entry.Value)
if len(entry.Attrs) != 0 {
entry.Plain = entry.Plain + ":@" + strings.Join(entry.Attrs, ",@")
fmt.Fprintf(&plain, ":@%s", strings.Join(entry.Attrs, ",@"))
}
entry.Plain = plain.String()
return entry, nil
}
@@ -200,25 +198,21 @@ func validateSiteName(name string) bool {
return true
}
func loadData(path string) error {
func loadData(path string) ([]*Entry, error) {
file, err := os.Open(path)
if err != nil {
return err
return nil, err
}
defer file.Close()
listName := strings.ToUpper(filepath.Base(path))
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %s", listName)
}
var entries []*Entry
scanner := bufio.NewScanner(file)
lineIdx := 0
for scanner.Scan() {
line := scanner.Text()
lineIdx++
// Remove comments
if idx := strings.Index(line, "#"); idx != -1 {
line = line[:idx]
line = line[:idx] // Remove comments
}
line = strings.TrimSpace(line)
if line == "" {
@@ -226,11 +220,11 @@ func loadData(path string) error {
}
entry, err := parseEntry(line)
if err != nil {
return fmt.Errorf("error in %s at line %d: %v", path, lineIdx, err)
return entries, fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
}
refMap[listName] = append(refMap[listName], &entry)
entries = append(entries, &entry)
}
return nil
return entries, nil
}
func parseList(refName string, refList []*Entry) error {
@@ -242,7 +236,7 @@ func parseList(refName string, refList []*Entry) error {
for _, entry := range refList {
if entry.Type == dlc.RuleTypeInclude {
if len(entry.Affs) != 0 {
return fmt.Errorf("affiliation is not allowed for include:%s", entry.Value)
return fmt.Errorf("affiliation is not allowed for include:%q", entry.Value)
}
inc := &Inclusion{Source: entry.Value}
for _, attr := range entry.Attrs {
@@ -268,11 +262,31 @@ func parseList(refName string, refList []*Entry) error {
return nil
}
func polishList(roughMap *map[string]*Entry) []*Entry {
finalList := make([]*Entry, 0, len(*roughMap))
queuingList := make([]*Entry, 0, len(*roughMap)) // Domain/full entries without attr
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
return true
}
if len(entry.Attrs) == 0 {
return len(incFilter.MustAttrs) == 0
}
for _, m := range incFilter.MustAttrs {
if !slices.Contains(entry.Attrs, m) {
return false
}
}
for _, b := range incFilter.BanAttrs {
if slices.Contains(entry.Attrs, b) {
return false
}
}
return true
}
func polishList(roughMap map[string]*Entry) []*Entry {
finalList := make([]*Entry, 0, len(roughMap))
queuingList := make([]*Entry, 0, len(roughMap)) // Domain/full entries without attr
domainsMap := make(map[string]bool)
for _, entry := range *roughMap {
for _, entry := range roughMap {
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
case dlc.RuleTypeRegexp:
finalList = append(finalList, entry)
@@ -306,9 +320,6 @@ func polishList(roughMap *map[string]*Entry) []*Entry {
break
}
pd = pd[idx+1:] // Go for next parent
if !strings.Contains(pd, ".") {
break
} // Not allow tld to be a parent
if domainsMap[pd] {
isRedundant = true
break
@@ -331,32 +342,11 @@ func resolveList(pl *ParsedList) error {
}
if cirIncMap[pl.Name] {
return fmt.Errorf("circular inclusion in: %s", pl.Name)
return fmt.Errorf("circular inclusion in: %q", pl.Name)
}
cirIncMap[pl.Name] = true
defer delete(cirIncMap, pl.Name)
isMatchAttrFilters := func(entry *Entry, incFilter *Inclusion) bool {
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
return true
}
if len(entry.Attrs) == 0 {
return len(incFilter.MustAttrs) == 0
}
for _, m := range incFilter.MustAttrs {
if !slices.Contains(entry.Attrs, m) {
return false
}
}
for _, b := range incFilter.BanAttrs {
if slices.Contains(entry.Attrs, b) {
return false
}
}
return true
}
roughMap := make(map[string]*Entry) // Avoid basic duplicates
for _, dentry := range pl.Entries { // Add direct entries
roughMap[dentry.Plain] = dentry
@@ -375,80 +365,75 @@ func resolveList(pl *ParsedList) error {
}
}
}
finalMap[pl.Name] = polishList(&roughMap)
finalMap[pl.Name] = polishList(roughMap)
return nil
}
func main() {
flag.Parse()
func run() error {
dir := *dataPath
fmt.Println("Use domain lists in", dir)
fmt.Printf("using domain lists data in %q\n", dir)
// Generate refMap
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
refMap := make(map[string][]*Entry)
err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if info.IsDir() {
if d.IsDir() {
return nil
}
if err := loadData(path); err != nil {
return err
listName := strings.ToUpper(filepath.Base(path))
if !validateSiteName(listName) {
return fmt.Errorf("invalid list name: %q", listName)
}
return nil
refMap[listName], err = loadData(path)
return err
})
if err != nil {
fmt.Println("Failed to loadData:", err)
os.Exit(1)
return fmt.Errorf("failed to loadData: %w", err)
}
// Generate plMap
for refName, refList := range refMap {
if err := parseList(refName, refList); err != nil {
fmt.Println("Failed to parseList:", err)
os.Exit(1)
return fmt.Errorf("failed to parseList %q: %w", refName, err)
}
}
// Generate finalMap
for _, pl := range plMap {
for plname, pl := range plMap {
if err := resolveList(pl); err != nil {
fmt.Println("Failed to resolveList:", err)
os.Exit(1)
return fmt.Errorf("failed to resolveList %q: %w", plname, err)
}
}
// Create output directory if not exist
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
fmt.Println("Failed to create output directory:", mkErr)
os.Exit(1)
}
// Make sure output directory exists
if err := os.MkdirAll(*outputDir, 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}
// Export plaintext list
var exportListSlice []string
for raw := range strings.SplitSeq(*exportLists, ",") {
if trimmed := strings.TrimSpace(raw); trimmed != "" {
exportListSlice = append(exportListSlice, trimmed)
for rawEpList := range strings.SplitSeq(*exportLists, ",") {
if epList := strings.TrimSpace(rawEpList); epList != "" {
entries, exist := finalMap[strings.ToUpper(epList)]
if !exist || len(entries) == 0 {
fmt.Printf("list %q does not exist or is empty\n", epList)
continue
}
if err := writePlainList(epList, entries); err != nil {
fmt.Printf("failed to write list %q: %v\n", epList, err)
continue
}
fmt.Printf("list %q has been generated successfully.\n", epList)
}
}
for _, exportList := range exportListSlice {
if err := writePlainList(exportList); err != nil {
fmt.Println("Failed to write list:", err)
continue
}
fmt.Printf("list %q has been generated successfully.\n", exportList)
}
// Generate dat file
protoList := new(router.GeoSiteList)
for siteName, siteEntries := range finalMap {
site, err := makeProtoList(siteName, siteEntries)
if err != nil {
fmt.Println("Failed to makeProtoList:", err)
os.Exit(1)
return fmt.Errorf("failed to makeProtoList %q: %w", siteName, err)
}
protoList.Entry = append(protoList.Entry, site)
}
@@ -459,13 +444,19 @@ func main() {
protoBytes, err := proto.Marshal(protoList)
if err != nil {
fmt.Println("Failed to marshal:", err)
os.Exit(1)
return fmt.Errorf("failed to marshal: %w", err)
}
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
fmt.Println("Failed to write output:", err)
return fmt.Errorf("failed to write output: %w", err)
}
fmt.Printf("%q has been generated successfully.\n", *outputName)
return nil
}
func main() {
flag.Parse()
if err := run(); err != nil {
fmt.Printf("Fatal error: %v\n", err)
os.Exit(1)
} else {
fmt.Println(*outputName, "has been generated successfully.")
}
}
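
The ad-list cleanup (#3231) leans on attribute-filtered includes such as unity@ads; the refactored main.go handles that filtering in the extracted isMatchAttrFilters helper shown above. A reduced, runnable sketch of that filter, with Entry and Inclusion trimmed to just the fields the helper reads:

package main

import (
	"fmt"
	"slices"
)

// Trimmed-down versions of the repo's Entry and Inclusion types.
type Entry struct {
	Value string
	Attrs []string
}

type Inclusion struct {
	Source    string
	MustAttrs []string // attributes an entry must carry, e.g. "ads" for unity@ads
	BanAttrs  []string // attributes that exclude an entry
}

// isMatchAttrFilters mirrors the helper extracted in the diff above.
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
	if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
		return true
	}
	if len(entry.Attrs) == 0 {
		return len(incFilter.MustAttrs) == 0
	}
	for _, m := range incFilter.MustAttrs {
		if !slices.Contains(entry.Attrs, m) {
			return false
		}
	}
	for _, b := range incFilter.BanAttrs {
		if slices.Contains(entry.Attrs, b) {
			return false
		}
	}
	return true
}

func main() {
	// include:unity @ads keeps only entries tagged @ads.
	inc := &Inclusion{Source: "unity", MustAttrs: []string{"ads"}}
	entries := []*Entry{
		{Value: "unity.com"},
		{Value: "unityads.unity3d.com", Attrs: []string{"ads"}},
	}
	for _, e := range entries {
		fmt.Printf("%-22s kept: %v\n", e.Value, isMatchAttrFilters(e, inc))
	}
}

This is the mechanism behind entries like include:unity @ads in category-ads, which pulls only the ad/tracking hosts out of the unity list, while unfiltered includes such as include:adjust in category-ads-all bring in every entry of the referenced list.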