Compare commits

...

5 Commits

Author SHA1 Message Date
ir0nmand0
baa1409cfb category-entertainment-ru: add beeline.tv (#3372)
Beeline TV (beeline.tv) is a Russian streaming service by VEON (Beeline).
Movies, TV series, and live TV channels for Russian-speaking audience.

Subdomains (covered by domain match): web-prod, rest, images, static, video.
External deps (mediavitrina.ru, vimpelcom.ru) already in category-ru.

Co-authored-by: Dima Dudukin <dima.dudukin.dev@gmail.com>
2026-03-19 18:12:53 +08:00
MkQtS
a22d247c5a qcloud: comment out useless regexp rules (#3371)
Overridden by other domain-type rules, but cannot be optimized automatically.

They are actually useless and only affect performance.
2026-03-19 12:43:29 +08:00
yobarerukoto
d311bbe50b geolocation-cn: add gzyowin.com (#3369) 2026-03-18 19:29:43 +08:00
MkQtS
1db558b165 main.go: support to generate multiple custom dats (#3367)
This allows removing any unwanted lists without modifying the domains
data, and you can generate multiple custom v2ray dat files in a single
command.

As long as the source data is consistent, any list remaining in the trimmed
dat contains the same rules as the corresponding list in the full dat.

Use the new option `datprofile` to specify the config json file path.
`outputname` will be ignored when `datprofile` is set.

Co-authored-by: database64128 <free122448@hotmail.com>
2026-03-18 18:32:05 +08:00
Konstantin
9ee0757263 Add Tilda domains (#3368)
* add tilda

* tilda: add to category-dev
2026-03-18 18:05:54 +08:00
7 changed files with 157 additions and 28 deletions

6
.gitignore vendored
View File

@@ -4,9 +4,9 @@
/domain-list-community
/domain-list-community.exe
# Generated dat file.
dlc.dat
# Generated dat files.
/*.dat
# Exported plaintext lists.
/*.yml
/*.txt
/*.yml

View File

@@ -54,6 +54,7 @@ include:stackexchange
include:strikingly
include:termux
include:thelinuxfoundation
include:tilda
include:unity
include:v8

View File

@@ -11,6 +11,7 @@ include:okko
include:wink
24h.tv
amediateka.ru
beeline.tv
ivi.ru
premier.one
smotreshka.tv

View File

@@ -1625,3 +1625,6 @@ ao-x.ac.cn
# 万集科技 京ICP备18036282号-2
wanji.net.cn
# 广州市雅望互联网服务有限公司
gzyowin.com

View File

@@ -258,13 +258,14 @@ tdnsv14.net
tdnsv15.net
# myqcloud inside mainland China
regexp:\.(.+-)?ap-beijing(-.+)?\.myqcloud\.com$ #北京
regexp:\.(.+-)?ap-nanjing(-.+)?\.myqcloud\.com$ #
regexp:\.(.+-)?ap-shanghai(-.+)?\.myqcloud\.com$ #上海
regexp:\.(.+-)?ap-guangzhou(-.+)?\.myqcloud\.com$ #广州
regexp:\.(.+-)?ap-chengdu(-.+)?\.myqcloud\.com$ #成都
regexp:\.(.+-)?ap-chongqing(-.+)?\.myqcloud\.com$ #重庆
regexp:\.(.+-)?ap-shenzhen(-.+)?\.myqcloud\.com$ #深圳
# overrided by myqcloud.com
#regexp:\.(.+-)?ap-beijing(-.+)?\.myqcloud\.com$ #
#regexp:\.(.+-)?ap-nanjing(-.+)?\.myqcloud\.com$ #南京
#regexp:\.(.+-)?ap-shanghai(-.+)?\.myqcloud\.com$ #上海
#regexp:\.(.+-)?ap-guangzhou(-.+)?\.myqcloud\.com$ #广州
#regexp:\.(.+-)?ap-chengdu(-.+)?\.myqcloud\.com$ #成都
#regexp:\.(.+-)?ap-chongqing(-.+)?\.myqcloud\.com$ #重庆
#regexp:\.(.+-)?ap-shenzhen(-.+)?\.myqcloud\.com$ #深圳
# COS 使用到的非中国大陆的地域与可用区,参见 https://cloud.tencent.com/document/product/436/6224
ap-hongkong.myqcloud.com @!cn #中国香港
@@ -282,13 +283,14 @@ eu-frankfurt.myqcloud.com @!cn #法兰克福
eu-moscow.myqcloud.com @!cn #莫斯科
# tencentcos inside mainland China
regexp:\.(.+-)?ap-beijing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #北京
regexp:\.(.+-)?ap-nanjing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #
regexp:\.(.+-)?ap-shanghai(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #上海
regexp:\.(.+-)?ap-guangzhou(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #广州
regexp:\.(.+-)?ap-chengdu(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #成都
regexp:\.(.+-)?ap-chongqing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #重庆
regexp:\.(.+-)?ap-shenzhen(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #深圳
# overrided by tencentcos.cn, tencentcos.com, tencentcos.com.cn
#regexp:\.(.+-)?ap-beijing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #
#regexp:\.(.+-)?ap-nanjing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #南京
#regexp:\.(.+-)?ap-shanghai(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #上海
#regexp:\.(.+-)?ap-guangzhou(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #广州
#regexp:\.(.+-)?ap-chengdu(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #成都
#regexp:\.(.+-)?ap-chongqing(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #重庆
#regexp:\.(.+-)?ap-shenzhen(-.+)?\.tencentcos\.(cn|com(\.cn)?)$ #深圳
# tencentcos outside mainland China
# regexp:.+\.ap-hongkong\.tencentcos\.(cn|com(\.cn)?)$ @!cn #中国香港

5
data/tilda Normal file
View File

@@ -0,0 +1,5 @@
tilda.cc
tilda.ru
tilda.ws
tildaapi.com
tildacdn.com

139
main.go
View File

@@ -2,6 +2,7 @@ package main
import (
"bufio"
"encoding/json"
"flag"
"fmt"
"os"
@@ -19,6 +20,7 @@ var (
dataPath = flag.String("datapath", "./data", "Path to your custom 'data' directory")
outputName = flag.String("outputname", "dlc.dat", "Name of the generated dat file")
outputDir = flag.String("outputdir", "./", "Directory to place all generated files")
datProfile = flag.String("datprofile", "", "Path of config file used to assemble custom dats")
exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma")
)
@@ -47,6 +49,23 @@ type Processor struct {
cirIncMap map[string]bool
}
// GeoSites holds every generated proto site list together with an index
// from list name (the GeoSite CountryCode) to its position in Sites, so
// dat tasks can resolve lists by name without scanning.
// NOTE(review): lookups go through strings.ToUpper, so CountryCode is
// presumably already upper-case — confirm against makeProtoList's input.
type GeoSites struct {
	Sites   []*router.GeoSite
	SiteIdx map[string]int // CountryCode -> index into Sites
}
// DatTask describes one dat file to assemble, decoded from the JSON
// config file given via the `datprofile` flag.
type DatTask struct {
	Name  string   `json:"name"`  // output file name (lowercased basename is used on disk)
	Mode  string   `json:"mode"`  // one of ModeAll, ModeAllowlist, ModeDenylist
	Lists []string `json:"lists"` // list names the mode applies to; unused when Mode is ModeAll
}
// Selection modes accepted in a DatTask.
const (
	ModeAll       string = "all"       // include every generated list
	ModeAllowlist string = "allowlist" // include only the named lists
	ModeDenylist  string = "denylist"  // include all lists except the named ones
)
func makeProtoList(listName string, entries []*Entry) *router.GeoSite {
site := &router.GeoSite{
CountryCode: listName,
@@ -76,6 +95,90 @@ func makeProtoList(listName string, entries []*Entry) *router.GeoSite {
return site
}
// loadTasks reads the dat-generation tasks from the JSON config file at
// path and validates them: every task needs a non-empty name and one of
// the recognized modes. The first violation aborts loading with an error.
func loadTasks(path string) ([]DatTask, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	var tasks []DatTask
	if err := json.NewDecoder(f).Decode(&tasks); err != nil {
		return nil, fmt.Errorf("failed to decode json: %w", err)
	}

	for i := range tasks {
		t := &tasks[i]
		if t.Name == "" {
			return nil, fmt.Errorf("task[%d]: name is required", i)
		}
		if t.Mode != ModeAll && t.Mode != ModeAllowlist && t.Mode != ModeDenylist {
			return nil, fmt.Errorf("task[%d] %q: invalid mode %q", i, t.Name, t.Mode)
		}
	}
	return tasks, nil
}
// assembleDat builds the single dat file described by task and writes it
// into *outputDir under the lowercased basename of task.Name.
//
// Mode semantics:
//   - ModeAll:       every list in gs.Sites is included.
//   - ModeAllowlist: only the named lists are included; an unknown name
//     is a hard error.
//   - ModeDenylist:  all lists except the named ones; an unknown name is
//     only warned about, since the denial still makes sense without it.
//
// Lists are matched case-insensitively via strings.ToUpper against the
// SiteIdx keys built in run().
func (gs *GeoSites) assembleDat(task DatTask) error {
	datFileName := strings.ToLower(filepath.Base(task.Name))
	geoSiteList := new(router.GeoSiteList)
	switch task.Mode {
	case ModeAll:
		geoSiteList.Entry = gs.Sites
	case ModeAllowlist:
		allowedIdxes := make([]int, 0, len(task.Lists))
		for _, list := range task.Lists {
			idx, ok := gs.SiteIdx[strings.ToUpper(list)]
			if !ok {
				return fmt.Errorf("list %q not found for allowlist task", list)
			}
			allowedIdxes = append(allowedIdxes, idx)
		}
		// Sort for reproducible output, then drop duplicates so a list
		// repeated in the config does not yield duplicate dat entries.
		slices.Sort(allowedIdxes)
		allowedIdxes = slices.Compact(allowedIdxes)
		if len(allowedIdxes) == 0 {
			return fmt.Errorf("allowlist needs at least one valid list")
		}
		geoSiteList.Entry = make([]*router.GeoSite, len(allowedIdxes))
		for i, idx := range allowedIdxes {
			geoSiteList.Entry[i] = gs.Sites[idx]
		}
	case ModeDenylist:
		deniedMap := make(map[int]bool, len(task.Lists))
		for _, list := range task.Lists {
			if idx, ok := gs.SiteIdx[strings.ToUpper(list)]; ok {
				deniedMap[idx] = true
			} else {
				// Fix: warnings previously lacked the trailing newline and
				// ran into the next line of output.
				fmt.Printf("[Warn] list %q not found in denylist task %q\n", list, task.Name)
			}
		}
		deniedlen := len(deniedMap)
		if deniedlen == 0 {
			fmt.Printf("[Warn] nothing to deny in task %q\n", task.Name)
			geoSiteList.Entry = gs.Sites
		} else {
			geoSiteList.Entry = make([]*router.GeoSite, 0, len(gs.Sites)-deniedlen)
			for i, site := range gs.Sites {
				if !deniedMap[i] {
					geoSiteList.Entry = append(geoSiteList.Entry, site)
				}
			}
		}
	}
	protoBytes, err := proto.Marshal(geoSiteList)
	if err != nil {
		return fmt.Errorf("failed to marshal: %w", err)
	}
	if err := os.WriteFile(filepath.Join(*outputDir, datFileName), protoBytes, 0644); err != nil {
		return fmt.Errorf("failed to write file %q: %w", datFileName, err)
	}
	fmt.Printf("dat %q has been generated successfully\n", datFileName)
	return nil
}
func writePlainList(listname string, entries []*Entry) error {
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(listname)+".txt"))
if err != nil {
@@ -443,25 +546,39 @@ func run() error {
}
}
// Generate dat file
protoList := &router.GeoSiteList{Entry: make([]*router.GeoSite, 0, sitesCount)}
// Generate proto sites
gs := &GeoSites{
Sites: make([]*router.GeoSite, 0, sitesCount),
SiteIdx: make(map[string]int, sitesCount),
}
for siteName, siteEntries := range processor.finalMap {
protoList.Entry = append(protoList.Entry, makeProtoList(siteName, siteEntries))
gs.Sites = append(gs.Sites, makeProtoList(siteName, siteEntries))
}
processor = nil
// Sort protoList so the marshaled list is reproducible
slices.SortFunc(protoList.Entry, func(a, b *router.GeoSite) int {
// Sort proto sites so the generated file is reproducible
slices.SortFunc(gs.Sites, func(a, b *router.GeoSite) int {
return strings.Compare(a.CountryCode, b.CountryCode)
})
for i := range sitesCount {
gs.SiteIdx[gs.Sites[i].CountryCode] = i
}
protoBytes, err := proto.Marshal(protoList)
if err != nil {
return fmt.Errorf("failed to marshal: %w", err)
// Load tasks and generate dat files
var tasks []DatTask
if *datProfile == "" {
tasks = []DatTask{{Name: *outputName, Mode: ModeAll}}
} else {
var err error
tasks, err = loadTasks(*datProfile)
if err != nil {
return fmt.Errorf("failed to loadTasks %q: %v", *datProfile, err)
}
}
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
return fmt.Errorf("failed to write output: %w", err)
for _, task := range tasks {
if err := gs.assembleDat(task); err != nil {
fmt.Printf("[Error] failed to assembleDat %q: %v", task.Name, err)
}
}
fmt.Printf("%q has been generated successfully\n", *outputName)
return nil
}