mirror of
https://github.com/v2fly/domain-list-community.git
synced 2026-03-06 19:50:43 +07:00
Compare commits
32 Commits
2026012101
...
2026020405
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
daf4c10d0c | ||
|
|
a188c2c058 | ||
|
|
947556aa16 | ||
|
|
44de14725e | ||
|
|
c638ec66f0 | ||
|
|
4c8b1438f8 | ||
|
|
3399285ea9 | ||
|
|
62346cf6b7 | ||
|
|
8dee321846 | ||
|
|
b117cf851f | ||
|
|
0b6606758d | ||
|
|
fcf9c67d83 | ||
|
|
56e0b47c73 | ||
|
|
4f45866be4 | ||
|
|
40d763daca | ||
|
|
6c91898557 | ||
|
|
91da593233 | ||
|
|
9f1c6b6922 | ||
|
|
b3bae7de8f | ||
|
|
4e9b28f951 | ||
|
|
3c0a538219 | ||
|
|
2160230ef9 | ||
|
|
5c38f34456 | ||
|
|
8e62b9b541 | ||
|
|
85edae7ba1 | ||
|
|
1bd07b2e76 | ||
|
|
614a880a55 | ||
|
|
676832d14a | ||
|
|
a2f08a142c | ||
|
|
2359ad7f8e | ||
|
|
330592feff | ||
|
|
f44fbc801d |
6
.github/workflows/build.yml
vendored
6
.github/workflows/build.yml
vendored
@@ -33,15 +33,17 @@ jobs:
|
|||||||
echo "TAG_NAME=$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
|
echo "TAG_NAME=$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
- name: Build dlc.dat file
|
- name: Build dlc.dat and plain lists
|
||||||
run: |
|
run: |
|
||||||
cd code || exit 1
|
cd code || exit 1
|
||||||
go run ./ --outputdir=../ --exportlists=category-ads-all,tld-cn,cn,tld-\!cn,geolocation-\!cn,apple,icloud
|
go run ./ --outputdir=../ --exportlists=category-ads-all,tld-cn,cn,tld-\!cn,geolocation-\!cn,apple,icloud
|
||||||
|
go run ./cmd/datdump/main.go --inputdata=../dlc.dat --outputdir=../ --exportlists=_all_
|
||||||
cd ../ && rm -rf code
|
cd ../ && rm -rf code
|
||||||
|
|
||||||
- name: Generate dlc.dat sha256 hash
|
- name: Generate dlc.dat sha256 hash
|
||||||
run: |
|
run: |
|
||||||
sha256sum dlc.dat > dlc.dat.sha256sum
|
sha256sum dlc.dat > dlc.dat.sha256sum
|
||||||
|
sha256sum dlc.dat_plain.yml > dlc.dat_plain.yml.sha256sum
|
||||||
|
|
||||||
- name: Generate Zip
|
- name: Generate Zip
|
||||||
run: |
|
run: |
|
||||||
@@ -66,6 +68,6 @@ jobs:
|
|||||||
|
|
||||||
- name: Release and upload assets
|
- name: Release and upload assets
|
||||||
run: |
|
run: |
|
||||||
gh release create ${{ env.TAG_NAME }} --generate-notes --latest --title ${{ env.RELEASE_NAME }} ./dlc.dat ./dlc.dat.*
|
gh release create ${{ env.TAG_NAME }} --generate-notes --latest --title ${{ env.RELEASE_NAME }} ./dlc.dat ./dlc.dat.* ./dlc.dat_plain.yml ./dlc.dat_plain.yml.*
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -8,4 +8,5 @@
|
|||||||
dlc.dat
|
dlc.dat
|
||||||
|
|
||||||
# Exported plaintext lists.
|
# Exported plaintext lists.
|
||||||
|
/*.yml
|
||||||
/*.txt
|
/*.txt
|
||||||
|
|||||||
47
README.md
47
README.md
@@ -10,12 +10,12 @@ This project is not opinionated. In other words, it does NOT endorse, claim or i
|
|||||||
|
|
||||||
- **dlc.dat**:[https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat)
|
- **dlc.dat**:[https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat)
|
||||||
- **dlc.dat.sha256sum**:[https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat.sha256sum](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat.sha256sum)
|
- **dlc.dat.sha256sum**:[https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat.sha256sum](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat.sha256sum)
|
||||||
|
- **dlc.dat_plain.yml**:[https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat_plain.yml](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat_plain.yml)
|
||||||
|
- **dlc.dat_plain.yml.sha256sum**:[https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat_plain.yml.sha256sum](https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat_plain.yml.sha256sum)
|
||||||
|
|
||||||
## Notice
|
## Notice
|
||||||
|
|
||||||
Rules with `@!cn` attribute has been cast out from cn lists. `geosite:geolocation-cn@!cn` is no longer available.
|
Rules with the `@!cn` attribute have been cast out from cn lists. `geosite:geolocation-cn@!cn` is no longer available. Check [#390](https://github.com/v2fly/domain-list-community/issues/390), [#3119](https://github.com/v2fly/domain-list-community/pull/3119) and [#3198](https://github.com/v2fly/domain-list-community/pull/3198) for more information.
|
||||||
|
|
||||||
Check [#390](https://github.com/v2fly/domain-list-community/issues/390), [#3119](https://github.com/v2fly/domain-list-community/pull/3119) and [#3198](https://github.com/v2fly/domain-list-community/pull/3198) for more information.
|
|
||||||
|
|
||||||
Please report if you have any problems or questions.
|
Please report if you have any problems or questions.
|
||||||
|
|
||||||
@@ -93,38 +93,45 @@ All data are under `data` directory. Each file in the directory represents a sub
|
|||||||
# comments
|
# comments
|
||||||
include:another-file
|
include:another-file
|
||||||
domain:google.com @attr1 @attr2
|
domain:google.com @attr1 @attr2
|
||||||
|
full:analytics.google.com @ads
|
||||||
keyword:google
|
keyword:google
|
||||||
regexp:www\.google\.com$
|
regexp:^odd[1-7]\.example\.org(\.[a-z]{2})?$
|
||||||
full:www.google.com
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Syntax:**
|
**Syntax:**
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> Adding new `regexp` and `keyword` rules is discouraged because it is easy to use them incorrectly, and proxy software cannot efficiently match these types of rules.
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
> The following types of rules are **NOT** fully compatible with the ones that defined by user in V2Ray config file. Do **Not** copy and paste directly.
|
> The following types of rules are **NOT** fully compatible with the ones that defined by user in V2Ray config file. Do **Not** copy and paste directly.
|
||||||
|
|
||||||
- Comment begins with `#`. It may begin anywhere in the file. The content in the line after `#` is treated as comment and ignored in production.
|
- Comment begins with `#`. It may begin anywhere in the file. The content in the line after `#` is treated as comment and ignored in production.
|
||||||
- Inclusion begins with `include:`, followed by the file name of an existing file in the same directory.
|
|
||||||
- Subdomain begins with `domain:`, followed by a valid domain name. The prefix `domain:` may be omitted.
|
- Subdomain begins with `domain:`, followed by a valid domain name. The prefix `domain:` may be omitted.
|
||||||
- Keyword begins with `keyword:`, followed by a string.
|
|
||||||
- Regular expression begins with `regexp:`, followed by a valid regular expression (per Golang's standard).
|
|
||||||
- Full domain begins with `full:`, followed by a complete and valid domain name.
|
- Full domain begins with `full:`, followed by a complete and valid domain name.
|
||||||
- Domains (including `domain`, `keyword`, `regexp` and `full`) may have one or more attributes. Each attribute begins with `@` and followed by the name of the attribute.
|
- Keyword begins with `keyword:`, followed by a substring of a valid domain name.
|
||||||
|
- Regular expression begins with `regexp:`, followed by a valid regular expression (per Golang's standard).
|
||||||
> **Note:** Adding new `regexp` and `keyword` rules is discouraged because it is easy to use them incorrectly, and proxy software cannot efficiently match these types of rules.
|
- Domain rules (including `domain`, `full`, `keyword`, and `regexp`) may have none, one or more attributes. Each attribute begins with `@` and is followed by the name of the attribute. Attributes will remain available in final lists and `dlc.dat`.
|
||||||
|
- Domain rules may have none, one or more affiliations, each of which additionally adds the domain rule into the affiliated target list. Each affiliation begins with `&` and is followed by the name of the target list (no matter whether the target has a dedicated file in the data path). This is a method for data management, and will not remain in the final lists or `dlc.dat`.
|
||||||
|
- Inclusion begins with `include:`, followed by the name of another valid domain list. A simple `include:listb` in file `lista` means adding all domain rules of `listb` into `lista`. Inclusions with attributes stand for selective inclusion. `include:listb @attr1 @-attr2` means only adding those domain rules *with* `@attr1` **and** *without* `@attr2`. This is a special type for data management, and will not remain in the final lists or `dlc.dat`.
|
||||||
|
|
||||||
## How it works
|
## How it works
|
||||||
|
|
||||||
The entire `data` directory will be built into an external `geosite` file for Project V. Each file in the directory represents a section in the generated file.
|
The entire `data` directory will be built into an external `geosite` file for Project V. Each file in the directory represents a section in the generated file.
|
||||||
|
|
||||||
To generate a section:
|
**General steps:**
|
||||||
|
|
||||||
1. Remove all the comments in the file.
|
1. Read files in the data path (ignore all comments and empty lines).
|
||||||
2. Replace `include:` lines with the actual content of the file.
|
2. Parse and resolve source data, turn affiliations and inclusions into actual domain rules in proper lists.
|
||||||
3. Omit all empty lines.
|
3. Deduplicate and sort rules in every list.
|
||||||
4. Generate each `domain:` line into a [sub-domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L21).
|
4. Export desired plain text lists.
|
||||||
5. Generate each `full:` line into a [full domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L23).
|
5. Generate `dlc.dat`:
|
||||||
6. Generate each `keyword:` line into a [plain domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L17).
|
- turn each `domain:` line into a [sub-domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L21).
|
||||||
7. Generate each `regexp:` line into a [regex domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L19).
|
- turn each `full:` line into a [full domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L23).
|
||||||
|
- turn each `keyword:` line into a [plain domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L17).
|
||||||
|
- turn each `regexp:` line into a [regex domain routing rule](https://github.com/v2fly/v2ray-core/blob/master/app/router/routercommon/common.proto#L19).
|
||||||
|
|
||||||
|
Read [main.go](./main.go) for details.
|
||||||
|
|
||||||
## How to organize domains
|
## How to organize domains
|
||||||
|
|
||||||
@@ -134,7 +141,7 @@ Theoretically any string can be used as the name, as long as it is a valid file
|
|||||||
|
|
||||||
### Attributes
|
### Attributes
|
||||||
|
|
||||||
Attribute is useful for sub-group of domains, especially for filtering purpose. For example, the list of `google` domains may contains its main domains, as well as domains that serve ads. The ads domains may be marked by attribute `@ads`, and can be used as `geosite:google@ads` in V2Ray routing.
|
Attribute is useful for sub-groups of domains, especially for filtering purposes. For example, the list of `google` may contain its main domains, as well as domains that serve ads. The ads domains may be marked by attribute `@ads`, and can be used as `geosite:google@ads` in V2Ray routing. Domains and services that originate from outside mainland China but have access points in mainland China may be marked by attribute `@cn`.
|
||||||
|
|
||||||
## Contribution guideline
|
## Contribution guideline
|
||||||
|
|
||||||
|
|||||||
169
cmd/datdump/main.go
Normal file
169
cmd/datdump/main.go
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/v2fly/domain-list-community/internal/dlc"
|
||||||
|
router "github.com/v2fly/v2ray-core/v5/app/router/routercommon"
|
||||||
|
"google.golang.org/protobuf/proto"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Command-line flags controlling the input file, the output location,
// and which lists get exported.
var (
	// Path to the compiled geosite data file to read.
	inputData = flag.String("inputdata", "dlc.dat", "Name of the geosite dat file")
	// Directory where all generated plain-text files are written.
	outputDir = flag.String("outputdir", "./", "Directory to place all generated files")
	// Comma-separated names of lists to export; empty behaves like "_all_".
	exportLists = flag.String("exportlists", "", "Lists to be exported, separated by ',' (empty for _all_)")
)
|
||||||
|
|
||||||
|
// DomainRule is one routing rule decoded from the geosite data: a rule
// type, its value, and any attribute tags attached to it.
type DomainRule struct {
	Type  string   // rule kind: one of the dlc.RuleType* constants
	Value string   // the domain, keyword, or regular expression text
	Attrs []string // attribute names without the leading '@'; nil when none
}
|
||||||
|
|
||||||
|
// DomainList is one named geosite list together with all of its rules,
// in the order they appear in the data file.
type DomainList struct {
	Name  string       // upper-cased list name (set by loadGeosite)
	Rules []DomainRule // rules belonging to this list
}
|
||||||
|
|
||||||
|
func (d *DomainRule) domain2String() string {
|
||||||
|
var dstr strings.Builder
|
||||||
|
dstr.Grow(len(d.Type) + len(d.Value) + 10)
|
||||||
|
fmt.Fprintf(&dstr, "%s:%s", d.Type, d.Value)
|
||||||
|
if len(d.Attrs) != 0 {
|
||||||
|
fmt.Fprintf(&dstr, ":@%s", strings.Join(d.Attrs, ",@"))
|
||||||
|
}
|
||||||
|
return dstr.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadGeosite(path string) ([]DomainList, map[string]*DomainList, error) {
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("failed to read geosite file: %w", err)
|
||||||
|
}
|
||||||
|
vgeositeList := new(router.GeoSiteList)
|
||||||
|
if err := proto.Unmarshal(data, vgeositeList); err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("failed to unmarshal: %w", err)
|
||||||
|
}
|
||||||
|
domainLists := make([]DomainList, len(vgeositeList.Entry))
|
||||||
|
domainListByName := make(map[string]*DomainList, len(vgeositeList.Entry))
|
||||||
|
for i, vsite := range vgeositeList.Entry {
|
||||||
|
rules := make([]DomainRule, 0, len(vsite.Domain))
|
||||||
|
for _, vdomain := range vsite.Domain {
|
||||||
|
rule := DomainRule{Value: vdomain.Value}
|
||||||
|
switch vdomain.Type {
|
||||||
|
case router.Domain_RootDomain:
|
||||||
|
rule.Type = dlc.RuleTypeDomain
|
||||||
|
case router.Domain_Regex:
|
||||||
|
rule.Type = dlc.RuleTypeRegexp
|
||||||
|
case router.Domain_Plain:
|
||||||
|
rule.Type = dlc.RuleTypeKeyword
|
||||||
|
case router.Domain_Full:
|
||||||
|
rule.Type = dlc.RuleTypeFullDomain
|
||||||
|
default:
|
||||||
|
return nil, nil, fmt.Errorf("invalid rule type: %+v", vdomain.Type)
|
||||||
|
}
|
||||||
|
for _, vattr := range vdomain.Attribute {
|
||||||
|
rule.Attrs = append(rule.Attrs, vattr.Key)
|
||||||
|
}
|
||||||
|
rules = append(rules, rule)
|
||||||
|
}
|
||||||
|
domainLists[i] = DomainList{
|
||||||
|
Name: strings.ToUpper(vsite.CountryCode),
|
||||||
|
Rules: rules,
|
||||||
|
}
|
||||||
|
domainListByName[domainLists[i].Name] = &domainLists[i]
|
||||||
|
}
|
||||||
|
return domainLists, domainListByName, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// exportSite writes one named list as a YAML file "<name>.yml" inside
// *outputDir: a single top-level key (the list name) mapping to a
// sequence of quoted rules. The lookup is case-insensitive because
// list names are stored upper-cased by loadGeosite. It fails when the
// list does not exist or holds no rules.
//
// NOTE(review): the " - %q\n" literal below looks like it should carry
// YAML sequence indentation; the text extraction this file came through
// may have collapsed runs of spaces — confirm against upstream source.
func exportSite(name string, domainListByName map[string]*DomainList) error {
	domainList, ok := domainListByName[strings.ToUpper(name)]
	if !ok {
		return fmt.Errorf("list %q does not exist", name)
	}
	if len(domainList.Rules) == 0 {
		return fmt.Errorf("list %q is empty", name)
	}
	file, err := os.Create(filepath.Join(*outputDir, name+".yml"))
	if err != nil {
		return err
	}
	defer file.Close()
	// Buffer writes; the single Flush below surfaces any write error.
	w := bufio.NewWriter(file)
	fmt.Fprintf(w, "%s:\n", name)
	for _, domain := range domainList.Rules {
		fmt.Fprintf(w, " - %q\n", domain.domain2String())
	}
	return w.Flush()
}
|
||||||
|
|
||||||
|
// exportAll writes every list into one YAML file named filename inside
// *outputDir, as a top-level "lists:" sequence where each entry carries
// the lower-cased list name, its rule count, and all quoted rules.
//
// NOTE(review): the nested YAML indentation in the format strings below
// appears collapsed to single spaces (likely by text extraction) —
// confirm the exact indent widths against the upstream source.
func exportAll(filename string, domainLists []DomainList) error {
	file, err := os.Create(filepath.Join(*outputDir, filename))
	if err != nil {
		return err
	}
	defer file.Close()
	// Buffer writes; the single Flush below surfaces any write error.
	w := bufio.NewWriter(file)
	w.WriteString("lists:\n")
	for _, domainList := range domainLists {
		fmt.Fprintf(w, " - name: %s\n", strings.ToLower(domainList.Name))
		fmt.Fprintf(w, " length: %d\n", len(domainList.Rules))
		w.WriteString(" rules:\n")
		for _, domain := range domainList.Rules {
			fmt.Fprintf(w, " - %q\n", domain.domain2String())
		}
	}
	return w.Flush()
}
|
||||||
|
|
||||||
|
func run() error {
|
||||||
|
// Make sure output directory exists
|
||||||
|
if err := os.MkdirAll(*outputDir, 0755); err != nil {
|
||||||
|
return fmt.Errorf("failed to create output directory: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("loading source data %q...\n", *inputData)
|
||||||
|
domainLists, domainListByName, err := loadGeosite(*inputData)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to loadGeosite: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var exportListSlice []string
|
||||||
|
for raw := range strings.SplitSeq(*exportLists, ",") {
|
||||||
|
if trimmed := strings.TrimSpace(raw); trimmed != "" {
|
||||||
|
exportListSlice = append(exportListSlice, trimmed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(exportListSlice) == 0 {
|
||||||
|
exportListSlice = []string{"_all_"}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, eplistname := range exportListSlice {
|
||||||
|
if strings.EqualFold(eplistname, "_all_") {
|
||||||
|
if err := exportAll(filepath.Base(*inputData)+"_plain.yml", domainLists); err != nil {
|
||||||
|
fmt.Printf("failed to exportAll: %v\n", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := exportSite(eplistname, domainListByName); err != nil {
|
||||||
|
fmt.Printf("failed to exportSite: %v\n", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Printf("list: %q has been exported successfully.\n", eplistname)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
if err := run(); err != nil {
|
||||||
|
fmt.Printf("Fatal error: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1 +0,0 @@
|
|||||||
adcolony.com @ads
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
adjust.com @ads
|
adjust.com @ads
|
||||||
adjust.net.in @ads
|
|
||||||
adjust.io @ads
|
adjust.io @ads
|
||||||
|
adjust.net.in @ads
|
||||||
adjust.world @ads
|
adjust.world @ads
|
||||||
@@ -756,6 +756,7 @@ full:amp-api-edge.apps.apple.com @cn
|
|||||||
full:amp-api-search-edge.apps.apple.com @cn
|
full:amp-api-search-edge.apps.apple.com @cn
|
||||||
full:amp-api.apps.apple.com @cn
|
full:amp-api.apps.apple.com @cn
|
||||||
full:amp-api.music.apple.com @cn
|
full:amp-api.music.apple.com @cn
|
||||||
|
full:aod-ssl.itunes.apple.com @cn
|
||||||
full:aod.itunes.apple.com @cn
|
full:aod.itunes.apple.com @cn
|
||||||
full:api-edge.apps.apple.com @cn
|
full:api-edge.apps.apple.com @cn
|
||||||
full:apptrailers.itunes.apple.com @cn
|
full:apptrailers.itunes.apple.com @cn
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
applovin.com @ads
|
|
||||||
applvn.com @ads
|
|
||||||
@@ -1,7 +1,11 @@
|
|||||||
|
include:trello
|
||||||
|
|
||||||
|
atl-paas.net
|
||||||
|
atlassian-dev.net
|
||||||
atlassian.com
|
atlassian.com
|
||||||
atlassian.net
|
atlassian.net
|
||||||
bitbucket.io
|
bitbucket.io
|
||||||
bitbucket.org
|
bitbucket.org
|
||||||
|
jira.com
|
||||||
|
ss-inf.net
|
||||||
statuspage.io
|
statuspage.io
|
||||||
|
|
||||||
include:trello
|
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
atom-data.io @ads
|
|
||||||
analytics-data.io @ads
|
|
||||||
ironbeast.io @ads
|
|
||||||
@@ -28,8 +28,10 @@ binancezh.top
|
|||||||
|
|
||||||
# API
|
# API
|
||||||
binanceapi.com
|
binanceapi.com
|
||||||
|
binanceru.net
|
||||||
bnbstatic.com
|
bnbstatic.com
|
||||||
bntrace.com
|
bntrace.com
|
||||||
|
bsappapi.com
|
||||||
nftstatic.com
|
nftstatic.com
|
||||||
|
|
||||||
# saas
|
# saas
|
||||||
|
|||||||
9
data/bohemia
Normal file
9
data/bohemia
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
arma3.com
|
||||||
|
armaplatform.com
|
||||||
|
bistudio.com
|
||||||
|
bohemia.net
|
||||||
|
dayz.com
|
||||||
|
makearmanotwar.com
|
||||||
|
silicagame.com
|
||||||
|
vigorgame.com
|
||||||
|
ylands.com
|
||||||
@@ -1,29 +1,21 @@
|
|||||||
# This file contains domains that clearly serving ads
|
# This file contains domains that clearly serving ads
|
||||||
|
|
||||||
include:acfun-ads
|
include:acfun-ads
|
||||||
include:adcolony-ads
|
|
||||||
include:adjust-ads
|
|
||||||
include:adobe-ads
|
include:adobe-ads
|
||||||
include:alibaba-ads
|
include:alibaba-ads
|
||||||
include:amazon-ads
|
include:amazon-ads
|
||||||
include:apple-ads
|
include:apple-ads
|
||||||
include:applovin-ads
|
|
||||||
include:atom-data-ads
|
|
||||||
include:baidu-ads
|
include:baidu-ads
|
||||||
include:bytedance-ads
|
include:bytedance-ads
|
||||||
include:category-ads-ir
|
include:category-ads-ir
|
||||||
include:cctv @ads
|
include:cctv @ads
|
||||||
include:clearbit-ads
|
include:disney @ads
|
||||||
include:dmm-ads
|
include:dmm-ads
|
||||||
include:duolingo-ads
|
include:duolingo-ads
|
||||||
include:emogi-ads
|
include:gamersky @ads
|
||||||
include:flurry-ads
|
|
||||||
include:google-ads
|
include:google-ads
|
||||||
include:growingio-ads
|
include:hetzner @ads
|
||||||
include:hiido-ads
|
|
||||||
include:hotjar-ads
|
|
||||||
include:hunantv-ads
|
include:hunantv-ads
|
||||||
include:inner-active-ads
|
|
||||||
include:iqiyi-ads
|
include:iqiyi-ads
|
||||||
include:jd-ads
|
include:jd-ads
|
||||||
include:kuaishou-ads
|
include:kuaishou-ads
|
||||||
@@ -31,30 +23,25 @@ include:kugou-ads
|
|||||||
include:letv-ads
|
include:letv-ads
|
||||||
include:meta-ads
|
include:meta-ads
|
||||||
include:microsoft-ads
|
include:microsoft-ads
|
||||||
include:mopub-ads
|
|
||||||
include:mxplayer-ads
|
|
||||||
include:netease-ads
|
include:netease-ads
|
||||||
include:newrelic-ads
|
|
||||||
include:ogury-ads
|
|
||||||
include:ookla-speedtest-ads
|
include:ookla-speedtest-ads
|
||||||
include:openx-ads
|
include:openai @ads
|
||||||
include:picacg @ads
|
include:picacg @ads
|
||||||
include:pocoiq-ads
|
include:pikpak @ads
|
||||||
include:pubmatic-ads
|
include:pixiv @ads
|
||||||
include:qihoo360-ads
|
include:qihoo360-ads
|
||||||
include:segment-ads
|
include:samsung @ads
|
||||||
include:sina-ads
|
include:sina-ads
|
||||||
|
include:snap @ads
|
||||||
include:sohu-ads
|
include:sohu-ads
|
||||||
include:spotify-ads
|
include:spotify-ads
|
||||||
include:supersonic-ads
|
|
||||||
include:tagtic-ads
|
|
||||||
include:tappx-ads
|
|
||||||
include:television-ads
|
include:television-ads
|
||||||
include:tencent-ads
|
include:tencent-ads
|
||||||
include:tendcloud @ads
|
include:tendcloud @ads
|
||||||
include:uberads-ads
|
include:twitter @ads
|
||||||
include:umeng-ads
|
include:umeng-ads
|
||||||
include:unity-ads
|
include:unity @ads
|
||||||
|
include:unitychina @ads
|
||||||
include:xhamster-ads
|
include:xhamster-ads
|
||||||
include:xiaomi-ads
|
include:xiaomi-ads
|
||||||
include:ximalaya-ads
|
include:ximalaya-ads
|
||||||
@@ -77,26 +64,32 @@ adservice.sigmob.cn
|
|||||||
adtechus.com
|
adtechus.com
|
||||||
adtrue.com
|
adtrue.com
|
||||||
adxprtz.com
|
adxprtz.com
|
||||||
|
assets.growingio.com
|
||||||
cdn.advertserve.com
|
cdn.advertserve.com
|
||||||
cdn.banclip.com
|
cdn.banclip.com
|
||||||
cfts1tifqr.com
|
cfts1tifqr.com
|
||||||
contentabc.com
|
contentabc.com
|
||||||
cretgate.com
|
cretgate.com
|
||||||
|
data.flurry.com
|
||||||
decide.mixpanel.com
|
decide.mixpanel.com
|
||||||
|
emogi.com
|
||||||
ero-advertising.com
|
ero-advertising.com
|
||||||
eroadvertising.com
|
eroadvertising.com
|
||||||
|
evt.mxplay.com
|
||||||
exoclick.com
|
exoclick.com
|
||||||
exosrv.com
|
exosrv.com
|
||||||
go2.global
|
go2.global
|
||||||
gozendata.com
|
gozendata.com
|
||||||
gzads.com
|
|
||||||
gz-data.com
|
gz-data.com
|
||||||
|
gzads.com
|
||||||
img-bss.csdn.net
|
img-bss.csdn.net
|
||||||
imglnkc.com
|
imglnkc.com
|
||||||
imglnkd.com
|
imglnkd.com
|
||||||
|
inner-active.mobi
|
||||||
innovid.com
|
innovid.com
|
||||||
jads.co
|
jads.co
|
||||||
jl3.yjaxa.top
|
jl3.yjaxa.top
|
||||||
|
js-agent.newrelic.com
|
||||||
juicyads.com
|
juicyads.com
|
||||||
kepler-37b.com
|
kepler-37b.com
|
||||||
leanplum.com
|
leanplum.com
|
||||||
@@ -104,22 +97,26 @@ lqc006.com
|
|||||||
moat.com
|
moat.com
|
||||||
moatads.com
|
moatads.com
|
||||||
mobwithad.com
|
mobwithad.com
|
||||||
|
mopub.com
|
||||||
onesignal.com
|
onesignal.com
|
||||||
realsrv.com
|
realsrv.com
|
||||||
s4yxaqyq95.com
|
s4yxaqyq95.com
|
||||||
shhs-ydd8x2.yjrmss.cn
|
shhs-ydd8x2.yjrmss.cn
|
||||||
|
ssp.api.tappx.com
|
||||||
|
static.hotjar.com
|
||||||
static.javhd.com
|
static.javhd.com
|
||||||
tm-banners.gamingadult.com
|
tm-banners.gamingadult.com
|
||||||
trafficfactory.biz
|
trafficfactory.biz
|
||||||
tsyndicate.com
|
tsyndicate.com
|
||||||
|
uberads.com
|
||||||
wwads.cn
|
wwads.cn
|
||||||
|
|
||||||
# 36Kr
|
|
||||||
adx.36kr.com
|
|
||||||
|
|
||||||
# 12306
|
# 12306
|
||||||
ad.12306.cn
|
ad.12306.cn
|
||||||
|
|
||||||
|
# 36Kr
|
||||||
|
adx.36kr.com
|
||||||
|
|
||||||
# AdHub
|
# AdHub
|
||||||
hubcloud.com.cn
|
hubcloud.com.cn
|
||||||
|
|
||||||
@@ -130,6 +127,10 @@ beizi.biz
|
|||||||
click.ali213.net
|
click.ali213.net
|
||||||
pbmp.ali213.net
|
pbmp.ali213.net
|
||||||
|
|
||||||
|
# AppLovin
|
||||||
|
applovin.com
|
||||||
|
applvn.com
|
||||||
|
|
||||||
# Caixin
|
# Caixin
|
||||||
# regexp:^pinggai\d\.caixin\.com$
|
# regexp:^pinggai\d\.caixin\.com$
|
||||||
full:pinggai0.caixin.com
|
full:pinggai0.caixin.com
|
||||||
@@ -147,12 +148,29 @@ full:pinggai9.caixin.com
|
|||||||
adq.chinaso.com
|
adq.chinaso.com
|
||||||
stat.chinaso.com
|
stat.chinaso.com
|
||||||
|
|
||||||
|
# hiido
|
||||||
|
mlog.hiido.com
|
||||||
|
ylog.hiido.com
|
||||||
|
|
||||||
# Httpool
|
# Httpool
|
||||||
toboads.com
|
toboads.com
|
||||||
|
|
||||||
|
# ironSource Atom
|
||||||
|
analytics-data.io
|
||||||
|
atom-data.io
|
||||||
|
ironbeast.io
|
||||||
|
|
||||||
|
# pocoiq
|
||||||
|
cdn.pocoiq.cn
|
||||||
|
oct.pocoiq.cn
|
||||||
|
|
||||||
# Qiniu
|
# Qiniu
|
||||||
dn-growing.qbox.me
|
dn-growing.qbox.me
|
||||||
|
|
||||||
|
# tagtic
|
||||||
|
g1.tagtic.cn
|
||||||
|
xy-log.tagtic.cn
|
||||||
|
|
||||||
# UNI Marketing
|
# UNI Marketing
|
||||||
ad.unimhk.com
|
ad.unimhk.com
|
||||||
|
|
||||||
|
|||||||
@@ -1,30 +1,34 @@
|
|||||||
# This file contains domains of all ads providers, including both the domains that serves ads, and the domains of providers themselves.
|
# This file contains domains of all ads providers, including both the domains that serves ads, and the domains of providers themselves.
|
||||||
|
include:category-ads
|
||||||
|
|
||||||
|
include:adjust
|
||||||
|
include:clearbit
|
||||||
|
include:ogury
|
||||||
|
include:openx
|
||||||
|
include:pubmatic
|
||||||
|
include:segment
|
||||||
|
include:supersonic
|
||||||
|
include:taboola
|
||||||
|
|
||||||
|
1rx.io @ads
|
||||||
7box.vip @ads
|
7box.vip @ads
|
||||||
ad-delivery.net @ads
|
ad-delivery.net @ads
|
||||||
|
adcolony.com @ads
|
||||||
adinplay.com @ads
|
adinplay.com @ads
|
||||||
adnxs.com @ads
|
adnxs.com @ads
|
||||||
adview.cn @ads
|
|
||||||
ads.trafficjunky.net @ads
|
ads.trafficjunky.net @ads
|
||||||
advertserve.com @ads
|
advertserve.com @ads
|
||||||
|
adview.cn @ads
|
||||||
casalemedia.com @ads
|
casalemedia.com @ads
|
||||||
contextual.media.net @ads
|
contextual.media.net @ads
|
||||||
cpmstar.com @ads
|
cpmstar.com @ads
|
||||||
demdex.net @ads
|
demdex.net @ads
|
||||||
httpool.com @ads
|
httpool.com @ads
|
||||||
lijit.com @ads
|
lijit.com @ads
|
||||||
1rx.io @ads
|
|
||||||
mfadsrvr.com @ads
|
mfadsrvr.com @ads
|
||||||
mgid.com @ads
|
mgid.com @ads
|
||||||
ns1p.net @ads
|
ns1p.net @ads
|
||||||
pubmatic.com @ads
|
|
||||||
sigmob.com @ads
|
sigmob.com @ads
|
||||||
snapads.com @ads
|
|
||||||
spotxchange.com @ads
|
spotxchange.com @ads
|
||||||
unimhk.com @ads
|
unimhk.com @ads
|
||||||
upapi.net @ads
|
upapi.net @ads
|
||||||
|
|
||||||
|
|
||||||
include:taboola
|
|
||||||
include:category-ads
|
|
||||||
|
|||||||
@@ -48,6 +48,7 @@ include:kakao
|
|||||||
include:kaspersky
|
include:kaspersky
|
||||||
include:lg
|
include:lg
|
||||||
include:logitech
|
include:logitech
|
||||||
|
include:louisvuitton
|
||||||
include:mailru-group
|
include:mailru-group
|
||||||
include:meta
|
include:meta
|
||||||
include:microsoft
|
include:microsoft
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ include:segmentfault
|
|||||||
include:sxl
|
include:sxl
|
||||||
include:tencent-dev
|
include:tencent-dev
|
||||||
include:ubuntukylin
|
include:ubuntukylin
|
||||||
|
include:unitychina
|
||||||
|
|
||||||
jinrishici.com
|
jinrishici.com
|
||||||
openvela.com
|
openvela.com
|
||||||
|
|||||||
@@ -142,3 +142,5 @@ ystbds.com
|
|||||||
zhan.com
|
zhan.com
|
||||||
# 智慧树
|
# 智慧树
|
||||||
zhihuishu.com
|
zhihuishu.com
|
||||||
|
# 浙江省教育考试院
|
||||||
|
zjzs.net
|
||||||
|
|||||||
@@ -2,6 +2,9 @@ include:playcover
|
|||||||
include:fflogs
|
include:fflogs
|
||||||
include:trackernetwork
|
include:trackernetwork
|
||||||
|
|
||||||
|
# Anti-Cheat
|
||||||
|
battleye.com
|
||||||
|
|
||||||
# Android Emulator
|
# Android Emulator
|
||||||
bluestacks.com
|
bluestacks.com
|
||||||
ldmnq.com @cn
|
ldmnq.com @cn
|
||||||
@@ -16,5 +19,5 @@ prts.plus
|
|||||||
heavenlywind.cc @cn
|
heavenlywind.cc @cn
|
||||||
poi.moe
|
poi.moe
|
||||||
|
|
||||||
|
# Steam++ / Watt Toolkit
|
||||||
steampp.net @cn
|
steampp.net @cn
|
||||||
|
|||||||
@@ -50,6 +50,8 @@ yeshen.com
|
|||||||
51zmt.top
|
51zmt.top
|
||||||
# 广东南方新媒体
|
# 广东南方新媒体
|
||||||
aisee.tv
|
aisee.tv
|
||||||
|
# 动画巡礼
|
||||||
|
anitabi.cn
|
||||||
# 暴风影音
|
# 暴风影音
|
||||||
baofeng.com
|
baofeng.com
|
||||||
baofeng.net
|
baofeng.net
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
include:2kgames
|
include:2kgames
|
||||||
include:blizzard
|
include:blizzard
|
||||||
include:bluearchive
|
include:bluearchive
|
||||||
|
include:bohemia
|
||||||
include:curseforge
|
include:curseforge
|
||||||
include:cygames
|
include:cygames
|
||||||
include:ea
|
include:ea
|
||||||
include:embark
|
include:embark
|
||||||
|
include:eneba
|
||||||
include:epicgames
|
include:epicgames
|
||||||
include:escapefromtarkov
|
include:escapefromtarkov
|
||||||
include:faceit
|
include:faceit
|
||||||
|
|||||||
@@ -10,3 +10,6 @@ yctdyy.com
|
|||||||
|
|
||||||
# 南方医科大学深圳医院
|
# 南方医科大学深圳医院
|
||||||
smuszh.com
|
smuszh.com
|
||||||
|
|
||||||
|
# 四川大学华西医院
|
||||||
|
cd120.com
|
||||||
|
|||||||
@@ -41,6 +41,9 @@ cloudflarewarp.com
|
|||||||
cloudflareworkers.com
|
cloudflareworkers.com
|
||||||
encryptedsni.com
|
encryptedsni.com
|
||||||
every1dns.net
|
every1dns.net
|
||||||
|
foundationdns.com
|
||||||
|
foundationdns.net
|
||||||
|
foundationdns.org
|
||||||
imagedelivery.net
|
imagedelivery.net
|
||||||
isbgpsafeyet.com
|
isbgpsafeyet.com
|
||||||
one.one.one
|
one.one.one
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
emogi.com @ads
|
|
||||||
2
data/eneba
Normal file
2
data/eneba
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
eneba.com
|
||||||
|
eneba.games
|
||||||
@@ -1 +0,0 @@
|
|||||||
data.flurry.com @ads
|
|
||||||
@@ -271,6 +271,8 @@ ldoceonline.com
|
|||||||
immersivetranslate.com # 沉浸式翻译 (国际版)
|
immersivetranslate.com # 沉浸式翻译 (国际版)
|
||||||
## OriginLab (Graphing for Science and Engineering)
|
## OriginLab (Graphing for Science and Engineering)
|
||||||
originlab.com
|
originlab.com
|
||||||
|
## OsmAnd
|
||||||
|
osmand.net
|
||||||
|
|
||||||
# Software development
|
# Software development
|
||||||
include:category-dev
|
include:category-dev
|
||||||
|
|||||||
@@ -24,7 +24,6 @@ include:category-social-media-cn
|
|||||||
# Advertisment & Analytics
|
# Advertisment & Analytics
|
||||||
include:getui
|
include:getui
|
||||||
include:jiguang
|
include:jiguang
|
||||||
include:umeng
|
|
||||||
|
|
||||||
# 神策数据
|
# 神策数据
|
||||||
sensorsdata.cn
|
sensorsdata.cn
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
assets.growingio.com @ads
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
mlog.hiido.com @ads
|
|
||||||
ylog.hiido.com @ads
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
static.hotjar.com @ads
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
inner-active.mobi @ads
|
|
||||||
@@ -6,4 +6,9 @@ gfw.ovh # sub domains mirror
|
|||||||
mos-gorsud.co # kinopub domain to generate a mirror site through gfw.ovh
|
mos-gorsud.co # kinopub domain to generate a mirror site through gfw.ovh
|
||||||
|
|
||||||
# kinopub CDN servers
|
# kinopub CDN servers
|
||||||
|
cdn-service.space
|
||||||
|
cdn2cdn.com
|
||||||
|
cdn2site.com
|
||||||
|
pushbr.com # poster images CDN
|
||||||
|
|
||||||
regexp:(\w+)-static-[0-9]+\.cdntogo\.net$
|
regexp:(\w+)-static-[0-9]+\.cdntogo\.net$
|
||||||
|
|||||||
5
data/louisvuitton
Normal file
5
data/louisvuitton
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
louisvuitton.cn @cn
|
||||||
|
louisvuitton.com
|
||||||
|
lvcampaign.com @cn
|
||||||
|
|
||||||
|
full:tp.louisvuitton.com @cn
|
||||||
@@ -60,6 +60,7 @@ full:default.exp-tas.com
|
|||||||
full:developer.microsoft.com
|
full:developer.microsoft.com
|
||||||
full:download.visualstudio.microsoft.com
|
full:download.visualstudio.microsoft.com
|
||||||
full:dtlgalleryint.cloudapp.net
|
full:dtlgalleryint.cloudapp.net
|
||||||
|
full:packages.microsoft.com
|
||||||
full:poshtestgallery.cloudapp.net
|
full:poshtestgallery.cloudapp.net
|
||||||
full:psg-int-centralus.cloudapp.net
|
full:psg-int-centralus.cloudapp.net
|
||||||
full:psg-int-eastus.cloudapp.net
|
full:psg-int-eastus.cloudapp.net
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
mopub.com @ads
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
evt.mxplay.com @ads
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
js-agent.newrelic.com @ads
|
|
||||||
@@ -1,3 +1,3 @@
|
|||||||
|
ogury.co @ads
|
||||||
ogury.com @ads
|
ogury.com @ads
|
||||||
presage.io @ads
|
presage.io @ads
|
||||||
ogury.co @ads
|
|
||||||
3
data/okx
3
data/okx
@@ -1,8 +1,9 @@
|
|||||||
okex.com
|
okex.com
|
||||||
okx.com
|
|
||||||
okx-dns.com
|
okx-dns.com
|
||||||
okx-dns1.com
|
okx-dns1.com
|
||||||
okx-dns2.com
|
okx-dns2.com
|
||||||
|
okx.cab
|
||||||
|
okx.com
|
||||||
|
|
||||||
# OKC Browser
|
# OKC Browser
|
||||||
oklink.com @cn
|
oklink.com @cn
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
# Main domain
|
# Main domain
|
||||||
chatgpt.com
|
|
||||||
chat.com
|
chat.com
|
||||||
|
chatgpt.com
|
||||||
|
crixet.com
|
||||||
oaistatic.com
|
oaistatic.com
|
||||||
oaiusercontent.com
|
oaiusercontent.com
|
||||||
openai.com
|
openai.com
|
||||||
@@ -10,13 +11,13 @@ sora.com
|
|||||||
openai.com.cdn.cloudflare.net
|
openai.com.cdn.cloudflare.net
|
||||||
full:openaiapi-site.azureedge.net
|
full:openaiapi-site.azureedge.net
|
||||||
full:openaicom-api-bdcpf8c6d2e9atf6.z01.azurefd.net
|
full:openaicom-api-bdcpf8c6d2e9atf6.z01.azurefd.net
|
||||||
|
full:openaicom.imgix.net
|
||||||
full:openaicomproductionae4b.blob.core.windows.net
|
full:openaicomproductionae4b.blob.core.windows.net
|
||||||
full:production-openaicom-storage.azureedge.net
|
full:production-openaicom-storage.azureedge.net
|
||||||
regexp:^chatgpt-async-webps-prod-\S+-\d+\.webpubsub\.azure\.com$
|
regexp:^chatgpt-async-webps-prod-\S+-\d+\.webpubsub\.azure\.com$
|
||||||
|
|
||||||
# tracking
|
# tracking
|
||||||
full:o33249.ingest.sentry.io @ads
|
full:o33249.ingest.sentry.io @ads
|
||||||
full:openaicom.imgix.net @ads
|
|
||||||
full:browser-intake-datadoghq.com @ads
|
full:browser-intake-datadoghq.com @ads
|
||||||
|
|
||||||
# Advanced Voice
|
# Advanced Voice
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
cdn.pocoiq.cn @ads
|
|
||||||
oct.pocoiq.cn @ads
|
|
||||||
@@ -1 +1,2 @@
|
|||||||
sekai.colorfulpalette.org
|
sekai.colorfulpalette.org
|
||||||
|
pjsekai.sega.jp
|
||||||
|
|||||||
@@ -2,5 +2,3 @@
|
|||||||
|
|
||||||
pubmatic.com
|
pubmatic.com
|
||||||
pubmatic.co.jp
|
pubmatic.co.jp
|
||||||
|
|
||||||
include:pubmatic-ads
|
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
ads.pubmatic.com @ads
|
|
||||||
@@ -44,6 +44,7 @@ dnsv1.com.cn
|
|||||||
dothework.cn
|
dothework.cn
|
||||||
ectencent.cn
|
ectencent.cn
|
||||||
ectencent.com.cn
|
ectencent.com.cn
|
||||||
|
edgeone.cool
|
||||||
edgeonedy1.com
|
edgeonedy1.com
|
||||||
essurl.com
|
essurl.com
|
||||||
exmailgz.com
|
exmailgz.com
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ pardot.com
|
|||||||
quotable.com
|
quotable.com
|
||||||
radian6.com
|
radian6.com
|
||||||
relateiq.com
|
relateiq.com
|
||||||
|
salesforce-setup.com
|
||||||
salesforce.com
|
salesforce.com
|
||||||
salesforce.org
|
salesforce.org
|
||||||
salesforceiq.com
|
salesforceiq.com
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ galaxyappstore.com
|
|||||||
galaxymobile.jp
|
galaxymobile.jp
|
||||||
game-platform.net
|
game-platform.net
|
||||||
knoxemm.com
|
knoxemm.com
|
||||||
|
ospserver.net
|
||||||
samsung.com
|
samsung.com
|
||||||
samsungads.com @ads
|
samsungads.com @ads
|
||||||
samsungapps.com
|
samsungapps.com
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
|
ssacdn.com @ads
|
||||||
supersonic.com @ads
|
supersonic.com @ads
|
||||||
supersonicads.com @ads
|
supersonicads.com @ads
|
||||||
ssacdn.com @ads
|
|
||||||
supersonicads-a.akamaihd.net @ads
|
supersonicads-a.akamaihd.net @ads
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
g1.tagtic.cn @ads
|
|
||||||
xy-log.tagtic.cn @ads
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
ssp.api.tappx.com @ads
|
|
||||||
@@ -1,2 +1,7 @@
|
|||||||
|
1024terabox.com
|
||||||
|
bestclouddrive.com
|
||||||
|
freeterabox.com
|
||||||
|
nephobox.com
|
||||||
terabox.com
|
terabox.com
|
||||||
|
terabox1024.com
|
||||||
teraboxcdn.com
|
teraboxcdn.com
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
uberads.com @ads
|
|
||||||
@@ -1,4 +1,6 @@
|
|||||||
unity.com
|
unity.com
|
||||||
unity3d.com
|
unity3d.com
|
||||||
|
|
||||||
include:unity-ads
|
# Ads/tracking
|
||||||
|
iads.unity3d.com @ads
|
||||||
|
unityads.unity3d.com @ads
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
|
# 优三缔 / 优美缔 / 团结引擎
|
||||||
|
u3d.cn
|
||||||
|
unity.cn
|
||||||
|
unitychina.cn
|
||||||
|
|
||||||
|
# Ads/tracking
|
||||||
ads.unitychina.cn @ads
|
ads.unitychina.cn @ads
|
||||||
splash-ads.cdn.unity.cn @ads
|
splash-ads.cdn.unity.cn @ads
|
||||||
splash-ads.unitychina.cn @ads
|
splash-ads.unitychina.cn @ads
|
||||||
unityads.unity.cn @ads
|
unityads.unity.cn @ads
|
||||||
unityads.unity3d.com @ads
|
|
||||||
unityads.unitychina.cn @ads
|
unityads.unitychina.cn @ads
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
include:askdiandian
|
include:askdiandian
|
||||||
|
|
||||||
|
rednotecdn.com
|
||||||
xhscdn.com
|
xhscdn.com
|
||||||
xhscdn.net
|
xhscdn.net
|
||||||
xhslink.com
|
xhslink.com
|
||||||
|
|||||||
9
internal/dlc/dlc.go
Normal file
9
internal/dlc/dlc.go
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
package dlc
|
||||||
|
|
||||||
|
const (
|
||||||
|
RuleTypeDomain string = "domain"
|
||||||
|
RuleTypeFullDomain string = "full"
|
||||||
|
RuleTypeKeyword string = "keyword"
|
||||||
|
RuleTypeRegexp string = "regexp"
|
||||||
|
RuleTypeInclude string = "include"
|
||||||
|
)
|
||||||
311
main.go
311
main.go
@@ -10,6 +10,7 @@ import (
|
|||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/v2fly/domain-list-community/internal/dlc"
|
||||||
router "github.com/v2fly/v2ray-core/v5/app/router/routercommon"
|
router "github.com/v2fly/v2ray-core/v5/app/router/routercommon"
|
||||||
"google.golang.org/protobuf/proto"
|
"google.golang.org/protobuf/proto"
|
||||||
)
|
)
|
||||||
@@ -21,23 +22,7 @@ var (
|
|||||||
exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma")
|
exportLists = flag.String("exportlists", "", "Lists to be flattened and exported in plaintext format, separated by ',' comma")
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
|
||||||
RuleTypeDomain string = "domain"
|
|
||||||
RuleTypeFullDomain string = "full"
|
|
||||||
RuleTypeKeyword string = "keyword"
|
|
||||||
RuleTypeRegexp string = "regexp"
|
|
||||||
RuleTypeInclude string = "include"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
var (
|
||||||
TypeChecker = regexp.MustCompile(`^(domain|full|keyword|regexp|include)$`)
|
|
||||||
ValueChecker = regexp.MustCompile(`^[a-z0-9!\.-]+$`)
|
|
||||||
AttrChecker = regexp.MustCompile(`^[a-z0-9!-]+$`)
|
|
||||||
SiteChecker = regexp.MustCompile(`^[A-Z0-9!-]+$`)
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
refMap = make(map[string][]*Entry)
|
|
||||||
plMap = make(map[string]*ParsedList)
|
plMap = make(map[string]*ParsedList)
|
||||||
finalMap = make(map[string][]*Entry)
|
finalMap = make(map[string][]*Entry)
|
||||||
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
|
cirIncMap = make(map[string]bool) // Used for circular inclusion detection
|
||||||
@@ -66,7 +51,7 @@ type ParsedList struct {
|
|||||||
func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
|
func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
|
||||||
site := &router.GeoSite{
|
site := &router.GeoSite{
|
||||||
CountryCode: listName,
|
CountryCode: listName,
|
||||||
Domain: make([]*router.Domain, 0, len(entries)),
|
Domain: make([]*router.Domain, 0, len(entries)),
|
||||||
}
|
}
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
pdomain := &router.Domain{Value: entry.Value}
|
pdomain := &router.Domain{Value: entry.Value}
|
||||||
@@ -78,13 +63,13 @@ func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
switch entry.Type {
|
switch entry.Type {
|
||||||
case RuleTypeDomain:
|
case dlc.RuleTypeDomain:
|
||||||
pdomain.Type = router.Domain_RootDomain
|
pdomain.Type = router.Domain_RootDomain
|
||||||
case RuleTypeRegexp:
|
case dlc.RuleTypeRegexp:
|
||||||
pdomain.Type = router.Domain_Regex
|
pdomain.Type = router.Domain_Regex
|
||||||
case RuleTypeKeyword:
|
case dlc.RuleTypeKeyword:
|
||||||
pdomain.Type = router.Domain_Plain
|
pdomain.Type = router.Domain_Plain
|
||||||
case RuleTypeFullDomain:
|
case dlc.RuleTypeFullDomain:
|
||||||
pdomain.Type = router.Domain_Full
|
pdomain.Type = router.Domain_Full
|
||||||
}
|
}
|
||||||
site.Domain = append(site.Domain, pdomain)
|
site.Domain = append(site.Domain, pdomain)
|
||||||
@@ -92,18 +77,14 @@ func makeProtoList(listName string, entries []*Entry) (*router.GeoSite, error) {
|
|||||||
return site, nil
|
return site, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func writePlainList(exportedName string) error {
|
func writePlainList(listname string, entries []*Entry) error {
|
||||||
targetList, exist := finalMap[strings.ToUpper(exportedName)]
|
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(listname)+".txt"))
|
||||||
if !exist || len(targetList) == 0 {
|
|
||||||
return fmt.Errorf("'%s' list does not exist or is empty.", exportedName)
|
|
||||||
}
|
|
||||||
file, err := os.Create(filepath.Join(*outputDir, strings.ToLower(exportedName) + ".txt"))
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer file.Close()
|
defer file.Close()
|
||||||
w := bufio.NewWriter(file)
|
w := bufio.NewWriter(file)
|
||||||
for _, entry := range targetList {
|
for _, entry := range entries {
|
||||||
fmt.Fprintln(w, entry.Plain)
|
fmt.Fprintln(w, entry.Plain)
|
||||||
}
|
}
|
||||||
return w.Flush()
|
return w.Flush()
|
||||||
@@ -112,83 +93,126 @@ func writePlainList(exportedName string) error {
|
|||||||
func parseEntry(line string) (Entry, error) {
|
func parseEntry(line string) (Entry, error) {
|
||||||
var entry Entry
|
var entry Entry
|
||||||
parts := strings.Fields(line)
|
parts := strings.Fields(line)
|
||||||
|
if len(parts) == 0 {
|
||||||
|
return entry, fmt.Errorf("empty line")
|
||||||
|
}
|
||||||
|
|
||||||
// Parse type and value
|
// Parse type and value
|
||||||
rawTypeVal := parts[0]
|
v := parts[0]
|
||||||
kv := strings.Split(rawTypeVal, ":")
|
colonIndex := strings.Index(v, ":")
|
||||||
if len(kv) == 1 {
|
if colonIndex == -1 {
|
||||||
entry.Type = RuleTypeDomain // Default type
|
entry.Type = dlc.RuleTypeDomain // Default type
|
||||||
entry.Value = strings.ToLower(rawTypeVal)
|
entry.Value = strings.ToLower(v)
|
||||||
} else if len(kv) == 2 {
|
if !validateDomainChars(entry.Value) {
|
||||||
entry.Type = strings.ToLower(kv[0])
|
return entry, fmt.Errorf("invalid domain: %q", entry.Value)
|
||||||
if entry.Type == RuleTypeRegexp {
|
|
||||||
entry.Value = kv[1]
|
|
||||||
} else {
|
|
||||||
entry.Value = strings.ToLower(kv[1])
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
return entry, fmt.Errorf("invalid format: %s", line)
|
typ := strings.ToLower(v[:colonIndex])
|
||||||
}
|
val := v[colonIndex+1:]
|
||||||
// Check type and value
|
switch typ {
|
||||||
if !TypeChecker.MatchString(entry.Type) {
|
case dlc.RuleTypeRegexp:
|
||||||
return entry, fmt.Errorf("invalid type: %s", entry.Type)
|
if _, err := regexp.Compile(val); err != nil {
|
||||||
}
|
return entry, fmt.Errorf("invalid regexp %q: %w", val, err)
|
||||||
if entry.Type == RuleTypeRegexp {
|
}
|
||||||
if _, err := regexp.Compile(entry.Value); err != nil {
|
entry.Type = dlc.RuleTypeRegexp
|
||||||
return entry, fmt.Errorf("invalid regexp: %s", entry.Value)
|
entry.Value = val
|
||||||
|
case dlc.RuleTypeInclude:
|
||||||
|
entry.Type = dlc.RuleTypeInclude
|
||||||
|
entry.Value = strings.ToUpper(val)
|
||||||
|
if !validateSiteName(entry.Value) {
|
||||||
|
return entry, fmt.Errorf("invalid include list name: %q", entry.Value)
|
||||||
|
}
|
||||||
|
case dlc.RuleTypeDomain, dlc.RuleTypeFullDomain, dlc.RuleTypeKeyword:
|
||||||
|
entry.Type = typ
|
||||||
|
entry.Value = strings.ToLower(val)
|
||||||
|
if !validateDomainChars(entry.Value) {
|
||||||
|
return entry, fmt.Errorf("invalid domain: %q", entry.Value)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return entry, fmt.Errorf("invalid type: %q", typ)
|
||||||
}
|
}
|
||||||
} else if !ValueChecker.MatchString(entry.Value) {
|
|
||||||
return entry, fmt.Errorf("invalid value: %s", entry.Value)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse/Check attributes and affiliations
|
// Parse attributes and affiliations
|
||||||
for _, part := range parts[1:] {
|
for _, part := range parts[1:] {
|
||||||
if strings.HasPrefix(part, "@") {
|
if strings.HasPrefix(part, "@") {
|
||||||
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
|
attr := strings.ToLower(part[1:]) // Trim attribute prefix `@` character
|
||||||
if !AttrChecker.MatchString(attr) {
|
if !validateAttrChars(attr) {
|
||||||
return entry, fmt.Errorf("invalid attribute key: %s", attr)
|
return entry, fmt.Errorf("invalid attribute: %q", attr)
|
||||||
}
|
}
|
||||||
entry.Attrs = append(entry.Attrs, attr)
|
entry.Attrs = append(entry.Attrs, attr)
|
||||||
} else if strings.HasPrefix(part, "&") {
|
} else if strings.HasPrefix(part, "&") {
|
||||||
aff := strings.ToUpper(part[1:]) // Trim affiliation prefix `&` character
|
aff := strings.ToUpper(part[1:]) // Trim affiliation prefix `&` character
|
||||||
if !SiteChecker.MatchString(aff) {
|
if !validateSiteName(aff) {
|
||||||
return entry, fmt.Errorf("invalid affiliation key: %s", aff)
|
return entry, fmt.Errorf("invalid affiliation: %q", aff)
|
||||||
}
|
}
|
||||||
entry.Affs = append(entry.Affs, aff)
|
entry.Affs = append(entry.Affs, aff)
|
||||||
} else {
|
} else {
|
||||||
return entry, fmt.Errorf("invalid attribute/affiliation: %s", part)
|
return entry, fmt.Errorf("invalid attribute/affiliation: %q", part)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Sort attributes
|
// Sort attributes
|
||||||
slices.Sort(entry.Attrs)
|
slices.Sort(entry.Attrs)
|
||||||
// Formated plain entry: type:domain.tld:@attr1,@attr2
|
// Formated plain entry: type:domain.tld:@attr1,@attr2
|
||||||
entry.Plain = entry.Type + ":" + entry.Value
|
var plain strings.Builder
|
||||||
|
plain.Grow(len(entry.Type) + len(entry.Value) + 10)
|
||||||
|
fmt.Fprintf(&plain, "%s:%s", entry.Type, entry.Value)
|
||||||
if len(entry.Attrs) != 0 {
|
if len(entry.Attrs) != 0 {
|
||||||
entry.Plain = entry.Plain + ":@" + strings.Join(entry.Attrs, ",@")
|
fmt.Fprintf(&plain, ":@%s", strings.Join(entry.Attrs, ",@"))
|
||||||
}
|
}
|
||||||
|
entry.Plain = plain.String()
|
||||||
|
|
||||||
return entry, nil
|
return entry, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadData(path string) error {
|
func validateDomainChars(domain string) bool {
|
||||||
|
for i := range domain {
|
||||||
|
c := domain[i]
|
||||||
|
if (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '.' || c == '-' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateAttrChars(attr string) bool {
|
||||||
|
for i := range attr {
|
||||||
|
c := attr[i]
|
||||||
|
if (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '!' || c == '-' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateSiteName(name string) bool {
|
||||||
|
for i := range name {
|
||||||
|
c := name[i]
|
||||||
|
if (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '!' || c == '-' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadData(path string) ([]*Entry, error) {
|
||||||
file, err := os.Open(path)
|
file, err := os.Open(path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer file.Close()
|
defer file.Close()
|
||||||
|
|
||||||
listName := strings.ToUpper(filepath.Base(path))
|
var entries []*Entry
|
||||||
if !SiteChecker.MatchString(listName) {
|
|
||||||
return fmt.Errorf("invalid list name: %s", listName)
|
|
||||||
}
|
|
||||||
scanner := bufio.NewScanner(file)
|
scanner := bufio.NewScanner(file)
|
||||||
lineIdx := 0
|
lineIdx := 0
|
||||||
for scanner.Scan() {
|
for scanner.Scan() {
|
||||||
line := scanner.Text()
|
line := scanner.Text()
|
||||||
lineIdx++
|
lineIdx++
|
||||||
// Remove comments
|
|
||||||
if idx := strings.Index(line, "#"); idx != -1 {
|
if idx := strings.Index(line, "#"); idx != -1 {
|
||||||
line = line[:idx]
|
line = line[:idx] // Remove comments
|
||||||
}
|
}
|
||||||
line = strings.TrimSpace(line)
|
line = strings.TrimSpace(line)
|
||||||
if line == "" {
|
if line == "" {
|
||||||
@@ -196,11 +220,11 @@ func loadData(path string) error {
|
|||||||
}
|
}
|
||||||
entry, err := parseEntry(line)
|
entry, err := parseEntry(line)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error in %s at line %d: %v", path, lineIdx, err)
|
return entries, fmt.Errorf("error in %q at line %d: %w", path, lineIdx, err)
|
||||||
}
|
}
|
||||||
refMap[listName] = append(refMap[listName], &entry)
|
entries = append(entries, &entry)
|
||||||
}
|
}
|
||||||
return nil
|
return entries, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func parseList(refName string, refList []*Entry) error {
|
func parseList(refName string, refList []*Entry) error {
|
||||||
@@ -210,11 +234,11 @@ func parseList(refName string, refList []*Entry) error {
|
|||||||
plMap[refName] = pl
|
plMap[refName] = pl
|
||||||
}
|
}
|
||||||
for _, entry := range refList {
|
for _, entry := range refList {
|
||||||
if entry.Type == RuleTypeInclude {
|
if entry.Type == dlc.RuleTypeInclude {
|
||||||
if len(entry.Affs) != 0 {
|
if len(entry.Affs) != 0 {
|
||||||
return fmt.Errorf("affiliation is not allowed for include:%s", entry.Value)
|
return fmt.Errorf("affiliation is not allowed for include:%q", entry.Value)
|
||||||
}
|
}
|
||||||
inc := &Inclusion{Source: strings.ToUpper(entry.Value)}
|
inc := &Inclusion{Source: entry.Value}
|
||||||
for _, attr := range entry.Attrs {
|
for _, attr := range entry.Attrs {
|
||||||
if strings.HasPrefix(attr, "-") {
|
if strings.HasPrefix(attr, "-") {
|
||||||
inc.BanAttrs = append(inc.BanAttrs, attr[1:]) // Trim attribute prefix `-` character
|
inc.BanAttrs = append(inc.BanAttrs, attr[1:]) // Trim attribute prefix `-` character
|
||||||
@@ -238,24 +262,44 @@ func parseList(refName string, refList []*Entry) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func polishList(roughMap *map[string]*Entry) []*Entry {
|
func isMatchAttrFilters(entry *Entry, incFilter *Inclusion) bool {
|
||||||
finalList := make([]*Entry, 0, len(*roughMap))
|
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 {
|
||||||
queuingList := make([]*Entry, 0, len(*roughMap)) // Domain/full entries without attr
|
return true
|
||||||
|
}
|
||||||
|
if len(entry.Attrs) == 0 {
|
||||||
|
return len(incFilter.MustAttrs) == 0
|
||||||
|
}
|
||||||
|
for _, m := range incFilter.MustAttrs {
|
||||||
|
if !slices.Contains(entry.Attrs, m) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, b := range incFilter.BanAttrs {
|
||||||
|
if slices.Contains(entry.Attrs, b) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func polishList(roughMap map[string]*Entry) []*Entry {
|
||||||
|
finalList := make([]*Entry, 0, len(roughMap))
|
||||||
|
queuingList := make([]*Entry, 0, len(roughMap)) // Domain/full entries without attr
|
||||||
domainsMap := make(map[string]bool)
|
domainsMap := make(map[string]bool)
|
||||||
for _, entry := range *roughMap {
|
for _, entry := range roughMap {
|
||||||
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
|
switch entry.Type { // Bypass regexp, keyword and "full/domain with attr"
|
||||||
case RuleTypeRegexp:
|
case dlc.RuleTypeRegexp:
|
||||||
finalList = append(finalList, entry)
|
finalList = append(finalList, entry)
|
||||||
case RuleTypeKeyword:
|
case dlc.RuleTypeKeyword:
|
||||||
finalList = append(finalList, entry)
|
finalList = append(finalList, entry)
|
||||||
case RuleTypeDomain:
|
case dlc.RuleTypeDomain:
|
||||||
domainsMap[entry.Value] = true
|
domainsMap[entry.Value] = true
|
||||||
if len(entry.Attrs) != 0 {
|
if len(entry.Attrs) != 0 {
|
||||||
finalList = append(finalList, entry)
|
finalList = append(finalList, entry)
|
||||||
} else {
|
} else {
|
||||||
queuingList = append(queuingList, entry)
|
queuingList = append(queuingList, entry)
|
||||||
}
|
}
|
||||||
case RuleTypeFullDomain:
|
case dlc.RuleTypeFullDomain:
|
||||||
if len(entry.Attrs) != 0 {
|
if len(entry.Attrs) != 0 {
|
||||||
finalList = append(finalList, entry)
|
finalList = append(finalList, entry)
|
||||||
} else {
|
} else {
|
||||||
@@ -266,12 +310,16 @@ func polishList(roughMap *map[string]*Entry) []*Entry {
|
|||||||
// Remove redundant subdomains for full/domain without attr
|
// Remove redundant subdomains for full/domain without attr
|
||||||
for _, qentry := range queuingList {
|
for _, qentry := range queuingList {
|
||||||
isRedundant := false
|
isRedundant := false
|
||||||
pd := qentry.Value // Parent domain
|
pd := qentry.Value // To be parent domain
|
||||||
|
if qentry.Type == dlc.RuleTypeFullDomain {
|
||||||
|
pd = "." + pd // So that `domain:example.org` overrides `full:example.org`
|
||||||
|
}
|
||||||
for {
|
for {
|
||||||
idx := strings.Index(pd, ".")
|
idx := strings.Index(pd, ".")
|
||||||
if idx == -1 { break }
|
if idx == -1 {
|
||||||
|
break
|
||||||
|
}
|
||||||
pd = pd[idx+1:] // Go for next parent
|
pd = pd[idx+1:] // Go for next parent
|
||||||
if !strings.Contains(pd, ".") { break } // Not allow tld to be a parent
|
|
||||||
if domainsMap[pd] {
|
if domainsMap[pd] {
|
||||||
isRedundant = true
|
isRedundant = true
|
||||||
break
|
break
|
||||||
@@ -289,27 +337,16 @@ func polishList(roughMap *map[string]*Entry) []*Entry {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func resolveList(pl *ParsedList) error {
|
func resolveList(pl *ParsedList) error {
|
||||||
if _, pldone := finalMap[pl.Name]; pldone { return nil }
|
if _, pldone := finalMap[pl.Name]; pldone {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
if cirIncMap[pl.Name] {
|
if cirIncMap[pl.Name] {
|
||||||
return fmt.Errorf("circular inclusion in: %s", pl.Name)
|
return fmt.Errorf("circular inclusion in: %q", pl.Name)
|
||||||
}
|
}
|
||||||
cirIncMap[pl.Name] = true
|
cirIncMap[pl.Name] = true
|
||||||
defer delete(cirIncMap, pl.Name)
|
defer delete(cirIncMap, pl.Name)
|
||||||
|
|
||||||
isMatchAttrFilters := func(entry *Entry, incFilter *Inclusion) bool {
|
|
||||||
if len(incFilter.MustAttrs) == 0 && len(incFilter.BanAttrs) == 0 { return true }
|
|
||||||
if len(entry.Attrs) == 0 { return len(incFilter.MustAttrs) == 0 }
|
|
||||||
|
|
||||||
for _, m := range incFilter.MustAttrs {
|
|
||||||
if !slices.Contains(entry.Attrs, m) { return false }
|
|
||||||
}
|
|
||||||
for _, b := range incFilter.BanAttrs {
|
|
||||||
if slices.Contains(entry.Attrs, b) { return false }
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
roughMap := make(map[string]*Entry) // Avoid basic duplicates
|
roughMap := make(map[string]*Entry) // Avoid basic duplicates
|
||||||
for _, dentry := range pl.Entries { // Add direct entries
|
for _, dentry := range pl.Entries { // Add direct entries
|
||||||
roughMap[dentry.Plain] = dentry
|
roughMap[dentry.Plain] = dentry
|
||||||
@@ -317,7 +354,7 @@ func resolveList(pl *ParsedList) error {
|
|||||||
for _, inc := range pl.Inclusions {
|
for _, inc := range pl.Inclusions {
|
||||||
incPl, exist := plMap[inc.Source]
|
incPl, exist := plMap[inc.Source]
|
||||||
if !exist {
|
if !exist {
|
||||||
return fmt.Errorf("list '%s' includes a non-existent list: '%s'", pl.Name, inc.Source)
|
return fmt.Errorf("list %q includes a non-existent list: %q", pl.Name, inc.Source)
|
||||||
}
|
}
|
||||||
if err := resolveList(incPl); err != nil {
|
if err := resolveList(incPl); err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -328,67 +365,66 @@ func resolveList(pl *ParsedList) error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
finalMap[pl.Name] = polishList(&roughMap)
|
finalMap[pl.Name] = polishList(roughMap)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
func run() error {
|
||||||
flag.Parse()
|
|
||||||
|
|
||||||
dir := *dataPath
|
dir := *dataPath
|
||||||
fmt.Println("Use domain lists in", dir)
|
fmt.Printf("using domain lists data in %q\n", dir)
|
||||||
|
|
||||||
// Generate refMap
|
// Generate refMap
|
||||||
err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
|
refMap := make(map[string][]*Entry)
|
||||||
|
err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if info.IsDir() {
|
if d.IsDir() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if err := loadData(path); err != nil {
|
listName := strings.ToUpper(filepath.Base(path))
|
||||||
return err
|
if !validateSiteName(listName) {
|
||||||
|
return fmt.Errorf("invalid list name: %q", listName)
|
||||||
}
|
}
|
||||||
return nil
|
refMap[listName], err = loadData(path)
|
||||||
|
return err
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Failed to loadData:", err)
|
return fmt.Errorf("failed to loadData: %w", err)
|
||||||
os.Exit(1)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generate plMap
|
// Generate plMap
|
||||||
for refName, refList := range refMap {
|
for refName, refList := range refMap {
|
||||||
if err := parseList(refName, refList); err != nil {
|
if err := parseList(refName, refList); err != nil {
|
||||||
fmt.Println("Failed to parseList:", err)
|
return fmt.Errorf("failed to parseList %q: %w", refName, err)
|
||||||
os.Exit(1)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generate finalMap
|
// Generate finalMap
|
||||||
for _, pl := range plMap {
|
for plname, pl := range plMap {
|
||||||
if err := resolveList(pl); err != nil {
|
if err := resolveList(pl); err != nil {
|
||||||
fmt.Println("Failed to resolveList:", err)
|
return fmt.Errorf("failed to resolveList %q: %w", plname, err)
|
||||||
os.Exit(1)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create output directory if not exist
|
// Make sure output directory exists
|
||||||
if _, err := os.Stat(*outputDir); os.IsNotExist(err) {
|
if err := os.MkdirAll(*outputDir, 0755); err != nil {
|
||||||
if mkErr := os.MkdirAll(*outputDir, 0755); mkErr != nil {
|
return fmt.Errorf("failed to create output directory: %w", err)
|
||||||
fmt.Println("Failed:", mkErr)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Export plaintext list
|
// Export plaintext list
|
||||||
if *exportLists != "" {
|
for rawEpList := range strings.SplitSeq(*exportLists, ",") {
|
||||||
exportedListSlice := strings.Split(*exportLists, ",")
|
if epList := strings.TrimSpace(rawEpList); epList != "" {
|
||||||
for _, exportedList := range exportedListSlice {
|
entries, exist := finalMap[strings.ToUpper(epList)]
|
||||||
if err := writePlainList(exportedList); err != nil {
|
if !exist || len(entries) == 0 {
|
||||||
fmt.Println("Failed to write list:", err)
|
fmt.Printf("list %q does not exist or is empty\n", epList)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
fmt.Printf("list: '%s' has been generated successfully.\n", exportedList)
|
if err := writePlainList(epList, entries); err != nil {
|
||||||
|
fmt.Printf("failed to write list %q: %v\n", epList, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Printf("list %q has been generated successfully.\n", epList)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -397,8 +433,7 @@ func main() {
|
|||||||
for siteName, siteEntries := range finalMap {
|
for siteName, siteEntries := range finalMap {
|
||||||
site, err := makeProtoList(siteName, siteEntries)
|
site, err := makeProtoList(siteName, siteEntries)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Failed:", err)
|
return fmt.Errorf("failed to makeProtoList %q: %w", siteName, err)
|
||||||
os.Exit(1)
|
|
||||||
}
|
}
|
||||||
protoList.Entry = append(protoList.Entry, site)
|
protoList.Entry = append(protoList.Entry, site)
|
||||||
}
|
}
|
||||||
@@ -409,13 +444,19 @@ func main() {
|
|||||||
|
|
||||||
protoBytes, err := proto.Marshal(protoList)
|
protoBytes, err := proto.Marshal(protoList)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Failed to marshal:", err)
|
return fmt.Errorf("failed to marshal: %w", err)
|
||||||
os.Exit(1)
|
|
||||||
}
|
}
|
||||||
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
|
if err := os.WriteFile(filepath.Join(*outputDir, *outputName), protoBytes, 0644); err != nil {
|
||||||
fmt.Println("Failed to write output:", err)
|
return fmt.Errorf("failed to write output: %w", err)
|
||||||
|
}
|
||||||
|
fmt.Printf("%q has been generated successfully.\n", *outputName)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
if err := run(); err != nil {
|
||||||
|
fmt.Printf("Fatal error: %v\n", err)
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
} else {
|
|
||||||
fmt.Println(*outputName, "has been generated successfully.")
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user