2022-07-04 22:45:11 +08:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2024-07-18 13:36:35 +08:00
|
|
|
"bufio"
|
2022-07-04 22:45:11 +08:00
|
|
|
"context"
|
|
|
|
"crypto/sha256"
|
|
|
|
"encoding/hex"
|
2024-05-15 21:54:49 +08:00
|
|
|
"fmt"
|
2022-07-04 22:45:11 +08:00
|
|
|
"io"
|
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
2023-12-30 22:51:19 +08:00
|
|
|
"sort"
|
2022-07-04 22:45:11 +08:00
|
|
|
"strings"
|
|
|
|
|
|
|
|
"github.com/sagernet/sing-box/common/geosite"
|
2023-11-29 13:12:38 +08:00
|
|
|
"github.com/sagernet/sing-box/common/srs"
|
|
|
|
C "github.com/sagernet/sing-box/constant"
|
2023-12-12 18:34:34 +08:00
|
|
|
"github.com/sagernet/sing-box/log"
|
2023-11-29 13:12:38 +08:00
|
|
|
"github.com/sagernet/sing-box/option"
|
2022-07-04 22:45:11 +08:00
|
|
|
"github.com/sagernet/sing/common"
|
|
|
|
E "github.com/sagernet/sing/common/exceptions"
|
2023-11-29 13:12:38 +08:00
|
|
|
|
|
|
|
"github.com/google/go-github/v45/github"
|
2022-07-04 22:45:11 +08:00
|
|
|
"github.com/v2fly/v2ray-core/v5/app/router/routercommon"
|
|
|
|
"google.golang.org/protobuf/proto"
|
|
|
|
)
|
|
|
|
|
|
|
|
// githubClient is the shared GitHub API client used for all release
// lookups; it is configured (optionally with ACCESS_TOKEN auth) in init.
var githubClient *github.Client
|
|
|
|
|
|
|
|
func init() {
|
|
|
|
accessToken, loaded := os.LookupEnv("ACCESS_TOKEN")
|
|
|
|
if !loaded {
|
|
|
|
githubClient = github.NewClient(nil)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
transport := &github.BasicAuthTransport{
|
|
|
|
Username: accessToken,
|
|
|
|
}
|
|
|
|
githubClient = github.NewClient(transport.Client())
|
|
|
|
}
|
|
|
|
|
|
|
|
func fetch(from string) (*github.RepositoryRelease, error) {
|
|
|
|
names := strings.SplitN(from, "/", 2)
|
|
|
|
latestRelease, _, err := githubClient.Repositories.GetLatestRelease(context.Background(), names[0], names[1])
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return latestRelease, err
|
|
|
|
}
|
|
|
|
|
|
|
|
func get(downloadURL *string) ([]byte, error) {
|
2023-11-29 13:12:38 +08:00
|
|
|
log.Info("download ", *downloadURL)
|
2022-07-04 22:45:11 +08:00
|
|
|
response, err := http.Get(*downloadURL)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer response.Body.Close()
|
|
|
|
return io.ReadAll(response.Body)
|
|
|
|
}
|
|
|
|
|
|
|
|
func download(release *github.RepositoryRelease) ([]byte, error) {
|
|
|
|
geositeAsset := common.Find(release.Assets, func(it *github.ReleaseAsset) bool {
|
|
|
|
return *it.Name == "dlc.dat"
|
|
|
|
})
|
|
|
|
geositeChecksumAsset := common.Find(release.Assets, func(it *github.ReleaseAsset) bool {
|
|
|
|
return *it.Name == "dlc.dat.sha256sum"
|
|
|
|
})
|
|
|
|
if geositeAsset == nil {
|
|
|
|
return nil, E.New("geosite asset not found in upstream release ", release.Name)
|
|
|
|
}
|
|
|
|
if geositeChecksumAsset == nil {
|
|
|
|
return nil, E.New("geosite asset not found in upstream release ", release.Name)
|
|
|
|
}
|
|
|
|
data, err := get(geositeAsset.BrowserDownloadURL)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
remoteChecksum, err := get(geositeChecksumAsset.BrowserDownloadURL)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
checksum := sha256.Sum256(data)
|
|
|
|
if hex.EncodeToString(checksum[:]) != string(remoteChecksum[:64]) {
|
|
|
|
return nil, E.New("checksum mismatch")
|
|
|
|
}
|
|
|
|
return data, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func parse(vGeositeData []byte) (map[string][]geosite.Item, error) {
|
|
|
|
vGeositeList := routercommon.GeoSiteList{}
|
|
|
|
err := proto.Unmarshal(vGeositeData, &vGeositeList)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
domainMap := make(map[string][]geosite.Item)
|
|
|
|
for _, vGeositeEntry := range vGeositeList.Entry {
|
2022-09-04 12:40:46 +08:00
|
|
|
code := strings.ToLower(vGeositeEntry.CountryCode)
|
2022-07-04 22:45:11 +08:00
|
|
|
domains := make([]geosite.Item, 0, len(vGeositeEntry.Domain)*2)
|
2022-09-04 12:40:46 +08:00
|
|
|
attributes := make(map[string][]*routercommon.Domain)
|
2022-07-04 22:45:11 +08:00
|
|
|
for _, domain := range vGeositeEntry.Domain {
|
2022-09-04 11:33:10 +08:00
|
|
|
if len(domain.Attribute) > 0 {
|
2022-09-04 12:40:46 +08:00
|
|
|
for _, attribute := range domain.Attribute {
|
|
|
|
attributes[attribute.Key] = append(attributes[attribute.Key], domain)
|
|
|
|
}
|
2022-09-04 11:33:10 +08:00
|
|
|
}
|
2022-07-04 22:45:11 +08:00
|
|
|
switch domain.Type {
|
|
|
|
case routercommon.Domain_Plain:
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainKeyword,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Regex:
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainRegex,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_RootDomain:
|
2022-07-07 23:38:22 +08:00
|
|
|
if strings.Contains(domain.Value, ".") {
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
2022-07-04 22:45:11 +08:00
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainSuffix,
|
|
|
|
Value: "." + domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Full:
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
2022-09-04 12:40:46 +08:00
|
|
|
domainMap[code] = common.Uniq(domains)
|
|
|
|
for attribute, attributeEntries := range attributes {
|
|
|
|
attributeDomains := make([]geosite.Item, 0, len(attributeEntries)*2)
|
|
|
|
for _, domain := range attributeEntries {
|
|
|
|
switch domain.Type {
|
|
|
|
case routercommon.Domain_Plain:
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainKeyword,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Regex:
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainRegex,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_RootDomain:
|
|
|
|
if strings.Contains(domain.Value, ".") {
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainSuffix,
|
|
|
|
Value: "." + domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Full:
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
domainMap[code+"@"+attribute] = common.Uniq(attributeDomains)
|
|
|
|
}
|
2022-07-04 22:45:11 +08:00
|
|
|
}
|
|
|
|
return domainMap, nil
|
|
|
|
}
|
|
|
|
|
2023-12-30 22:51:19 +08:00
|
|
|
// filteredCodePair records a tagged entry ("badCode", e.g. "foo@!bar")
// whose items are to be subtracted from its parent list ("code") by
// filterTags.
type filteredCodePair struct {
	// code is the parent geosite code the subtraction applies to.
	code string
	// badCode is the "code@attribute" entry being merged away.
	badCode string
}
|
|
|
|
|
|
|
|
func filterTags(data map[string][]geosite.Item) {
|
|
|
|
var codeList []string
|
|
|
|
for code := range data {
|
|
|
|
codeList = append(codeList, code)
|
|
|
|
}
|
|
|
|
var badCodeList []filteredCodePair
|
|
|
|
var filteredCodeMap []string
|
|
|
|
var mergedCodeMap []string
|
|
|
|
for _, code := range codeList {
|
|
|
|
codeParts := strings.Split(code, "@")
|
|
|
|
if len(codeParts) != 2 {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
leftParts := strings.Split(codeParts[0], "-")
|
|
|
|
var lastName string
|
|
|
|
if len(leftParts) > 1 {
|
|
|
|
lastName = leftParts[len(leftParts)-1]
|
|
|
|
}
|
|
|
|
if lastName == "" {
|
|
|
|
lastName = codeParts[0]
|
|
|
|
}
|
|
|
|
if lastName == codeParts[1] {
|
|
|
|
delete(data, code)
|
|
|
|
filteredCodeMap = append(filteredCodeMap, code)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if "!"+lastName == codeParts[1] {
|
|
|
|
badCodeList = append(badCodeList, filteredCodePair{
|
|
|
|
code: codeParts[0],
|
|
|
|
badCode: code,
|
|
|
|
})
|
|
|
|
} else if lastName == "!"+codeParts[1] {
|
|
|
|
badCodeList = append(badCodeList, filteredCodePair{
|
|
|
|
code: codeParts[0],
|
|
|
|
badCode: code,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for _, it := range badCodeList {
|
|
|
|
badList := data[it.badCode]
|
2024-05-15 21:54:49 +08:00
|
|
|
if it.badCode == "geolocation-!cn@cn" {
|
|
|
|
fmt.Println(badList)
|
|
|
|
}
|
2023-12-30 22:51:19 +08:00
|
|
|
if badList == nil {
|
|
|
|
panic("bad list not found: " + it.badCode)
|
|
|
|
}
|
|
|
|
delete(data, it.badCode)
|
|
|
|
newMap := make(map[geosite.Item]bool)
|
|
|
|
for _, item := range data[it.code] {
|
|
|
|
newMap[item] = true
|
|
|
|
}
|
|
|
|
for _, item := range badList {
|
|
|
|
delete(newMap, item)
|
|
|
|
}
|
|
|
|
newList := make([]geosite.Item, 0, len(newMap))
|
|
|
|
for item := range newMap {
|
|
|
|
newList = append(newList, item)
|
|
|
|
}
|
|
|
|
data[it.code] = newList
|
|
|
|
mergedCodeMap = append(mergedCodeMap, it.badCode)
|
|
|
|
}
|
|
|
|
sort.Strings(filteredCodeMap)
|
|
|
|
sort.Strings(mergedCodeMap)
|
|
|
|
os.Stderr.WriteString("filtered " + strings.Join(filteredCodeMap, ",") + "\n")
|
|
|
|
os.Stderr.WriteString("merged " + strings.Join(mergedCodeMap, ",") + "\n")
|
|
|
|
}
|
|
|
|
|
2024-01-02 00:25:17 +08:00
|
|
|
func mergeTags(data map[string][]geosite.Item) {
|
|
|
|
var codeList []string
|
|
|
|
for code := range data {
|
|
|
|
codeList = append(codeList, code)
|
|
|
|
}
|
|
|
|
var cnCodeList []string
|
|
|
|
for _, code := range codeList {
|
|
|
|
codeParts := strings.Split(code, "@")
|
|
|
|
if len(codeParts) != 2 {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if codeParts[1] != "cn" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if !strings.HasPrefix(codeParts[0], "category-") {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if strings.HasSuffix(codeParts[0], "-cn") || strings.HasSuffix(codeParts[0], "-!cn") {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
cnCodeList = append(cnCodeList, code)
|
|
|
|
}
|
|
|
|
newMap := make(map[geosite.Item]bool)
|
2024-01-23 10:59:48 +08:00
|
|
|
for _, item := range data["geolocation-cn"] {
|
2024-01-02 00:25:17 +08:00
|
|
|
newMap[item] = true
|
|
|
|
}
|
|
|
|
for _, code := range cnCodeList {
|
|
|
|
for _, item := range data[code] {
|
|
|
|
newMap[item] = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
newList := make([]geosite.Item, 0, len(newMap))
|
|
|
|
for item := range newMap {
|
|
|
|
newList = append(newList, item)
|
|
|
|
}
|
2024-01-23 10:59:48 +08:00
|
|
|
data["geolocation-cn"] = newList
|
2024-01-02 00:25:17 +08:00
|
|
|
println("merged cn categories: " + strings.Join(cnCodeList, ","))
|
|
|
|
}
|
|
|
|
|
2024-07-18 13:36:35 +08:00
|
|
|
func generate(release *github.RepositoryRelease, output string, cnOutput string, ruleSetOutput string, ruleSetUnstableOutput string) error {
|
2022-07-04 22:45:11 +08:00
|
|
|
vData, err := download(release)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
domainMap, err := parse(vData)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-12-30 22:51:19 +08:00
|
|
|
filterTags(domainMap)
|
2024-01-02 00:25:17 +08:00
|
|
|
mergeTags(domainMap)
|
2022-07-04 22:45:11 +08:00
|
|
|
outputPath, _ := filepath.Abs(output)
|
|
|
|
os.Stderr.WriteString("write " + outputPath + "\n")
|
2023-12-12 18:34:34 +08:00
|
|
|
outputFile, err := os.Create(output)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer outputFile.Close()
|
2024-07-18 13:36:35 +08:00
|
|
|
writer := bufio.NewWriter(outputFile)
|
|
|
|
err = geosite.Write(writer, domainMap)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
err = writer.Flush()
|
2023-11-29 13:12:38 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-12-12 18:34:34 +08:00
|
|
|
cnCodes := []string{
|
2024-01-23 10:59:48 +08:00
|
|
|
"geolocation-cn",
|
2023-12-12 18:34:34 +08:00
|
|
|
}
|
|
|
|
cnDomainMap := make(map[string][]geosite.Item)
|
|
|
|
for _, cnCode := range cnCodes {
|
|
|
|
cnDomainMap[cnCode] = domainMap[cnCode]
|
|
|
|
}
|
|
|
|
cnOutputFile, err := os.Create(cnOutput)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer cnOutputFile.Close()
|
2024-07-18 13:36:35 +08:00
|
|
|
writer.Reset(cnOutputFile)
|
|
|
|
err = geosite.Write(writer, cnDomainMap)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
err = writer.Flush()
|
2023-12-12 18:34:34 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-11-29 13:12:38 +08:00
|
|
|
os.RemoveAll(ruleSetOutput)
|
2024-07-18 13:36:35 +08:00
|
|
|
os.RemoveAll(ruleSetUnstableOutput)
|
2023-11-29 13:12:38 +08:00
|
|
|
err = os.MkdirAll(ruleSetOutput, 0o755)
|
2024-07-18 13:36:35 +08:00
|
|
|
err = os.MkdirAll(ruleSetUnstableOutput, 0o755)
|
2023-11-29 13:12:38 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
for code, domains := range domainMap {
|
|
|
|
var headlessRule option.DefaultHeadlessRule
|
|
|
|
defaultRule := geosite.Compile(domains)
|
|
|
|
headlessRule.Domain = defaultRule.Domain
|
|
|
|
headlessRule.DomainSuffix = defaultRule.DomainSuffix
|
|
|
|
headlessRule.DomainKeyword = defaultRule.DomainKeyword
|
|
|
|
headlessRule.DomainRegex = defaultRule.DomainRegex
|
|
|
|
var plainRuleSet option.PlainRuleSet
|
|
|
|
plainRuleSet.Rules = []option.HeadlessRule{
|
|
|
|
{
|
|
|
|
Type: C.RuleTypeDefault,
|
|
|
|
DefaultOptions: headlessRule,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
srsPath, _ := filepath.Abs(filepath.Join(ruleSetOutput, "geosite-"+code+".srs"))
|
2024-07-18 13:36:35 +08:00
|
|
|
unstableSRSPath, _ := filepath.Abs(filepath.Join(ruleSetUnstableOutput, "geosite-"+code+".srs"))
|
|
|
|
// os.Stderr.WriteString("write " + srsPath + "\n")
|
|
|
|
var (
|
|
|
|
outputRuleSet *os.File
|
|
|
|
outputRuleSetUnstable *os.File
|
|
|
|
)
|
|
|
|
outputRuleSet, err = os.Create(srsPath)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
err = srs.Write(outputRuleSet, plainRuleSet, false)
|
|
|
|
outputRuleSet.Close()
|
2023-11-29 13:12:38 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-07-18 13:36:35 +08:00
|
|
|
outputRuleSetUnstable, err = os.Create(unstableSRSPath)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
err = srs.Write(outputRuleSetUnstable, plainRuleSet, true)
|
|
|
|
outputRuleSetUnstable.Close()
|
2023-11-29 13:12:38 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
2022-07-04 22:45:11 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// setActionOutput emits a GitHub Actions output variable on stdout using the
// workflow-command syntax.
// NOTE(review): the "::set-output" command has been deprecated by GitHub
// Actions in favor of appending to the $GITHUB_OUTPUT file — confirm the
// consuming workflow still supports this form before migrating.
func setActionOutput(name string, content string) {
	os.Stdout.WriteString("::set-output name=" + name + "::" + content + "\n")
}
|
|
|
|
|
2024-07-18 13:36:35 +08:00
|
|
|
func release(source string, destination string, output string, cnOutput string, ruleSetOutput string, ruleSetOutputUnstable string) error {
|
2022-07-04 22:45:11 +08:00
|
|
|
sourceRelease, err := fetch(source)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
destinationRelease, err := fetch(destination)
|
|
|
|
if err != nil {
|
2023-11-29 13:12:38 +08:00
|
|
|
log.Warn("missing destination latest release")
|
2022-07-04 22:45:11 +08:00
|
|
|
} else {
|
2022-07-05 09:12:35 +08:00
|
|
|
if os.Getenv("NO_SKIP") != "true" && strings.Contains(*destinationRelease.Name, *sourceRelease.Name) {
|
2023-11-29 13:12:38 +08:00
|
|
|
log.Info("already latest")
|
2022-07-04 22:45:11 +08:00
|
|
|
setActionOutput("skip", "true")
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
}
|
2024-07-18 13:36:35 +08:00
|
|
|
err = generate(sourceRelease, output, cnOutput, ruleSetOutput, ruleSetOutputUnstable)
|
2022-07-04 22:45:11 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
setActionOutput("tag", *sourceRelease.Name)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func main() {
|
2023-12-12 18:34:34 +08:00
|
|
|
err := release(
|
|
|
|
"v2fly/domain-list-community",
|
|
|
|
"sagernet/sing-geosite",
|
|
|
|
"geosite.db",
|
|
|
|
"geosite-cn.db",
|
|
|
|
"rule-set",
|
2024-07-18 13:36:35 +08:00
|
|
|
"rule-set-unstable",
|
2023-12-12 18:34:34 +08:00
|
|
|
)
|
2022-07-04 22:45:11 +08:00
|
|
|
if err != nil {
|
2023-11-29 13:12:38 +08:00
|
|
|
log.Fatal(err)
|
2022-07-04 22:45:11 +08:00
|
|
|
}
|
|
|
|
}
|