// 2022-07-04 22:45:11 +08:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"crypto/sha256"
|
|
|
|
"encoding/hex"
|
|
|
|
"io"
|
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
|
|
|
"strings"
|
|
|
|
|
|
|
|
"github.com/google/go-github/v45/github"
|
|
|
|
"github.com/sagernet/sing-box/common/geosite"
|
|
|
|
"github.com/sagernet/sing/common"
|
|
|
|
E "github.com/sagernet/sing/common/exceptions"
|
|
|
|
"github.com/sirupsen/logrus"
|
|
|
|
"github.com/v2fly/v2ray-core/v5/app/router/routercommon"
|
|
|
|
"google.golang.org/protobuf/proto"
|
|
|
|
)
|
|
|
|
|
|
|
|
// githubClient is the shared GitHub API client used by fetch.
// It is initialized exactly once in init(), optionally authenticated
// via the ACCESS_TOKEN environment variable.
var githubClient *github.Client
|
|
|
|
|
|
|
|
func init() {
|
|
|
|
accessToken, loaded := os.LookupEnv("ACCESS_TOKEN")
|
|
|
|
if !loaded {
|
|
|
|
githubClient = github.NewClient(nil)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
transport := &github.BasicAuthTransport{
|
|
|
|
Username: accessToken,
|
|
|
|
}
|
|
|
|
githubClient = github.NewClient(transport.Client())
|
|
|
|
}
|
|
|
|
|
|
|
|
func fetch(from string) (*github.RepositoryRelease, error) {
|
|
|
|
names := strings.SplitN(from, "/", 2)
|
|
|
|
latestRelease, _, err := githubClient.Repositories.GetLatestRelease(context.Background(), names[0], names[1])
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return latestRelease, err
|
|
|
|
}
|
|
|
|
|
|
|
|
func get(downloadURL *string) ([]byte, error) {
|
|
|
|
logrus.Info("download ", *downloadURL)
|
|
|
|
response, err := http.Get(*downloadURL)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer response.Body.Close()
|
|
|
|
return io.ReadAll(response.Body)
|
|
|
|
}
|
|
|
|
|
|
|
|
func download(release *github.RepositoryRelease) ([]byte, error) {
|
|
|
|
geositeAsset := common.Find(release.Assets, func(it *github.ReleaseAsset) bool {
|
|
|
|
return *it.Name == "dlc.dat"
|
|
|
|
})
|
|
|
|
geositeChecksumAsset := common.Find(release.Assets, func(it *github.ReleaseAsset) bool {
|
|
|
|
return *it.Name == "dlc.dat.sha256sum"
|
|
|
|
})
|
|
|
|
if geositeAsset == nil {
|
|
|
|
return nil, E.New("geosite asset not found in upstream release ", release.Name)
|
|
|
|
}
|
|
|
|
if geositeChecksumAsset == nil {
|
|
|
|
return nil, E.New("geosite asset not found in upstream release ", release.Name)
|
|
|
|
}
|
|
|
|
data, err := get(geositeAsset.BrowserDownloadURL)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
remoteChecksum, err := get(geositeChecksumAsset.BrowserDownloadURL)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
checksum := sha256.Sum256(data)
|
|
|
|
if hex.EncodeToString(checksum[:]) != string(remoteChecksum[:64]) {
|
|
|
|
return nil, E.New("checksum mismatch")
|
|
|
|
}
|
|
|
|
return data, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func parse(vGeositeData []byte) (map[string][]geosite.Item, error) {
|
|
|
|
vGeositeList := routercommon.GeoSiteList{}
|
|
|
|
err := proto.Unmarshal(vGeositeData, &vGeositeList)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
domainMap := make(map[string][]geosite.Item)
|
|
|
|
for _, vGeositeEntry := range vGeositeList.Entry {
|
2022-09-04 12:40:46 +08:00
|
|
|
code := strings.ToLower(vGeositeEntry.CountryCode)
|
2022-07-04 22:45:11 +08:00
|
|
|
domains := make([]geosite.Item, 0, len(vGeositeEntry.Domain)*2)
|
2022-09-04 12:40:46 +08:00
|
|
|
attributes := make(map[string][]*routercommon.Domain)
|
2022-07-04 22:45:11 +08:00
|
|
|
for _, domain := range vGeositeEntry.Domain {
|
2022-09-04 11:33:10 +08:00
|
|
|
if len(domain.Attribute) > 0 {
|
2022-09-04 12:40:46 +08:00
|
|
|
for _, attribute := range domain.Attribute {
|
|
|
|
attributes[attribute.Key] = append(attributes[attribute.Key], domain)
|
|
|
|
}
|
2022-09-04 11:33:10 +08:00
|
|
|
continue
|
|
|
|
}
|
2022-07-04 22:45:11 +08:00
|
|
|
switch domain.Type {
|
|
|
|
case routercommon.Domain_Plain:
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainKeyword,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Regex:
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainRegex,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_RootDomain:
|
2022-07-07 23:38:22 +08:00
|
|
|
if strings.Contains(domain.Value, ".") {
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
2022-07-04 22:45:11 +08:00
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainSuffix,
|
|
|
|
Value: "." + domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Full:
|
|
|
|
domains = append(domains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
2022-09-04 12:40:46 +08:00
|
|
|
domainMap[code] = common.Uniq(domains)
|
|
|
|
for attribute, attributeEntries := range attributes {
|
|
|
|
attributeDomains := make([]geosite.Item, 0, len(attributeEntries)*2)
|
|
|
|
for _, domain := range attributeEntries {
|
|
|
|
switch domain.Type {
|
|
|
|
case routercommon.Domain_Plain:
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainKeyword,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Regex:
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainRegex,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_RootDomain:
|
|
|
|
if strings.Contains(domain.Value, ".") {
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomainSuffix,
|
|
|
|
Value: "." + domain.Value,
|
|
|
|
})
|
|
|
|
case routercommon.Domain_Full:
|
|
|
|
attributeDomains = append(attributeDomains, geosite.Item{
|
|
|
|
Type: geosite.RuleTypeDomain,
|
|
|
|
Value: domain.Value,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
domainMap[code+"@"+attribute] = common.Uniq(attributeDomains)
|
|
|
|
}
|
2022-07-04 22:45:11 +08:00
|
|
|
}
|
|
|
|
return domainMap, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func generate(release *github.RepositoryRelease, output string) error {
|
|
|
|
outputFile, err := os.Create(output)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer outputFile.Close()
|
|
|
|
vData, err := download(release)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
domainMap, err := parse(vData)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
outputPath, _ := filepath.Abs(output)
|
|
|
|
os.Stderr.WriteString("write " + outputPath + "\n")
|
|
|
|
return geosite.Write(outputFile, domainMap)
|
|
|
|
}
|
|
|
|
|
|
|
|
// setActionOutput emits a GitHub Actions workflow command on stdout that
// sets the named step output to content.
//
// NOTE(review): the "::set-output" workflow command has been deprecated by
// GitHub Actions in favor of appending to $GITHUB_OUTPUT — confirm the
// consuming workflow still accepts it before migrating.
func setActionOutput(name string, content string) {
	command := "::set-output name=" + name + "::" + content + "\n"
	os.Stdout.WriteString(command)
}
|
|
|
|
|
|
|
|
func release(source string, destination string, output string) error {
|
|
|
|
sourceRelease, err := fetch(source)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
destinationRelease, err := fetch(destination)
|
|
|
|
if err != nil {
|
|
|
|
logrus.Warn("missing destination latest release")
|
|
|
|
} else {
|
2022-07-05 09:12:35 +08:00
|
|
|
if os.Getenv("NO_SKIP") != "true" && strings.Contains(*destinationRelease.Name, *sourceRelease.Name) {
|
2022-07-04 22:45:11 +08:00
|
|
|
logrus.Info("already latest")
|
|
|
|
setActionOutput("skip", "true")
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
err = generate(sourceRelease, output)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
setActionOutput("tag", *sourceRelease.Name)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func main() {
|
|
|
|
err := release("v2fly/domain-list-community", "sagernet/sing-geosite", "geosite.db")
|
|
|
|
if err != nil {
|
|
|
|
logrus.Fatal(err)
|
|
|
|
}
|
|
|
|
}
|