2023-12-04 18:45:22 +08:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"context"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
2023-12-09 22:23:30 +08:00
|
|
|
"maps"
|
2023-12-04 18:45:22 +08:00
|
|
|
"net/http"
|
2023-12-05 09:59:27 +08:00
|
|
|
"slices"
|
2023-12-04 18:45:22 +08:00
|
|
|
"strings"
|
|
|
|
|
|
|
|
"github.com/AdguardTeam/urlfilter/filterlist"
|
|
|
|
"github.com/AdguardTeam/urlfilter/rules"
|
|
|
|
"github.com/samber/lo"
|
|
|
|
)
|
|
|
|
|
|
|
|
// AdGuardSDNSFilter is the URL of the published AdGuard Simplified Domain
// Names filter list that adguard downloads and converts into rulesets.
const AdGuardSDNSFilter = "https://adguardteam.github.io/AdGuardSDNSFilter/Filters/filter.txt"
|
|
|
|
|
2023-12-09 22:23:30 +08:00
|
|
|
func adguard(ctx context.Context, c *http.Client) (hasReg *Ruleset, noReg *Ruleset, err error) {
|
2023-12-04 18:45:22 +08:00
|
|
|
b, err := getFilter(ctx, c)
|
|
|
|
if err != nil {
|
2023-12-09 22:23:30 +08:00
|
|
|
return nil, nil, fmt.Errorf("adguard: %w", err)
|
2023-12-04 18:45:22 +08:00
|
|
|
}
|
|
|
|
domain := map[string]struct{}{}
|
|
|
|
domainRegex := map[string]struct{}{}
|
|
|
|
domainSuffix := map[string]struct{}{}
|
|
|
|
|
|
|
|
s := filterlist.NewRuleScanner(bytes.NewReader(b), 1, true)
|
|
|
|
|
|
|
|
for s.Scan() {
|
|
|
|
r, _ := s.Rule()
|
|
|
|
hr, ok := r.(*rules.NetworkRule)
|
|
|
|
if !ok || !hr.IsHostLevelNetworkRule() || hr.Whitelist {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if hr.IsRegexRule() {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
rule := strings.TrimSuffix(strings.TrimLeft(hr.RuleText, "|"), "^")
|
|
|
|
|
|
|
|
if rule == hr.Shortcut {
|
|
|
|
rule = strings.TrimPrefix(rule, "://")
|
|
|
|
if strings.HasPrefix(rule, ".") {
|
|
|
|
domainSuffix[rule] = struct{}{}
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if strings.HasSuffix(rule, ".") {
|
2023-12-04 20:47:26 +08:00
|
|
|
domainRegex[`^(.*\.)?`+rule] = struct{}{}
|
2023-12-04 18:45:22 +08:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
domain[rule] = struct{}{}
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
ruleR := strings.TrimPrefix(rule, "://")
|
|
|
|
ruleR = strings.ReplaceAll(ruleR, ".", `\.`)
|
|
|
|
reg := strings.ReplaceAll(ruleR, "*", ".*")
|
2023-12-05 09:59:27 +08:00
|
|
|
if !strings.HasPrefix(hr.RuleText, "*") {
|
|
|
|
reg = `^(.*\.)?` + reg
|
|
|
|
}
|
2023-12-04 18:45:22 +08:00
|
|
|
if strings.HasSuffix(hr.RuleText, "^") {
|
|
|
|
reg = reg + "$"
|
|
|
|
}
|
|
|
|
domainRegex[reg] = struct{}{}
|
|
|
|
}
|
|
|
|
for k := range domain {
|
|
|
|
domainSuffix["."+k] = struct{}{}
|
|
|
|
}
|
|
|
|
|
2023-12-09 22:23:30 +08:00
|
|
|
rules := []map[string][]any{
|
2023-12-04 19:21:55 +08:00
|
|
|
{
|
2023-12-05 09:59:27 +08:00
|
|
|
"domain": toAny(domain),
|
|
|
|
"domain_suffix": toAny(domainSuffix),
|
|
|
|
"domain_regex": toAny(domainRegex),
|
2023-12-04 19:21:55 +08:00
|
|
|
},
|
2023-12-04 18:45:22 +08:00
|
|
|
}
|
2023-12-09 22:23:30 +08:00
|
|
|
noRegRules := maps.Clone(rules[0])
|
|
|
|
delete(noRegRules, "domain_regex")
|
|
|
|
|
|
|
|
return NewRuleSet(rules), NewRuleSet([]map[string][]any{noRegRules}), nil
|
2023-12-04 18:45:22 +08:00
|
|
|
}
|
|
|
|
|
2023-12-05 09:59:27 +08:00
|
|
|
func toAny(m map[string]struct{}) []any {
|
|
|
|
sl := lo.Keys(m)
|
|
|
|
slices.Sort(sl)
|
|
|
|
return lo.Map[string, any](sl, func(item string, index int) any { return item })
|
|
|
|
}
|
|
|
|
|
2023-12-04 18:45:22 +08:00
|
|
|
func getFilter(ctx context.Context, c *http.Client) ([]byte, error) {
|
|
|
|
reps, err := http.NewRequestWithContext(ctx, "GET", AdGuardSDNSFilter, nil)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("getFilter: %w", err)
|
|
|
|
}
|
|
|
|
rep, err := c.Do(reps)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("getFilter: %w", err)
|
|
|
|
}
|
|
|
|
defer rep.Body.Close()
|
|
|
|
|
|
|
|
b, err := io.ReadAll(rep.Body)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("getFilter: %w", err)
|
|
|
|
}
|
|
|
|
return b, nil
|
|
|
|
}
|