mirror of
https://github.com/KaringX/karing-ruleset.git
synced 2025-06-12 04:24:13 +08:00
AdGuardSDNSFilter
This commit is contained in:
parent
8803fdaeed
commit
509fdfa4d9
10
.github/workflows/run.yml
vendored
10
.github/workflows/run.yml
vendored
@ -40,9 +40,9 @@ jobs:
|
||||
NO_SKIP: true
|
||||
run: |
|
||||
cd sing-rule || exit 1
|
||||
wget -q https://github.com/SagerNet/sing-box/releases/download/v1.9.3/sing-box-1.9.3-linux-amd64.tar.gz -O sing-box.tar.gz && tar zxvf sing-box.tar.gz && mv sing-box-1.9.3-linux-amd64/sing-box ./
|
||||
wget -q https://github.com/SagerNet/sing-box/releases/download/v1.10.0-beta.4/sing-box-1.10.0-beta.4-linux-amd64.tar.gz -O sing-box.tar.gz && tar zxvf sing-box.tar.gz && mv sing-box-1.10.0-beta.4-linux-amd64/sing-box ./
|
||||
chmod 755 sing-box ACL4SSR/convert_srs.sh && ./ACL4SSR/convert_srs.sh ./sing-box
|
||||
rm -rf sing-box* LICENSE README.md ACL4SSR/convert_*
|
||||
rm -rf LICENSE README.md ACL4SSR/convert_*
|
||||
cp ./ACL4SSR/Ruleset/*.srs ./ACL4SSR
|
||||
|
||||
- name: Checkout meta-rules-dat
|
||||
@ -74,6 +74,12 @@ jobs:
|
||||
mkdir -p ./sing-rule/recommend
|
||||
python workflow/resouces/kr_builtin_ruleset.py ./sing-rule/
|
||||
|
||||
- name: convert AdGuardSDNSFilter to srs
|
||||
env:
|
||||
NO_SKIP: true
|
||||
run: |
|
||||
python workflow/resouces/convert_adguard.py ./sing-rule/
|
||||
|
||||
- name: Git push assets to "sing-rule" branch
|
||||
run: |
|
||||
cd sing-rule || exit 1
|
||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -1,4 +1,5 @@
|
||||
*.log
|
||||
*.srs
|
||||
*.json
|
||||
*.pyc
|
||||
tmp/
|
||||
|
11
README.md
11
README.md
@ -126,5 +126,14 @@ https://fastly.jsdelivr.net/gh/karingX/karing-ruleset@sing/ACL4SSR/BanAD.srs
|
||||
| ProxyLite.srs | 规则碎片-代理 | 比较精简的代理列表,包含常用的,以及被污染的域名 |
|
||||
|
||||
|
||||
|
||||
# AdGuardSDNSFilter
|
||||
- AdGuardFilter
|
||||
- source: https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/configuration.json
|
||||
- srs list: [AdGuardFilter.list](https://github.com/KaringX/karing-ruleset/tree/sing/AdGuard/AdGuardFilter.list)
|
||||
- PopupFilter
|
||||
- source: https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/configuration_popup_filter.json
|
||||
- srs list: [PopupFilter.list](https://github.com/KaringX/karing-ruleset/tree/sing/AdGuard/PopupFilter.list)
|
||||
- ppfeuferFilter
|
||||
- source: https://raw.githubusercontent.com/ppfeufer/adguard-filter-list/master/hostlist-compiler-config.json
|
||||
- srs list: [ppfeuferFilter.list](https://github.com/KaringX/karing-ruleset/tree/sing/AdGuard/ppfeuferFilter.list)
|
||||
|
||||
|
161
resouces/convert_adguard.py
Executable file
161
resouces/convert_adguard.py
Executable file
@ -0,0 +1,161 @@
|
||||
# AdGuardSDNSFilter list convert to srs
|
||||
# from
|
||||
# https://github.com/AdguardTeam/AdGuardSDNSFilter
|
||||
# https://github.com/ppfeufer/adguard-filter-list
|
||||
# Use
|
||||
# sing-box rule-set convert --type adguard [--output <file-name>.srs] <file-name>.txt to convert to binary rule-set.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import subprocess
|
||||
|
||||
from helper.helper import (
|
||||
debug_log,
|
||||
get_out_path,
|
||||
get_url_content,
|
||||
writeto_rulefile,
|
||||
correct_name,
|
||||
remove_ansi_escape_codes,
|
||||
)
|
||||
|
||||
# TODO exclusions.txt
|
||||
# https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exclusions.txt
|
||||
|
||||
|
||||
# Absolute path to the sing-box executable; populated by main() after the
# binary has been located in the output directory.
SING_BOX_EXEC_PATH = None


# Filter lists to compile.  Each entry gives the output name and the URL of a
# hostlist-compiler configuration JSON whose 'sources' array is iterated.
ADGUARD_CONFIG_FILTER_LIST = [
    {
        'name': 'AdGuardFilter',
        'source': 'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/configuration.json',
    },
    {
        'name': 'PopupFilter',
        'source': 'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/configuration_popup_filter.json',
    },
    {
        'name': 'ppfeuferFilter',
        'source': 'https://raw.githubusercontent.com/ppfeufer/adguard-filter-list/master/hostlist-compiler-config.json',
    },
]

# Rewrites applied to source URLs before downloading (relative paths the
# configs use internally are mapped to fetchable absolute URLs).
SOURCE_REPLACE_DICT = {
    # Get OISD list, since the HostlistCompiler can't fetch it for whatever reason » https://github.com/AdguardTeam/HostlistCompiler/issues/58
    '../oisd.txt': 'https://big.oisd.nl/',
}
|
||||
|
||||
|
||||
def converto_srs(out_path: str, file_name: str) -> bool:
    """Convert a downloaded AdGuard .txt filter list into a sing-box .srs file.

    Runs ``sing-box rule-set convert --type adguard`` on
    ``<out_path>/<file_name>.txt``, producing ``<out_path>/<file_name>.srs``.
    Returns True when sing-box reports success (its last non-DEBUG stderr
    line starts with "INFO"), False otherwise.
    """
    sb_exe_path = SING_BOX_EXEC_PATH
    out_file = os.path.join(out_path, file_name + '.srs')
    src_file = os.path.join(out_path, file_name + '.txt')

    # Pass an argument list with shell=False so paths containing spaces or
    # shell metacharacters can neither break nor inject into the command.
    command = [
        sb_exe_path, 'rule-set', 'convert',
        '--type', 'adguard',
        '--output', out_file,
        src_file,
    ]
    result = subprocess.run(command, capture_output=True, text=True)

    # sing-box logs to stderr; filter out its DEBUG noise.
    output_lines = [
        line for line in result.stderr.splitlines() if not line.startswith("DEBUG")
    ]
    # Guard against empty output: indexing [-1] on an empty list raises
    # IndexError, which previously crashed the whole run.
    last_line = output_lines[-1] if output_lines else ''

    debug_log(
        f"Convert code:{result.returncode} error:{last_line} output:{result.stdout}"
    )
    return remove_ansi_escape_codes(last_line).startswith('INFO')
# END converto_srs
|
||||
|
||||
|
||||
def main(out_path: str | None = None):
    """Compile every configured AdGuard filter list.

    Expects *out_path* to be an existing directory that contains the
    ``sing-box`` executable; output is written under ``<out_path>/AdGuard``.
    Exits with status 1 when the directory or the binary is missing.
    """
    # check out dir
    if not os.path.isdir(out_path):
        debug_log(f"ERR: {out_path} not exist")
        sys.exit(1)  # sys.exit, not the interactive-only exit() builtin

    # ensure the AdGuard output directory exists
    out_path = os.path.abspath(out_path)
    out_path2 = os.path.join(out_path, 'AdGuard')
    if not os.path.exists(out_path2):
        os.mkdir(out_path2)
        debug_log(f"mkdir {out_path2}")

    # the sing-box binary must already be unpacked into the output dir
    # (the CI workflow downloads and chmods it beforehand)
    sb_exe_path = os.path.join(out_path, 'sing-box')
    if not os.path.isfile(sb_exe_path):
        debug_log(f"ERR: {sb_exe_path} not exist")
        sys.exit(1)
    global SING_BOX_EXEC_PATH
    SING_BOX_EXEC_PATH = sb_exe_path

    for item in ADGUARD_CONFIG_FILTER_LIST:
        compile_filterlist(out_path=out_path2, item=item)
# END main
|
||||
|
||||
|
||||
def compile_filterlist(out_path: str, item: dict) -> bool:
    """Compile one configured filter list.

    Downloads the hostlist-compiler configuration JSON named by *item*,
    converts every source it references via compile_filterone(), and writes
    a ``<name>.list`` index of the successfully converted .srs files.
    Returns False when the configuration download fails, True otherwise.
    Exits with status 1 when the per-list output directory cannot be made.
    """
    source = item['source']
    if source in SOURCE_REPLACE_DICT:
        source = SOURCE_REPLACE_DICT[source]

    name = correct_name(item['name'])
    out_path2 = os.path.join(out_path, name)
    if not os.path.exists(out_path2):
        os.mkdir(out_path2)
    if not os.path.isdir(out_path2):
        debug_log(f"ERR: {out_path2} mkdir fail!")
        sys.exit(1)  # sys.exit, not the interactive-only exit() builtin

    debug_log(f"\n\n\t\x1b[36mCompiling\x1b[0m [[{name}]] => {source}")
    content = get_url_content(source)
    if content is None:
        return False

    # index of successfully converted rule-sets
    succ_path = os.path.join(out_path, f"{name}.list")
    succ_list = []

    json_data = json.loads(content)
    for filter_item in json_data['sources']:
        ret = compile_filterone(out_path2, filter_item)
        if ret is True:
            succ_list.append(correct_name(filter_item['name']) + '.srs')

    # write the success index
    writeto_rulefile(succ_path, "\n".join(succ_list))
    return True  # explicit success value (previously fell through to None)
# END compile
|
||||
|
||||
|
||||
def compile_filterone(out_path: str, item: dict) -> bool:
    """Download a single filter source and convert it to a .srs rule-set.

    Returns True when both the download and the sing-box conversion
    succeed, False when the download fails.
    """
    name = correct_name(item['name'])
    source = item['source']

    debug_log(f"\n\tdownloading [ {name} ] => {source}")
    content = get_url_content(source)
    if content is None:
        return False

    # persist the raw filter text next to the compiled rule-set
    writeto_rulefile(os.path.join(out_path, name + '.txt'), content)

    # hand the .txt over to sing-box for conversion
    return converto_srs(out_path, name)
# END compile one
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Require exactly one CLI argument: the output directory.
    if len(sys.argv) < 2:
        print("\x1b[31mUsage\x1b[0m: python script.py <output path>")
    else:
        main(get_out_path(sys.argv[1]))

# END FILE
|
0
resouces/helper/__init__.py
Normal file
0
resouces/helper/__init__.py
Normal file
84
resouces/helper/helper.py
Normal file
84
resouces/helper/helper.py
Normal file
@ -0,0 +1,84 @@
|
||||
import os, sys
|
||||
import requests
|
||||
import re
|
||||
|
||||
|
||||
def debug_log(msg: str):
    """Print a diagnostic message; a leading 'ERR' tag is colored red."""
    highlighted = msg.replace('ERR', '\x1b[31mERR\x1b[0m') if msg.startswith('ERR') else msg
    print(highlighted)
|
||||
|
||||
|
||||
def get_out_path(out_path: str = None):
    """Resolve *out_path* to an absolute path, defaulting to the CWD.

    None or an empty string yields the current working directory.
    """
    if out_path is None or len(out_path) == 0:
        return os.getcwd()
    return os.path.abspath(out_path)
|
||||
|
||||
|
||||
def writeto_rulefile(out_file: str, content: dict | str):
    """Write *content* to *out_file*; dict content is serialized as JSON.

    Fixes a latent NameError: the module never imported ``json``, so
    passing a dict previously crashed.
    """
    import json  # local import: the module header does not import json

    file_string = json.dumps(content) if isinstance(content, dict) else content

    # Explicit encoding so rule files are UTF-8 regardless of locale.
    with open(out_file, "w", encoding="utf-8") as rule_file:
        rule_file.write(file_string)
    debug_log(f"write to {out_file}")
|
||||
|
||||
|
||||
def get_url_content(url: str):
    """Fetch *url* and return the response body as text, or None on failure.

    All request errors are logged (by category) rather than raised, so
    callers only need to check for None.
    """
    try:
        # A timeout keeps the CI workflow from hanging forever on a dead host.
        response = requests.get(url, timeout=60)
        response.raise_for_status()  # Raises an HTTPError for bad responses (4xx and 5xx)
        return response.text if response.status_code == 200 else None

    except requests.exceptions.HTTPError as http_err:
        debug_log(f"HTTP error occurred: {http_err} \tURL: {url}")
    except requests.exceptions.ConnectionError as conn_err:
        debug_log(f"Connection error occurred: {conn_err} \tURL: {url}")
    except requests.exceptions.Timeout as timeout_err:
        debug_log(f"Timeout error occurred: {timeout_err} \tURL: {url}")
    except requests.exceptions.RequestException as req_err:
        debug_log(f"An error occurred: {req_err} \tURL: {url}")
    return None
|
||||
|
||||
|
||||
# Translation table built once at import time: spaces/en-dashes/commas -> '-',
# '/' and '(' -> '_', and ':', "'", ')' are removed entirely.
_NAME_TRANSLATION = str.maketrans({
    " ": "-",
    "–": "-",
    ",": "-",
    "/": "_",
    ":": "",
    "'": "",
    "(": "_",
    ")": "",
})


def correct_name(text: str) -> str:
    """Sanitize a filter name into a filesystem-safe name.

    Separator-like characters are replaced or dropped, runs of the same
    separator collapse to one, and trailing '-'/'_' are stripped.
    """
    # str.translate is a single C-level pass, replacing the previous
    # per-character Python loop over a replacements dict.
    replaced_text = text.translate(_NAME_TRANSLATION)

    # Collapse consecutive separators into a single one
    collapsed_text = re.sub(r'-+', '-', replaced_text)
    collapsed_text = re.sub(r'_+', '_', collapsed_text)

    # Remove trailing "-" or "_"
    return re.sub(r'[-_]+$', '', collapsed_text)
|
||||
|
||||
|
||||
# Compiled once at import time instead of on every call; matches ANSI SGR
# (color) sequences such as "\x1b[36m" and "\x1b[0m".
_ANSI_ESCAPE_RE = re.compile(r'\x1b\[[0-9;]*m')


def remove_ansi_escape_codes(text: str) -> str:
    """Return *text* with ANSI SGR (color) escape sequences stripped."""
    return _ANSI_ESCAPE_RE.sub('', text)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Ad-hoc manual smoke test for correct_name(); run this module directly
    # to eyeball the sanitized output.
    # text = "example string//with spaces---and////slashes"
    text = 'uBlock-filters-–-Resource-abuse-'
    print(correct_name(text))

    # Commented-out check for remove_ansi_escape_codes():
    # text = '\x1b[36mINFO\x1b[0m[0000] parsed rules: 778/1450'
    # print(remove_ansi_escape_codes(text).startswith('INFO'))
|
Loading…
x
Reference in New Issue
Block a user