mirror of https://github.com/itdoginfo/podkop.git
synced 2025-12-06 03:26:51 +03:00

Compare commits: 56829c74c8 ... 031c419ffb (6 commits)

031c419ffb
c13fdf5785
1b7ab606ba
2bf208ecac
e256e4bee5
32c385b309
@@ -81,4 +81,46 @@ trojan://ou8pLSyx9N@127.0.0.1:17737?type=httpupgrade&path=%2Fhttpupgradepath&hos

 # XHTTP
 trojan://VEetltxLtw@127.0.0.1:59072?type=xhttp&path=%2Fxhttppath&host=google.com&mode=auto&security=none#trojan-xhttp
+```
+
+## Hysteria2
+
+hysteria2://
+```
+# Basic (no authentication)
+hysteria2://127.0.0.1:443/#hysteria2-basic
+hysteria2://127.0.0.1:443/?insecure=1#hysteria2-basic-insecure
+
+# With password
+hysteria2://password@example.com:443/#hysteria2-password
+hysteria2://password@example.com:443/?insecure=0#hysteria2-password-insecure
+
+# With SNI
+hysteria2://password@example.com:443/?sni=example.com#hysteria2-password-sni
+
+# With obfuscation
+hysteria2://password@example.com:443/?obfs=salamander&obfs-password=obfspassword#hysteria2-obfs
+
+# All parameters combined
+hysteria2://mypassword@example.com:8443/?sni=example.com&obfs=salamander&obfs-password=obfspass&insecure=0#hysteria2-all-params
+```
+
+hy2://
+```
+# Basic (no authentication)
+hy2://127.0.0.1:443/#hysteria2-basic
+hy2://127.0.0.1:443/?insecure=1#hysteria2-basic-insecure
+
+# With password
+hy2://password@example.com:443/#hysteria2-password
+hy2://password@example.com:443/?insecure=0#hysteria2-password-insecure
+
+# With SNI
+hy2://password@example.com:443/?sni=example.com#hysteria2-password-sni
+
+# With obfuscation
+hy2://password@example.com:443/?obfs=salamander&obfs-password=obfspassword#hysteria2-obfs
+
+# All parameters combined
+hy2://mypassword@example.com:8443/?sni=example.com&obfs=salamander&obfs-password=obfspass&insecure=0#hysteria2-all-params
 ```
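All of the hysteria2:// / hy2:// test strings above share one shape: an optional password before the @, then host:port, then query parameters (sni, obfs, obfs-password, insecure) and a #fragment used as the entry label. Below is a minimal, illustrative POSIX-shell sketch of splitting one of those URLs into its parts; it is only a reading aid, not podkop's actual parser.

```
#!/bin/sh
# Illustrative only: split one of the hy2:// test URLs above into its parts.
url="hy2://mypassword@example.com:8443/?sni=example.com&obfs=salamander&obfs-password=obfspass&insecure=0#hysteria2-all-params"

rest="${url#*://}"              # strip the scheme
label="${rest#*#}"              # fragment -> hysteria2-all-params
rest="${rest%%#*}"
query="${rest#*\?}"             # sni=...&obfs=...&obfs-password=...&insecure=0
hostpart="${rest%%/*}"          # mypassword@example.com:8443
case "$hostpart" in
*@*) password="${hostpart%%@*}" hostport="${hostpart#*@}" ;;
*) password="" hostport="$hostpart" ;;
esac
host="${hostport%%:*}"
port="${hostport##*:}"

echo "label=$label password=$password host=$host port=$port"
echo "query=$query"
```

For the URL shown it prints label=hysteria2-all-params, password=mypassword, host=example.com, port=8443, plus the raw query string.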
@@ -445,7 +445,7 @@ function createSectionContent(section) {
 );
 o.value("disabled", _("Disabled"));
 o.value("dynamic", _("Dynamic List"));
-o.value("text", _("Text List (comma/space/newline separated)"));
+o.value("text", _("Text List"));
 o.default = "disabled";
 o.rmempty = false;
@@ -18,6 +18,7 @@ check_required_file "$PODKOP_LIB/helpers.sh"
 check_required_file "$PODKOP_LIB/sing_box_config_manager.sh"
 check_required_file "$PODKOP_LIB/sing_box_config_facade.sh"
 check_required_file "$PODKOP_LIB/logging.sh"
+check_required_file "$PODKOP_LIB/rulesets.sh"
 . /lib/config/uci.sh
 . /lib/functions.sh
 . "$PODKOP_LIB/constants.sh"
@@ -26,6 +27,7 @@ check_required_file "$PODKOP_LIB/logging.sh"
 . "$PODKOP_LIB/sing_box_config_manager.sh"
 . "$PODKOP_LIB/sing_box_config_facade.sh"
 . "$PODKOP_LIB/logging.sh"
+. "$PODKOP_LIB/rulesets.sh"

 config_load "$PODKOP_CONFIG"
@@ -865,66 +867,37 @@ configure_routing_for_section_lists() {

 if [ "$user_domain_list_type" != "disabled" ]; then
 log "Processing user domains routing rules for '$section' section"
-prepare_common_ruleset "$section" "domains" "$route_rule_tag"
-configure_user_domain_or_subnets_list "$section" "domains" "$route_rule_tag"
+configure_user_domain_list "$section" "$route_rule_tag"
 fi

 if [ "$user_subnet_list_type" != "disabled" ]; then
 log "Processing user subnets routing rules for '$section' section"
-prepare_common_ruleset "$section" "subnets" "$route_rule_tag"
-configure_user_domain_or_subnets_list "$section" "subnets" "$route_rule_tag"
+configure_user_subnet_list "$section" "$route_rule_tag"
 fi

 if [ -n "$local_domain_lists" ]; then
 log "Processing local domains routing rules for '$section' section"
-configure_local_domain_or_subnet_lists "$section" "domains" "$route_rule_tag"
+configure_local_domain_lists "$section" "$route_rule_tag"
 fi

 if [ -n "$local_subnet_lists" ]; then
 log "Processing local subnets routing rules for '$section' section"
-configure_local_domain_or_subnet_lists "$section" "subnets" "$route_rule_tag"
+configure_local_subnet_lists "$section" "$route_rule_tag"
 fi

 if [ -n "$remote_domain_lists" ]; then
 log "Processing remote domains routing rules for '$section' section"
-prepare_common_ruleset "$section" "domains" "$route_rule_tag"
 config_list_foreach "$section" "remote_domain_lists" configure_remote_domain_or_subnet_list_handler \
 "domains" "$section" "$route_rule_tag"
 fi

 if [ -n "$remote_subnet_lists" ]; then
 log "Processing remote subnets routing rules for '$section' section"
-prepare_common_ruleset "$section" "subnets" "$route_rule_tag"
 config_list_foreach "$section" "remote_subnet_lists" configure_remote_domain_or_subnet_list_handler \
 "subnets" "$section" "$route_rule_tag"
 fi
 }

-prepare_common_ruleset() {
-local section="$1"
-local type="$2"
-local route_rule_tag="$3"
-
-log "Preparing a common $type ruleset for '$section' section" "debug"
-ruleset_tag=$(get_ruleset_tag "$section" "common" "$type")
-ruleset_filename="$ruleset_tag.json"
-ruleset_filepath="$TMP_RULESET_FOLDER/$ruleset_filename"
-if file_exists "$ruleset_filepath"; then
-log "Ruleset $ruleset_filepath already exists. Skipping." "debug"
-else
-sing_box_cm_create_local_source_ruleset "$ruleset_filepath"
-config=$(sing_box_cm_add_local_ruleset "$config" "$ruleset_tag" "source" "$ruleset_filepath")
-config=$(sing_box_cm_patch_route_rule "$config" "$route_rule_tag" "rule_set" "$ruleset_tag")
-case "$type" in
-domains)
-config=$(sing_box_cm_patch_dns_route_rule "$config" "$SB_FAKEIP_DNS_RULE_TAG" "rule_set" "$ruleset_tag")
-;;
-subnets) ;;
-*) log "Unsupported remote rule set type: $type" "error" ;;
-esac
-fi
-}
-
 configure_community_list_handler() {
 local tag="$1"
 local section="$2"
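The branches above are driven by per-section UCI options; the option names (user_domain_list_type, user_domains, user_domains_text, user_subnet_list_type, user_subnets, user_subnets_text, local_domain_lists, local_subnet_lists, remote_domain_lists, remote_subnet_lists) match the variables checked here and the config_get / config_list_foreach calls in the surrounding hunks. A hedged example of populating them from the uci CLI; the config name podkop, the section name main, and every value below are assumptions made purely for illustration:

```
# Illustrative only: option names come from the hunks above, everything else is invented.
uci set podkop.main.user_domain_list_type='dynamic'
uci add_list podkop.main.user_domains='example.com'
uci add_list podkop.main.user_domains='example.org'
uci set podkop.main.user_subnet_list_type='text'
uci set podkop.main.user_subnets_text='10.0.0.0/8 192.168.100.0/24'
uci add_list podkop.main.local_domain_lists='/etc/podkop/local-domains.lst'
uci add_list podkop.main.remote_subnet_lists='https://example.com/subnets.lst'
uci commit podkop
```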
@@ -942,99 +915,113 @@ configure_community_list_handler() {
 config=$(sing_box_cm_patch_dns_route_rule "$config" "$SB_FAKEIP_DNS_RULE_TAG" "rule_set" "$ruleset_tag")
 }

-configure_user_domain_or_subnets_list() {
-local section="$1"
-local type="$2"
-
-local items ruleset_tag ruleset_filename ruleset_filepath json_array
-case "$type" in
-domains)
-local user_domain_list_type
-config_get user_domain_list_type "$section" "user_domain_list_type"
-case "$user_domain_list_type" in
-dynamic) config_get items "$section" "user_domains" ;;
-text) config_get items "$section" "user_domains_text" ;;
-esac
-;;
-subnets)
-local user_subnet_list_type
-config_get user_subnet_list_type "$section" "user_subnet_list_type"
-case "$user_subnet_list_type" in
-dynamic) config_get items "$section" "user_subnets" ;;
-text) config_get items "$section" "user_subnets_text" ;;
-esac
-;;
-esac
-
-ruleset_tag=$(get_ruleset_tag "$section" "common" "$type")
-ruleset_filename="$ruleset_tag.json"
-ruleset_filepath="$TMP_RULESET_FOLDER/$ruleset_filename"
-items="$(parse_domain_or_subnet_string_to_commas_string "$items" "$type")"
-json_array="$(comma_string_to_json_array "$items")"
-case "$type" in
-domains) sing_box_cm_patch_local_source_ruleset_rules "$ruleset_filepath" "domain_suffix" "$json_array" ;;
-subnets)
-sing_box_cm_patch_local_source_ruleset_rules "$ruleset_filepath" "ip_cidr" "$json_array"
-nft_add_set_elements "$NFT_TABLE_NAME" "$NFT_COMMON_SET_NAME" "$items"
-;;
-esac
-}
-
-configure_local_domain_or_subnet_lists() {
-local section="$1"
-local type="$2"
-local route_rule_tag="$3"
-
-local ruleset_tag ruleset_filename ruleset_filepath
-ruleset_tag="$(get_ruleset_tag "$section" "local" "$type")"
-ruleset_filename="$ruleset_tag.json"
-ruleset_filepath="$TMP_RULESET_FOLDER/$ruleset_filename"
-
-sing_box_cm_create_local_source_ruleset "$ruleset_filepath"
-config=$(sing_box_cm_add_local_ruleset "$config" "$ruleset_tag" "source" "$ruleset_filepath")
-config=$(sing_box_cm_patch_route_rule "$config" "$route_rule_tag" "rule_set" "$ruleset_tag")
-
-case "$type" in
-domains)
-config_list_foreach "$section" "local_domain_lists" import_local_domain_or_subnet_list "$type" \
-"$section" "$ruleset_filepath"
-config=$(sing_box_cm_patch_dns_route_rule "$config" "$SB_FAKEIP_DNS_RULE_TAG" "rule_set" "$ruleset_tag")
-;;
-subnets)
-config_list_foreach "$section" "local_subnet_lists" import_local_domain_or_subnet_list "$type" \
-"$section" "$ruleset_filepath"
-;;
-*) log "Unsupported local rule set type: $type" "error" ;;
-esac
-}
-
-import_local_domain_or_subnet_list() {
-local filepath="$1"
-local type="$2"
-local section="$3"
-local ruleset_filepath="$4"
-
-if ! file_exists "$filepath"; then
-log "File $filepath not found" "error"
-return 1
-fi
-
-local items json_array
-items="$(parse_domain_or_subnet_file_to_comma_string "$filepath" "$type")"
-
-if [ -z "$items" ]; then
-log "No valid $type found in $filepath" "warn"
-return 0
-fi
-
-json_array="$(comma_string_to_json_array "$items")"
-case "$type" in
-domains) sing_box_cm_patch_local_source_ruleset_rules "$ruleset_filepath" "domain_suffix" "$json_array" ;;
-subnets)
-sing_box_cm_patch_local_source_ruleset_rules "$ruleset_filepath" "ip_cidr" "$json_array"
-nft_add_set_elements_from_file_chunked "$filepath" "$NFT_TABLE_NAME" "$NFT_COMMON_SET_NAME"
-;;
-esac
-}
+prepare_source_ruleset() {
+local section="$1"
+local name="$2"
+local type="$3"
+local route_rule_tag="$4"
+
+log "Preparing a $name $type rule set for '$section' section" "debug"
+ruleset_tag=$(get_ruleset_tag "$section" "$name" "$type")
+ruleset_filepath="$TMP_RULESET_FOLDER/$ruleset_tag.json"
+create_source_rule_set "$ruleset_filepath"
+case $? in
+0)
+config=$(sing_box_cm_add_local_ruleset "$config" "$ruleset_tag" "source" "$ruleset_filepath")
+config=$(sing_box_cm_patch_route_rule "$config" "$route_rule_tag" "rule_set" "$ruleset_tag")
+case "$type" in
+domains)
+config=$(sing_box_cm_patch_dns_route_rule "$config" "$SB_FAKEIP_DNS_RULE_TAG" "rule_set" "$ruleset_tag")
+;;
+subnets) ;;
+*)
+log "Unsupported remote rule set type: $type" "error"
+return 1
+;;
+esac
+;;
+3) log "Source rule set $ruleset_filepath already exists, skipping." "debug" ;;
+esac
+}
+
+configure_user_domain_list() {
+local section="$1"
+local route_rule_tag="$2"
+
+prepare_source_ruleset "$section" "user" "domains" "$route_rule_tag"
+
+local user_domain_list_type items json_array
+config_get user_domain_list_type "$section" "user_domain_list_type"
+case "$user_domain_list_type" in
+dynamic) config_get items "$section" "user_domains" ;;
+text) config_get items "$section" "user_domains_text" ;;
+esac
+
+items="$(parse_domain_or_subnet_string_to_commas_string "$items" "domains")"
+json_array="$(comma_string_to_json_array "$items")"
+patch_source_ruleset_rules "$ruleset_filepath" "domain_suffix" "$json_array"
+}
+
+configure_user_subnet_list() {
+local section="$1"
+local route_rule_tag="$2"
+
+prepare_source_ruleset "$section" "user" "subnets" "$route_rule_tag"
+
+local user_subnet_list_type items json_array
+config_get user_subnet_list_type "$section" "user_subnet_list_type"
+case "$user_subnet_list_type" in
+dynamic) config_get items "$section" "user_subnets" ;;
+text) config_get items "$section" "user_subnets_text" ;;
+esac
+
+items="$(parse_domain_or_subnet_string_to_commas_string "$items" "subnets")"
+json_array="$(comma_string_to_json_array "$items")"
+patch_source_ruleset_rules "$ruleset_filepath" "ip_cidr" "$json_array"
+nft_add_set_elements "$NFT_TABLE_NAME" "$NFT_COMMON_SET_NAME" "$items"
+}
+
+configure_local_domain_lists() {
+local section="$1"
+local route_rule_tag="$2"
+
+prepare_source_ruleset "$section" "local" "domains" "$route_rule_tag"
+
+config_list_foreach "$section" "local_domain_lists" import_local_domain_list_handler "$ruleset_filepath"
+}
+
+import_local_domain_list_handler() {
+local local_domain_list_filepath="$1"
+local ruleset_filepath="$2"
+
+if ! file_exists "$local_domain_list_filepath"; then
+log "Local domain list file $local_domain_list_filepath not found" "error"
+return 1
+fi
+
+import_plain_domain_list_to_local_source_ruleset_chunked "$local_domain_list_filepath" "$ruleset_filepath"
+}
+
+configure_local_subnet_lists() {
+local section="$1"
+local route_rule_tag="$2"
+
+prepare_source_ruleset "$section" "local" "subnets" "$route_rule_tag"
+
+config_list_foreach "$section" "local_subnet_lists" import_local_subnets_list_handler "$ruleset_filepath"
+}
+
+import_local_subnets_list_handler() {
+local local_subnet_list_filepath="$1"
+local ruleset_filepath="$2"
+
+if ! file_exists "$local_subnet_list_filepath"; then
+log "Local subnet list file $local_subnet_list_filepath not found" "error"
+return 1
+fi
+
+import_plain_subnet_list_to_local_source_ruleset_chunked "$local_subnet_list_filepath" "$ruleset_filepath"
+nft_add_set_elements_from_file_chunked "$local_subnet_list_filepath" "$NFT_TABLE_NAME" "$NFT_COMMON_SET_NAME"
+}

 configure_remote_domain_or_subnet_list_handler() {
@@ -1045,9 +1032,10 @@ configure_remote_domain_or_subnet_list_handler() {

 local file_extension
 file_extension=$(url_get_file_extension "$url")
+log "Detected file extension: '$file_extension'" "debug"
 case "$file_extension" in
 json | srs)
-log "Detected file extension: '$file_extension' → proceeding with processing" "debug"
+log "Creating a remote $type ruleset from the source URL" "info"
 local basename ruleset_tag format detour update_interval
 basename=$(url_get_basename "$url")
 ruleset_tag=$(get_ruleset_tag "$section" "$basename" "remote-$type")
@@ -1066,7 +1054,7 @@ configure_remote_domain_or_subnet_list_handler() {
 esac
 ;;
 *)
-log "Detected file extension: '$file_extension' → no processing needed, managed on list_update" "debug"
+prepare_source_ruleset "$section" "remote" "$type" "$route_rule_tag"
 ;;
 esac
 }
@@ -1279,17 +1267,41 @@ import_domains_from_remote_domain_list_handler() {

 local file_extension
 file_extension=$(url_get_file_extension "$url")
+log "Detected file extension: '$file_extension'" "debug"
 case "$file_extension" in
 json | srs)
-log "Detected file extension: '$file_extension' → no update needed, sing-box manages updates" "debug"
+log "No update needed - sing-box manages updates automatically."
 ;;
 *)
-log "Detected file extension: '$file_extension' → proceeding with processing" "debug"
-import_domains_or_subnets_from_remote_file "$url" "$section" "domains"
+log "Import domains from a remote plain-text list"
+import_domains_from_remote_plain_file "$url" "$section"
 ;;
 esac
 }

+import_domains_from_remote_plain_file() {
+local url="$1"
+local section="$2"
+
+local tmpfile http_proxy_address items json_array
+tmpfile=$(mktemp)
+http_proxy_address="$(get_service_proxy_address)"
+
+download_to_file "$url" "$tmpfile" "$http_proxy_address"
+
+if [ $? -ne 0 ] || [ ! -s "$tmpfile" ]; then
+log "Download $url list failed" "error"
+return 1
+fi
+
+convert_crlf_to_lf "$tmpfile"
+ruleset_tag=$(get_ruleset_tag "$section" "remote" "domains")
+ruleset_filepath="$TMP_RULESET_FOLDER/$ruleset_tag.json"
+import_plain_domain_list_to_local_source_ruleset_chunked "$tmpfile" "$ruleset_filepath"
+
+rm -f "$tmpfile"
+}
+
 import_subnets_from_remote_subnet_lists() {
 local section="$1"
 local remote_subnet_lists
@@ -1308,61 +1320,23 @@ import_subnets_from_remote_subnet_list_handler() {

 local file_extension
 file_extension="$(url_get_file_extension "$url")"
+log "Detected file extension: '$file_extension'" "debug"
 case "$file_extension" in
 json)
-log "Detected file extension: '$file_extension' → proceeding with processing" "debug"
+log "Import subnets from a remote JSON list" "info"
 import_subnets_from_remote_json_file "$url"
 ;;
 srs)
-log "Detected file extension: '$file_extension' → proceeding with processing" "debug"
+log "Import subnets from a remote SRS list" "info"
 import_subnets_from_remote_srs_file "$url"
 ;;
 *)
-log "Detected file extension: '$file_extension' → proceeding with processing" "debug"
-import_domains_or_subnets_from_remote_file "$url" "$section" "subnets"
+log "Import subnets from a remote plain-text list" "info"
+import_subnets_from_remote_plain_file "$url" "$section"
 ;;
 esac
 }

-import_domains_or_subnets_from_remote_file() {
-local url="$1"
-local section="$2"
-local type="$3"
-
-local tmpfile http_proxy_address items json_array
-tmpfile=$(mktemp)
-http_proxy_address="$(get_service_proxy_address)"
-
-download_to_file "$url" "$tmpfile" "$http_proxy_address"
-
-if [ $? -ne 0 ] || [ ! -s "$tmpfile" ]; then
-log "Download $url list failed" "error"
-return 1
-fi
-
-convert_crlf_to_lf "$tmpfile"
-items="$(parse_domain_or_subnet_file_to_comma_string "$tmpfile" "$type")"
-
-if [ -z "$items" ]; then
-log "No valid $type found in $url" "warn"
-return 0
-fi
-
-ruleset_tag=$(get_ruleset_tag "$section" "common" "$type")
-ruleset_filename="$ruleset_tag.json"
-ruleset_filepath="$TMP_RULESET_FOLDER/$ruleset_filename"
-json_array="$(comma_string_to_json_array "$items")"
-case "$type" in
-domains) sing_box_cm_patch_local_source_ruleset_rules "$ruleset_filepath" "domain_suffix" "$json_array" ;;
-subnets)
-sing_box_cm_patch_local_source_ruleset_rules "$ruleset_filepath" "ip_cidr" "$json_array"
-nft_add_set_elements_from_file_chunked "$tmpfile" "$NFT_TABLE_NAME" "$NFT_COMMON_SET_NAME"
-;;
-esac
-
-rm -f "$tmpfile"
-}
-
 import_subnets_from_remote_json_file() {
 local url="$1"
 local json_tmpfile subnets_tmpfile http_proxy_address
@@ -1398,8 +1372,8 @@ import_subnets_from_remote_srs_file() {
 return 1
 fi

-if ! decompile_srs_file "$binary_tmpfile" "$json_tmpfile"; then
-log "Failed to decompile SRS file" "error"
+if ! decompile_binary_ruleset "$binary_tmpfile" "$json_tmpfile"; then
+log "Failed to decompile binary rule set file" "error"
 return 1
 fi
@@ -1408,6 +1382,31 @@ import_subnets_from_remote_srs_file() {
 rm -f "$binary_tmpfile" "$json_tmpfile" "$subnets_tmpfile"
 }

+import_subnets_from_remote_plain_file() {
+local url="$1"
+local section="$2"
+
+local tmpfile http_proxy_address items json_array
+tmpfile=$(mktemp)
+http_proxy_address="$(get_service_proxy_address)"
+
+download_to_file "$url" "$tmpfile" "$http_proxy_address"
+
+if [ $? -ne 0 ] || [ ! -s "$tmpfile" ]; then
+log "Download $url list failed" "error"
+return 1
+fi
+
+convert_crlf_to_lf "$tmpfile"
+
+ruleset_tag=$(get_ruleset_tag "$section" "remote" "subnets")
+ruleset_filepath="$TMP_RULESET_FOLDER/$ruleset_tag.json"
+import_plain_subnet_list_to_local_source_ruleset_chunked "$tmpfile" "$ruleset_filepath"
+nft_add_set_elements_from_file_chunked "$tmpfile" "$NFT_TABLE_NAME" "$NFT_COMMON_SET_NAME"
+
+rm -f "$tmpfile"
+}
+
 ## Support functions
 get_service_proxy_address() {
 local download_lists_via_proxy
@@ -1516,46 +1515,6 @@ nft_list_all_traffic_from_ip() {
 fi
 }

-nft_add_set_elements_from_file_chunked() {
-local filepath="$1"
-local nft_table_name="$2"
-local nft_set_name="$3"
-local chunk_size="${4:-5000}"
-
-local array count
-count=0
-while IFS= read -r line; do
-line=$(echo "$line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
-
-[ -z "$line" ] && continue
-
-if ! is_ipv4 "$line" && ! is_ipv4_cidr "$line"; then
-log "'$line' is not IPv4 or IPv4 CIDR" "debug"
-continue
-fi
-
-if [ -z "$array" ]; then
-array="$line"
-else
-array="$array,$line"
-fi
-
-count=$((count + 1))
-
-if [ "$count" = "$chunk_size" ]; then
-log "Adding $count elements to nft set $nft_set_name" "debug"
-nft_add_set_elements "$nft_table_name" "$nft_set_name" "$array"
-array=""
-count=0
-fi
-done < "$filepath"
-
-if [ -n "$array" ]; then
-log "Adding $count elements to nft set $nft_set_name" "debug"
-nft_add_set_elements "$nft_table_name" "$nft_set_name" "$array"
-fi
-}
-
 # Diagnotics
 check_proxy() {
 local sing_box_config_path
@@ -105,37 +105,6 @@ get_domain_resolver_tag() {
 echo "$section-$postfix"
 }

-# Constructs and returns a ruleset tag using section, name, optional type, and a fixed postfix
-get_ruleset_tag() {
-local section="$1"
-local name="$2"
-local type="$3"
-local postfix="ruleset"
-
-if [ -n "$type" ]; then
-echo "$section-$name-$type-$postfix"
-else
-echo "$section-$name-$postfix"
-fi
-}
-
-# Determines the ruleset format based on the file extension (json → source, srs → binary)
-get_ruleset_format_by_file_extension() {
-local file_extension="$1"
-
-local format
-case "$file_extension" in
-json) format="source" ;;
-srs) format="binary" ;;
-*)
-log "Unsupported file extension: .$file_extension" "error"
-return 1
-;;
-esac
-
-echo "$format"
-}
-
 # Converts a comma-separated string into a JSON array string
 comma_string_to_json_array() {
 local input="$1"
@@ -300,25 +269,6 @@ convert_crlf_to_lf() {
 fi
 }

-# Decompiles a sing-box SRS binary file into a JSON ruleset file
-decompile_srs_file() {
-local binary_filepath="$1"
-local output_filepath="$2"
-
-log "Decompiling $binary_filepath to $output_filepath" "debug"
-
-if ! file_exists "$binary_filepath"; then
-log "File $binary_filepath not found" "error"
-return 1
-fi
-
-sing-box rule-set decompile "$binary_filepath" -o "$output_filepath"
-if [[ $? -ne 0 ]]; then
-log "Decompilation command failed for $binary_filepath" "error"
-return 1
-fi
-}
-
 #######################################
 # Parses a whitespace-separated string, validates items as either domains
 # or IPv4 addresses/subnets, and returns a comma-separated string of valid items.
@@ -387,18 +337,4 @@ parse_domain_or_subnet_file_to_comma_string() {
 done < "$filepath"

 echo "$result"
-}
-
-# Extracts all ip_cidr entries from a JSON ruleset file and writes them to an output file.
-extract_ip_cidr_from_json_ruleset_to_file() {
-local json_file="$1"
-local output_file="$2"
-
-if [ ! -f "$json_file" ]; then
-log "JSON file not found: $json_file" "error"
-return 1
-fi
-
-log "Extracting ip_cidr entries from $json_file to $output_file" "debug"
-jq -r '.rules[].ip_cidr[]' "$json_file" > "$output_file"
 }
@@ -27,4 +27,44 @@ nft_add_set_elements() {
 local elements="$3"

 nft add element inet "$table" "$set" "{ $elements }"
+}
+
+nft_add_set_elements_from_file_chunked() {
+local filepath="$1"
+local nft_table_name="$2"
+local nft_set_name="$3"
+local chunk_size="${4:-5000}"
+
+local array count
+count=0
+while IFS= read -r line; do
+line=$(echo "$line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
+
+[ -z "$line" ] && continue
+
+if ! is_ipv4 "$line" && ! is_ipv4_cidr "$line"; then
+log "'$line' is not IPv4 or IPv4 CIDR" "debug"
+continue
+fi
+
+if [ -z "$array" ]; then
+array="$line"
+else
+array="$array,$line"
+fi
+
+count=$((count + 1))
+
+if [ "$count" = "$chunk_size" ]; then
+log "Adding $count elements to nft set $nft_set_name" "debug"
+nft_add_set_elements "$nft_table_name" "$nft_set_name" "$array"
+array=""
+count=0
+fi
+done < "$filepath"
+
+if [ -n "$array" ]; then
+log "Adding $count elements to nft set $nft_set_name" "debug"
+nft_add_set_elements "$nft_table_name" "$nft_set_name" "$array"
+fi
 }
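For context, what nft_add_set_elements ultimately issues is a single `nft add element inet <table> <set> { e1, e2, ... }` call per chunk. A standalone, hedged example against a throwaway table; the names below are placeholders, not podkop's real $NFT_TABLE_NAME / $NFT_COMMON_SET_NAME:

```
# Illustrative only: create a throwaway set and add one chunk of elements to it.
nft add table inet demo_table
nft add set inet demo_table demo_set '{ type ipv4_addr; flags interval; }'
nft add element inet demo_table demo_set '{ 10.0.0.0/8, 192.168.100.1 }'
nft list set inet demo_table demo_set
nft delete table inet demo_table
```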
podkop/files/usr/lib/rulesets.sh (new file, +180 lines)
@@ -0,0 +1,180 @@

# Constructs and returns a ruleset tag using section, name, optional type, and a fixed postfix
get_ruleset_tag() {
    local section="$1"
    local name="$2"
    local type="$3"
    local postfix="ruleset"

    if [ -n "$type" ]; then
        echo "$section-$name-$type-$postfix"
    else
        echo "$section-$name-$postfix"
    fi
}
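As a quick reading aid, the tag is just the pieces joined with dashes plus the fixed ruleset postfix; callers elsewhere in this diff build the on-disk filename by appending .json under $TMP_RULESET_FOLDER. The section name and folder in this sketch are made up:

```
# Illustrative only: what the tag and file path end up looking like.
section="youtube" name="user" type="domains"
echo "$section-$name-$type-ruleset"                      # youtube-user-domains-ruleset
echo "/tmp/rulesets/$section-$name-$type-ruleset.json"   # filename pattern; the real $TMP_RULESET_FOLDER value is not shown in this diff
```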
# Creates a new ruleset JSON file if it doesn't already exist
create_source_rule_set() {
    local ruleset_filepath="$1"

    if file_exists "$ruleset_filepath"; then
        return 3
    fi

    jq -n '{version: 3, rules: []}' > "$ruleset_filepath"
}

#######################################
# Patch a source ruleset JSON file for sing-box by appending a new ruleset object containing the provided key
# and value.
# Arguments:
#   filepath: path to the JSON file to patch
#   key: the ruleset key to insert (e.g., "ip_cidr")
#   value: a JSON array of values to assign to the key
# Example:
#   patch_source_ruleset_rules "/tmp/sing-box/ruleset.json" "ip_cidr" '["1.1.1.1","2.2.2.2"]'
#######################################
patch_source_ruleset_rules() {
    local filepath="$1"
    local key="$2"
    local value="$3"

    local tmpfile=$(mktemp)

    jq --arg key "$key" --argjson value "$value" \
        '( .rules | map(has($key)) | index(true) ) as $idx |
        if $idx != null then
            .rules[$idx][$key] = (.rules[$idx][$key] + $value | unique)
        else
            .rules += [{ ($key): $value }]
        end' "$filepath" > "$tmpfile"

    if [ $? -ne 0 ]; then
        rm -f "$tmpfile"
        return 1
    fi

    mv "$tmpfile" "$filepath"
}
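Taken together, create_source_rule_set and patch_source_ruleset_rules produce a sing-box source rule-set: a JSON document with version 3 and a rules array whose objects carry keys such as domain_suffix or ip_cidr. A small self-contained jq demo of that shape; it is simplified, so unlike the real helper it does not merge into an existing rule or deduplicate:

```
# Illustrative only: the JSON shape these helpers build.
tmp=$(mktemp)
jq -n '{version: 3, rules: []}' > "$tmp"
jq --argjson value '["example.com","example.org"]' '.rules += [{domain_suffix: $value}]' "$tmp"
# prints a document with one rule: {"version":3,"rules":[{"domain_suffix":["example.com","example.org"]}]}
rm -f "$tmp"
```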
# Imports a plain domain list into a ruleset in chunks, validating domains and appending them as domain_suffix rules
import_plain_domain_list_to_local_source_ruleset_chunked() {
    local plain_list_filepath="$1"
    local ruleset_filepath="$2"
    local chunk_size="${3:-5000}"

    local array count json_array
    count=0
    while IFS= read -r line; do
        line=$(echo "$line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')

        [ -z "$line" ] && continue

        if ! is_domain_suffix "$line"; then
            log "'$line' is not a valid domain" "debug"
            continue
        fi

        if [ -z "$array" ]; then
            array="$line"
        else
            array="$array,$line"
        fi

        count=$((count + 1))

        if [ "$count" = "$chunk_size" ]; then
            log "Adding $count elements to rule set at $ruleset_filepath" "debug"
            json_array="$(comma_string_to_json_array "$array")"
            patch_source_ruleset_rules "$ruleset_filepath" "domain_suffix" "$json_array"
            array=""
            count=0
        fi
    done < "$plain_list_filepath"

    if [ -n "$array" ]; then
        log "Adding $count elements to rule set at $ruleset_filepath" "debug"
        json_array="$(comma_string_to_json_array "$array")"
        patch_source_ruleset_rules "$ruleset_filepath" "domain_suffix" "$json_array"
    fi
}

# Imports a plain IPv4/CIDR list into a ruleset in chunks, validating entries and appending them as ip_cidr rules
import_plain_subnet_list_to_local_source_ruleset_chunked() {
    local plain_list_filepath="$1"
    local ruleset_filepath="$2"
    local chunk_size="${3:-5000}"

    local array count json_array
    count=0
    while IFS= read -r line; do
        line=$(echo "$line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')

        [ -z "$line" ] && continue

        if ! is_ipv4 "$line" && ! is_ipv4_cidr "$line"; then
            log "'$line' is not IPv4 or IPv4 CIDR" "debug"
            continue
        fi

        if [ -z "$array" ]; then
            array="$line"
        else
            array="$array,$line"
        fi

        count=$((count + 1))

        if [ "$count" = "$chunk_size" ]; then
            log "Adding $count elements to ruleset at $ruleset_filepath" "debug"
            json_array="$(comma_string_to_json_array "$array")"
            patch_source_ruleset_rules "$ruleset_filepath" "ip_cidr" "$json_array"
            array=""
            count=0
        fi
    done < "$plain_list_filepath"

    if [ -n "$array" ]; then
        log "Adding $count elements to ruleset at $ruleset_filepath" "debug"
        json_array="$(comma_string_to_json_array "$array")"
        patch_source_ruleset_rules "$ruleset_filepath" "ip_cidr" "$json_array"
    fi
}
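Both importers above follow the same chunking pattern: trim each line, skip empty or invalid entries, collect a comma-joined batch, flush every chunk_size entries, and flush the leftover batch once the loop ends. A standalone sketch of just that pattern, with echo standing in for patch_source_ruleset_rules so it runs on its own:

```
# Illustrative only: the shared chunking pattern, with a toy chunk size and inline data.
chunk_size=3
array="" count=0
while IFS= read -r line; do
    line=$(echo "$line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
    [ -z "$line" ] && continue
    if [ -z "$array" ]; then array="$line"; else array="$array,$line"; fi
    count=$((count + 1))
    if [ "$count" = "$chunk_size" ]; then
        echo "flush: $array"    # the real code builds a JSON array and patches the ruleset here
        array="" count=0
    fi
done <<EOF
a.example.com
b.example.com
c.example.com
d.example.com
e.example.com
EOF
[ -n "$array" ] && echo "flush: $array"    # leftover partial chunk
```

It prints "flush: a.example.com,b.example.com,c.example.com" followed by "flush: d.example.com,e.example.com".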
# Determines the ruleset format based on the file extension (json → source, srs → binary)
get_ruleset_format_by_file_extension() {
    local file_extension="$1"

    local format
    case "$file_extension" in
        json) format="source" ;;
        srs) format="binary" ;;
        *)
            log "Unsupported file extension: .$file_extension" "error"
            return 1
            ;;
    esac

    echo "$format"
}

# Decompiles a sing-box SRS binary file into a JSON ruleset file
decompile_binary_ruleset() {
    local binary_filepath="$1"
    local output_filepath="$2"

    log "Decompiling $binary_filepath to $output_filepath" "debug"
    sing-box rule-set decompile "$binary_filepath" -o "$output_filepath"
    if [[ $? -ne 0 ]]; then
        log "Decompilation command failed for $binary_filepath" "error"
        return 1
    fi
}

# Extracts all ip_cidr entries from a JSON ruleset file and writes them to an output file.
extract_ip_cidr_from_json_ruleset_to_file() {
    local json_file="$1"
    local output_file="$2"

    log "Extracting ip_cidr entries from $json_file to $output_file" "debug"
    jq -r '.rules[].ip_cidr[]' "$json_file" > "$output_file"
}
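decompile_binary_ruleset and extract_ip_cidr_from_json_ruleset_to_file wrap two commands that can also be run by hand when debugging a downloaded list; the paths below are placeholders:

```
# Illustrative only: the underlying commands, run manually against placeholder paths.
sing-box rule-set decompile /tmp/example.srs -o /tmp/example.json
jq -r '.rules[].ip_cidr[]' /tmp/example.json > /tmp/example-subnets.txt
```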
@@ -1365,51 +1365,6 @@ sing_box_cm_configure_clash_api() {
 + (if $secret != "" then { secret: $secret } else {} end)'
 }

-#######################################
-# Create a local source ruleset JSON file for sing-box.
-# Arguments:
-#   filepath: path to the JSON file to create
-# Example:
-#   sing_box_cm_create_local_source_ruleset "/tmp/sing-box/ruleset.json"
-#######################################
-sing_box_cm_create_local_source_ruleset() {
-local filepath="$1"
-
-jq -n '{version: 3, rules: []}' > "$filepath"
-}
-
-#######################################
-# Patch a local source ruleset JSON file for sing-box by adding unique! values to a given key.
-# Arguments:
-#   filepath: path to the JSON file to patch
-#   key: the ruleset key to update (e.g., "ip_cidr")
-#   value: a JSON array of values to add to the key
-# Example:
-#   sing_box_cm_patch_local_source_ruleset_rules "/tmp/sing-box/ruleset.json" "ip_cidr" '["1.1.1.1","2.2.2.2"]'
-#######################################
-sing_box_cm_patch_local_source_ruleset_rules() {
-local filepath="$1"
-local key="$2"
-local value="$3"
-
-value=$(_normalize_arg "$value")
-
-local content
-content="$(cat "$filepath")"
-
-echo "$content" | jq \
---arg key "$key" \
---argjson value "$value" '
-([.rules[]?[$key][]] | unique) as $existing
-| ($value - $existing) as $value
-| if ($value | length) > 0 then
-.rules += [{($key): $value}]
-else
-.
-end
-' > "$filepath"
-}
-
 #######################################
 # Save a sing-box JSON configuration to a file, removing service-specific tags.
 # Arguments: