Mirror of https://github.com/sub-store-org/Sub-Store.git (synced 2025-08-10 00:52:40 +00:00)

Compare commits (1 commit): 42767cea21
@@ -31,7 +31,7 @@ Core functionalities:
- [x] Clash Proxy JSON(single line)
- [x] QX (SS, SSR, VMess, Trojan, HTTP, SOCKS5, VLESS)
- [x] Loon (SS, SSR, VMess, Trojan, HTTP, SOCKS5, SOCKS5-TLS, WireGuard, VLESS, Hysteria 2)
- [x] Surge (SS, VMess, Trojan, HTTP, SOCKS5, SOCKS5-TLS, TUIC, Snell, Hysteria 2, SSH(Password authentication only), External Proxy Program(only for macOS), WireGuard(Surge to Surge))
- [x] Surge (SS, VMess, Trojan, HTTP, SOCKS5, SOCKS5-TLS, TUIC, Snell, Hysteria 2, SSH(Password authentication only), SSR(external, only for macOS), External Proxy Program(only for macOS), WireGuard(Surge to Surge))
- [x] Surfboard (SS, VMess, Trojan, HTTP, SOCKS5, SOCKS5-TLS, WireGuard(Surfboard to Surfboard))
- [x] Shadowrocket (SS, SSR, VMess, Trojan, HTTP, SOCKS5, Snell, VLESS, WireGuard, Hysteria, Hysteria 2, TUIC)
- [x] Clash.Meta (SS, SSR, VMess, Trojan, HTTP, SOCKS5, Snell, VLESS, WireGuard, Hysteria, Hysteria 2, TUIC)

@@ -46,7 +46,6 @@ Core functionalities:
- [x] Clash
- [x] Surfboard
- [x] Surge
- [x] SurgeMac(Use mihomo to support protocols that are not supported by Surge itself)
- [x] Loon
- [x] Shadowrocket
- [x] QX

@@ -1,6 +1,6 @@
{
"name": "sub-store",
"version": "2.14.385",
"version": "2.14.369",
"description": "Advanced Subscription Manager for QX, Loon, Surge, Stash and ShadowRocket.",
"main": "src/main.js",
"scripts": {

@@ -9,7 +9,6 @@ import {
isNotBlank,
ipAddress,
getRandomPort,
numberToString,
} from '@/utils';
import PROXY_PROCESSORS, { ApplyProcessor } from './processors';
import PROXY_PREPROCESSORS from './preprocessors';
@@ -78,13 +77,7 @@ function parse(raw) {
return proxies;
}

async function processFn(
proxies,
operators = [],
targetPlatform,
source,
$options,
) {
async function processFn(proxies, operators = [], targetPlatform, source) {
for (const item of operators) {
// process script
let script;

@@ -183,7 +176,6 @@ async function processFn(
targetPlatform,
$arguments,
source,
$options,
);
} else {
processor = PROXY_PROCESSORS[item.type](item.args || {});

@@ -210,6 +202,8 @@ function produce(proxies, targetPlatform, type, opts = {}) {
);

proxies = proxies.map((proxy) => {
proxy._subName = proxy.subName;
proxy._collectionName = proxy.collectionName;
proxy._resolved = proxy.resolved;

if (!isNotBlank(proxy.name)) {

@@ -230,7 +224,6 @@ function produce(proxies, targetPlatform, type, opts = {}) {

// 处理 端口跳跃
if (proxy.ports) {
proxy.ports = String(proxy.ports);
if (!['ClashMeta'].includes(targetPlatform)) {
proxy.ports = proxy.ports.replace(/\//g, ',');
}
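A minimal sketch of the port-hopping normalisation in the hunk above, assuming the same `/`-separated input format; the target name and the sample ports are illustrative only:

// Hysteria2-style port hopping is often declared as '443/8443/5000-6000'.
let ports = String('443/8443/5000-6000');
const targetPlatform = 'Surge'; // every target except ClashMeta gets comma separators
if (!['ClashMeta'].includes(targetPlatform)) {
    ports = ports.replace(/\//g, ','); // -> '443,8443,5000-6000'
}
console.log(ports);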
@@ -244,10 +237,21 @@ function produce(proxies, targetPlatform, type, opts = {}) {

$.log(`Producing proxies for target: ${targetPlatform}`);
if (typeof producer.type === 'undefined' || producer.type === 'SINGLE') {
let localPort = 10000;
let list = proxies
.map((proxy) => {
try {
return producer.produce(proxy, type, opts);
let line = producer.produce(proxy, type, opts);
if (
line.length > 0 &&
line.includes('__SubStoreLocalPort__')
) {
line = line.replace(
/__SubStoreLocalPort__/g,
localPort++,
);
}
return line;
} catch (err) {
$.error(
`Cannot produce proxy: ${JSON.stringify(
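The new `__SubStoreLocalPort__` handling above can be shown in isolation; a small sketch with made-up producer output (the counter starts at 10000 and advances once per line carrying the placeholder):

let localPort = 10000;
const producedLines = [
    'node-a = external, exec = "/usr/local/bin/mihomo", local-port = __SubStoreLocalPort__',
    'node-b = ss, example.com, 443, encrypt-method=aes-128-gcm, password=pwd',
];
const list = producedLines.map((line) => {
    if (line.length > 0 && line.includes('__SubStoreLocalPort__')) {
        line = line.replace(/__SubStoreLocalPort__/g, localPort++); // node-a gets 10000
    }
    return line;
});
console.log(list.join('\n'));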
@@ -266,7 +270,7 @@ function produce(proxies, targetPlatform, type, opts = {}) {
proxies.length > 0 &&
proxies.every((p) => p.type === 'wireguard')
) {
list = `#!name=${proxies[0]?._subName}
list = `#!name=${proxies[0]?.subName}
#!desc=${proxies[0]?._desc ?? ''}
#!category=${proxies[0]?._category ?? ''}
${list}`;

@@ -312,23 +316,7 @@ function safeMatch(parser, line) {
}
}

function formatTransportPath(path) {
if (typeof path === 'string' || typeof path === 'number') {
path = String(path).trim();

if (path === '') {
return '/';
} else if (!path.startsWith('/')) {
return '/' + path;
}
}
return path;
}
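Illustrative calls to the `formatTransportPath` helper defined above (the inputs are made up):

formatTransportPath('');          // '/'        an empty path falls back to the root path
formatTransportPath('ws');        // '/ws'      a missing leading slash is added
formatTransportPath(' /video ');  // '/video'   strings are trimmed first
formatTransportPath(undefined);   // undefined  values that are not strings/numbers pass through unchanged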
function lastParse(proxy) {
|
||||
if (typeof proxy.password === 'number') {
|
||||
proxy.password = numberToString(proxy.password);
|
||||
}
|
||||
if (proxy.interface) {
|
||||
proxy['interface-name'] = proxy.interface;
|
||||
delete proxy.interface;
|
||||
@@ -356,17 +344,6 @@ function lastParse(proxy) {
|
||||
delete proxy['ws-headers'];
|
||||
}
|
||||
|
||||
const transportPath = proxy[`${proxy.network}-opts`]?.path;
|
||||
|
||||
if (Array.isArray(transportPath)) {
|
||||
proxy[`${proxy.network}-opts`].path = transportPath.map((item) =>
|
||||
formatTransportPath(item),
|
||||
);
|
||||
} else if (transportPath != null) {
|
||||
proxy[`${proxy.network}-opts`].path =
|
||||
formatTransportPath(transportPath);
|
||||
}
|
||||
|
||||
if (proxy.type === 'trojan') {
|
||||
if (proxy.network === 'tcp') {
|
||||
delete proxy.network;
|
||||
@@ -447,13 +424,13 @@ function lastParse(proxy) {
|
||||
proxy[`${proxy.network}-opts`].path = [transportPath];
|
||||
}
|
||||
}
|
||||
// if (['hysteria', 'hysteria2', 'tuic'].includes(proxy.type)) {
|
||||
if (proxy.ports) {
|
||||
proxy.ports = String(proxy.ports).replace(/\//g, ',');
|
||||
} else {
|
||||
delete proxy.ports;
|
||||
if (['hysteria', 'hysteria2'].includes(proxy.type)) {
|
||||
if (proxy.ports) {
|
||||
proxy.ports = proxy.ports.replace(/\//g, ',');
|
||||
} else {
|
||||
delete proxy.ports;
|
||||
}
|
||||
}
|
||||
// }
|
||||
if (
|
||||
['hysteria2'].includes(proxy.type) &&
|
||||
proxy.obfs &&
|
||||
|
||||
@@ -316,7 +316,7 @@ function RegexDeleteOperator(regex) {
|
||||
1. This function name should be `operator`!
|
||||
2. Always declare variables before using them!
|
||||
*/
|
||||
function ScriptOperator(script, targetPlatform, $arguments, source, $options) {
|
||||
function ScriptOperator(script, targetPlatform, $arguments, source) {
|
||||
return {
|
||||
name: 'Script Operator',
|
||||
func: async (proxies) => {
|
||||
@@ -326,7 +326,6 @@ function ScriptOperator(script, targetPlatform, $arguments, source, $options) {
|
||||
'operator',
|
||||
script,
|
||||
$arguments,
|
||||
$options,
|
||||
);
|
||||
output = operator(proxies, targetPlatform, { source, ...env });
|
||||
})();
|
||||
@@ -339,9 +338,9 @@ function ScriptOperator(script, targetPlatform, $arguments, source, $options) {
|
||||
'operator',
|
||||
`async function operator(input = []) {
|
||||
if (input && (input.$files || input.$content)) {
|
||||
let { $content, $files, $options } = input
|
||||
let { $content, $files } = input
|
||||
${script}
|
||||
return { $content, $files, $options }
|
||||
return { $content, $files }
|
||||
} else {
|
||||
let proxies = input
|
||||
let list = []
|
||||
@@ -353,7 +352,6 @@ function ScriptOperator(script, targetPlatform, $arguments, source, $options) {
|
||||
}
|
||||
}`,
|
||||
$arguments,
|
||||
$options,
|
||||
);
|
||||
output = operator(proxies, targetPlatform, { source, ...env });
|
||||
})();
|
||||
@@ -421,23 +419,26 @@ const DOMAIN_RESOLVERS = {
const id = hex_md5(`GOOGLE:${domain}:${type}`);
const cached = resourceCache.get(id);
if (!noCache && cached) return cached;
const answerType = type === 'IPv6' ? 'AAAA' : 'A';
const res = await doh({
url: 'https://8.8.4.4/dns-query',
domain,
type: answerType,
const resp = await $.http.get({
url: `https://8.8.4.4/resolve?name=${encodeURIComponent(
domain,
)}&type=${
type === 'IPv6' ? 'AAAA' : 'A'
}&edns_client_subnet=${edns}`,
headers: {
accept: 'application/dns-json',
},
timeout,
edns,
});

const { answers } = res;
const body = JSON.parse(resp.body);
if (body['Status'] !== 0) {
throw new Error(`Status is ${body['Status']}`);
}
const answers = body['Answer'];
if (!Array.isArray(answers) || answers.length === 0) {
throw new Error('No answers');
}
const result = answers
.filter((i) => i?.type === answerType)
.map((i) => i?.data)
.filter((i) => i);
const result = answers.map((i) => i?.data).filter((i) => i);
if (result.length === 0) {
throw new Error('No answers');
}

@@ -471,27 +472,28 @@ const DOMAIN_RESOLVERS = {
resourceCache.set(id, result);
return result;
},
Cloudflare: async function (domain, type, noCache, timeout, edns) {
Cloudflare: async function (domain, type, noCache, timeout) {
const id = hex_md5(`CLOUDFLARE:${domain}:${type}`);
const cached = resourceCache.get(id);
if (!noCache && cached) return cached;
const answerType = type === 'IPv6' ? 'AAAA' : 'A';
const res = await doh({
url: 'https://1.0.0.1/dns-query',
domain,
type: answerType,
const resp = await $.http.get({
url: `https://1.0.0.1/dns-query?name=${encodeURIComponent(
domain,
)}&type=${type === 'IPv6' ? 'AAAA' : 'A'}`,
headers: {
accept: 'application/dns-json',
},
timeout,
edns,
});

const { answers } = res;
const body = JSON.parse(resp.body);
if (body['Status'] !== 0) {
throw new Error(`Status is ${body['Status']}`);
}
const answers = body['Answer'];
if (!Array.isArray(answers) || answers.length === 0) {
throw new Error('No answers');
}
const result = answers
.filter((i) => i?.type === answerType)
.map((i) => i?.data)
.filter((i) => i);
const result = answers.map((i) => i?.data).filter((i) => i);
if (result.length === 0) {
throw new Error('No answers');
}
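Both resolvers above now query a public DNS-over-HTTPS JSON endpoint through `$.http.get` instead of the shared `doh()` helper, check `Status`, and keep only answers of the requested record type. A standalone sketch of the same kind of lookup using Node 18+'s built-in fetch (domain and type are placeholders; note that the JSON API reports record types numerically, A = 1 and AAAA = 28):

async function resolveOverDoh(domain, type = 'IPv4') {
    const answerType = type === 'IPv6' ? 'AAAA' : 'A';
    const resp = await fetch(
        `https://1.0.0.1/dns-query?name=${encodeURIComponent(domain)}&type=${answerType}`,
        { headers: { accept: 'application/dns-json' } },
    );
    const body = await resp.json();
    if (body.Status !== 0) throw new Error(`Status is ${body.Status}`);
    const answers = Array.isArray(body.Answer) ? body.Answer : [];
    const result = answers
        .filter((i) => i?.type === (answerType === 'AAAA' ? 28 : 1)) // numeric RR type
        .map((i) => i?.data)
        .filter((i) => i);
    if (result.length === 0) throw new Error('No answers');
    return result; // array of IP address strings
}

// resolveOverDoh('example.com').then(console.log);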
@@ -792,7 +794,7 @@ function TypeFilter(types) {
|
||||
1. This function name should be `filter`!
|
||||
2. Always declare variables before using them!
|
||||
*/
|
||||
function ScriptFilter(script, targetPlatform, $arguments, source, $options) {
|
||||
function ScriptFilter(script, targetPlatform, $arguments, source) {
|
||||
return {
|
||||
name: 'Script Filter',
|
||||
func: async (proxies) => {
|
||||
@@ -802,7 +804,6 @@ function ScriptFilter(script, targetPlatform, $arguments, source, $options) {
|
||||
'filter',
|
||||
script,
|
||||
$arguments,
|
||||
$options,
|
||||
);
|
||||
output = filter(proxies, targetPlatform, { source, ...env });
|
||||
})();
|
||||
@@ -825,7 +826,6 @@ function ScriptFilter(script, targetPlatform, $arguments, source, $options) {
|
||||
return list
|
||||
}`,
|
||||
$arguments,
|
||||
$options,
|
||||
);
|
||||
output = filter(proxies, targetPlatform, { source, ...env });
|
||||
})();
|
||||
@@ -966,7 +966,7 @@ function clone(object) {
|
||||
return JSON.parse(JSON.stringify(object));
|
||||
}
|
||||
|
||||
function createDynamicFunction(name, script, $arguments, $options) {
|
||||
function createDynamicFunction(name, script, $arguments) {
|
||||
const flowUtils = {
|
||||
getFlowField,
|
||||
getFlowHeaders,
|
||||
@@ -978,7 +978,6 @@ function createDynamicFunction(name, script, $arguments, $options) {
|
||||
if ($.env.isLoon) {
|
||||
return new Function(
|
||||
'$arguments',
|
||||
'$options',
|
||||
'$substore',
|
||||
'lodash',
|
||||
'$persistentStore',
|
||||
@@ -992,7 +991,6 @@ function createDynamicFunction(name, script, $arguments, $options) {
|
||||
`${script}\n return ${name}`,
|
||||
)(
|
||||
$arguments,
|
||||
$options,
|
||||
$,
|
||||
lodash,
|
||||
// eslint-disable-next-line no-undef
|
||||
@@ -1010,7 +1008,6 @@ function createDynamicFunction(name, script, $arguments, $options) {
|
||||
} else {
|
||||
return new Function(
|
||||
'$arguments',
|
||||
'$options',
|
||||
'$substore',
|
||||
'lodash',
|
||||
'ProxyUtils',
|
||||
@@ -1021,7 +1018,6 @@ function createDynamicFunction(name, script, $arguments, $options) {
|
||||
`${script}\n return ${name}`,
|
||||
)(
|
||||
$arguments,
|
||||
$options,
|
||||
$,
|
||||
lodash,
|
||||
ProxyUtils,
|
||||
|
||||
@@ -163,11 +163,9 @@ export default function Clash_Producer() {
|
||||
delete proxy.id;
|
||||
delete proxy.resolved;
|
||||
delete proxy['no-resolve'];
|
||||
if (type !== 'internal') {
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
}
|
||||
if (
|
||||
|
||||
@@ -178,11 +178,9 @@ export default function ClashMeta_Producer() {
|
||||
delete proxy.id;
|
||||
delete proxy.resolved;
|
||||
delete proxy['no-resolve'];
|
||||
if (type !== 'internal') {
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
}
|
||||
if (
|
||||
|
||||
@@ -181,11 +181,9 @@ export default function ShadowRocket_Producer() {
|
||||
delete proxy.id;
|
||||
delete proxy.resolved;
|
||||
delete proxy['no-resolve'];
|
||||
if (type !== 'internal') {
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
}
|
||||
if (
|
||||
|
||||
@@ -204,6 +204,11 @@ const tlsParser = (proxy, parsedProxy) => {
|
||||
if (proxy.ca) parsedProxy.tls.certificate_path = `${proxy.ca}`;
|
||||
if (proxy.ca_str) parsedProxy.tls.certificate = [proxy.ca_str];
|
||||
if (proxy['ca-str']) parsedProxy.tls.certificate = [proxy['ca-str']];
|
||||
if (proxy['client-fingerprint'] && proxy['client-fingerprint'] !== '')
|
||||
parsedProxy.tls.utls = {
|
||||
enabled: true,
|
||||
fingerprint: proxy['client-fingerprint'],
|
||||
};
|
||||
if (proxy['reality-opts']) {
|
||||
parsedProxy.tls.reality = { enabled: true };
|
||||
if (proxy['reality-opts']['public-key'])
|
||||
@@ -212,13 +217,7 @@ const tlsParser = (proxy, parsedProxy) => {
|
||||
if (proxy['reality-opts']['short-id'])
|
||||
parsedProxy.tls.reality.short_id =
|
||||
proxy['reality-opts']['short-id'];
|
||||
parsedProxy.tls.utls = { enabled: true };
|
||||
}
|
||||
if (proxy['client-fingerprint'] && proxy['client-fingerprint'] !== '')
|
||||
parsedProxy.tls.utls = {
|
||||
enabled: true,
|
||||
fingerprint: proxy['client-fingerprint'],
|
||||
};
|
||||
if (!parsedProxy.tls.enabled) delete parsedProxy.tls;
|
||||
};
|
||||
|
||||
|
||||
@@ -272,11 +272,9 @@ export default function Stash_Producer() {
|
||||
delete proxy.id;
|
||||
delete proxy.resolved;
|
||||
delete proxy['no-resolve'];
|
||||
if (type !== 'internal') {
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
for (const key in proxy) {
|
||||
if (proxy[key] == null || /^_/i.test(key)) {
|
||||
delete proxy[key];
|
||||
}
|
||||
}
|
||||
if (
|
||||
|
||||
@@ -15,9 +15,6 @@ const ipVersions = {
|
||||
export default function Surge_Producer() {
|
||||
const produce = (proxy, type, opts = {}) => {
|
||||
proxy.name = proxy.name.replace(/=|,/g, '');
|
||||
if (proxy.ports) {
|
||||
proxy.ports = String(proxy.ports);
|
||||
}
|
||||
switch (proxy.type) {
|
||||
case 'ss':
|
||||
return shadowsocks(proxy);
|
||||
@@ -679,7 +676,7 @@ function tuic(proxy) {
|
||||
);
|
||||
|
||||
if (isPresent(proxy, 'ports')) {
|
||||
result.append(`,port-hopping="${proxy.ports.replace(/,/g, ';')}"`);
|
||||
result.append(`,port-hopping=${proxy.ports.replace(/,/g, ';')}`);
|
||||
}
|
||||
|
||||
result.appendIfPresent(
|
||||
@@ -948,7 +945,7 @@ function hysteria2(proxy) {
|
||||
result.appendIfPresent(`,password=${proxy.password}`, 'password');
|
||||
|
||||
if (isPresent(proxy, 'ports')) {
|
||||
result.append(`,port-hopping="${proxy.ports.replace(/,/g, ';')}"`);
|
||||
result.append(`,port-hopping=${proxy.ports.replace(/,/g, ';')}`);
|
||||
}
|
||||
|
||||
result.appendIfPresent(
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import { Base64 } from 'js-base64';
|
||||
import { Result, isPresent } from './utils';
|
||||
import Surge_Producer from './surge';
|
||||
import ClashMeta_Producer from './clashmeta';
|
||||
import { isIPv4, isIPv6 } from '@/utils';
|
||||
import $ from '@/core/app';
|
||||
|
||||
@@ -10,22 +8,14 @@ const targetPlatform = 'SurgeMac';
|
||||
const surge_Producer = Surge_Producer();
|
||||
|
||||
export default function SurgeMac_Producer() {
|
||||
const produce = (proxy, type, opts = {}) => {
|
||||
const produce = (proxy) => {
|
||||
switch (proxy.type) {
|
||||
case 'external':
|
||||
return external(proxy);
|
||||
// case 'ssr':
|
||||
// return shadowsocksr(proxy);
|
||||
default: {
|
||||
try {
|
||||
return surge_Producer.produce(proxy, type, opts);
|
||||
} catch (e) {
|
||||
$.log(
|
||||
`${proxy.name} is not supported on ${targetPlatform}, try to use Mihomo(SurgeMac - External Proxy Program) instead`,
|
||||
);
|
||||
return mihomo(proxy, type, opts);
|
||||
}
|
||||
}
|
||||
case 'ssr':
|
||||
return shadowsocksr(proxy);
|
||||
default:
|
||||
return surge_Producer.produce(proxy);
|
||||
}
|
||||
};
|
||||
return { produce };
|
||||
@@ -70,7 +60,6 @@ function external(proxy) {
|
||||
|
||||
return result.toString();
|
||||
}
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
function shadowsocksr(proxy) {
|
||||
const external_proxy = {
|
||||
...proxy,
|
||||
@@ -112,65 +101,6 @@ function shadowsocksr(proxy) {
|
||||
|
||||
return external(external_proxy);
|
||||
}
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
function mihomo(proxy, type, opts) {
|
||||
const clashProxy = ClashMeta_Producer().produce([proxy], 'internal')?.[0];
|
||||
if (clashProxy) {
|
||||
const localPort = opts?.localPort || proxy._localPort || 65535;
|
||||
const ipv6 = ['ipv4', 'v4-only'].includes(proxy['ip-version'])
|
||||
? false
|
||||
: true;
|
||||
const external_proxy = {
|
||||
name: proxy.name,
|
||||
type: 'external',
|
||||
exec: proxy._exec || '/usr/local/bin/mihomo',
|
||||
'local-port': localPort,
|
||||
args: [
|
||||
'-config',
|
||||
Base64.encode(
|
||||
JSON.stringify({
|
||||
'mixed-port': localPort,
|
||||
ipv6,
|
||||
mode: 'global',
|
||||
dns: {
|
||||
enable: true,
|
||||
ipv6,
|
||||
nameserver: [
|
||||
'https://223.6.6.6/dns-query',
|
||||
'https://120.53.53.53/dns-query',
|
||||
],
|
||||
},
|
||||
proxies: [
|
||||
{
|
||||
...clashProxy,
|
||||
name: 'proxy',
|
||||
},
|
||||
],
|
||||
'proxy-groups': [
|
||||
{
|
||||
name: 'GLOBAL',
|
||||
type: 'select',
|
||||
proxies: ['proxy'],
|
||||
},
|
||||
],
|
||||
}),
|
||||
),
|
||||
],
|
||||
addresses: [],
|
||||
};
|
||||
|
||||
// https://manual.nssurge.com/policy/external-proxy.html
|
||||
if (isIP(proxy.server)) {
|
||||
external_proxy.addresses.push(proxy.server);
|
||||
} else {
|
||||
$.log(
|
||||
`Platform ${targetPlatform}, proxy type ${proxy.type}: addresses should be an IP address, but got ${proxy.server}`,
|
||||
);
|
||||
}
|
||||
opts.localPort = localPort - 1;
|
||||
return external(external_proxy);
|
||||
}
|
||||
}
|
||||
|
||||
function isIP(ip) {
|
||||
return isIPv4(ip) || isIPv6(ip);
|
||||
|
||||
@@ -70,24 +70,6 @@ async function downloadSubscription(req, res) {
includeUnsupportedProxy,
resultFormat,
} = req.query;
let $options = {};
if (req.query.$options) {
try {
// 支持 `#${encodeURIComponent(JSON.stringify({arg1: "1"}))}`
$options = JSON.parse(decodeURIComponent(req.query.$options));
} catch (e) {
for (const pair of req.query.$options.split('&')) {
const key = pair.split('=')[0];
const value = pair.split('=')[1];
// 部分兼容之前的逻辑 const value = pair.split('=')[1] || true;
$options[key] =
value == null || value === ''
? true
: decodeURIComponent(value);
}
}
$.info(`传入 $options: ${JSON.stringify($options)}`);
}
if (url) {
url = decodeURIComponent(url);
$.info(`指定远程订阅 URL: ${url}`);
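For reference, the `$options` value parsed above accepts two encodings, matching the inline comments; a hypothetical client-side sketch (the path and argument names are placeholders):

const opts = { arg1: 'a', arg2: 'b' };

// 1) URI-encoded JSON, handled by the JSON.parse branch
const byJson = `/download/MySub?$options=${encodeURIComponent(JSON.stringify(opts))}`;

// 2) key=value pairs, handled by the fallback split('&') branch;
//    a key without a value (or with an empty value) becomes `true`
const byPairs = `/download/MySub?$options=${encodeURIComponent('arg1=a&arg2=b&flag')}`;

console.log(byJson, byPairs);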
@@ -134,7 +116,6 @@ async function downloadSubscription(req, res) {
|
||||
produceOpts: {
|
||||
'include-unsupported-proxy': includeUnsupportedProxy,
|
||||
},
|
||||
$options,
|
||||
});
|
||||
|
||||
if (
|
||||
@@ -266,25 +247,6 @@ async function downloadCollection(req, res) {
|
||||
resultFormat,
|
||||
} = req.query;
|
||||
|
||||
let $options = {};
|
||||
if (req.query.$options) {
|
||||
try {
|
||||
// 支持 `#${encodeURIComponent(JSON.stringify({arg1: "1"}))}`
|
||||
$options = JSON.parse(decodeURIComponent(req.query.$options));
|
||||
} catch (e) {
|
||||
for (const pair of req.query.$options.split('&')) {
|
||||
const key = pair.split('=')[0];
|
||||
const value = pair.split('=')[1];
|
||||
// 部分兼容之前的逻辑 const value = pair.split('=')[1] || true;
|
||||
$options[key] =
|
||||
value == null || value === ''
|
||||
? true
|
||||
: decodeURIComponent(value);
|
||||
}
|
||||
}
|
||||
$.info(`传入 $options: ${JSON.stringify($options)}`);
|
||||
}
|
||||
|
||||
if (ignoreFailedRemoteSub != null && ignoreFailedRemoteSub !== '') {
|
||||
ignoreFailedRemoteSub = decodeURIComponent(ignoreFailedRemoteSub);
|
||||
$.info(`指定忽略失败的远程订阅: ${ignoreFailedRemoteSub}`);
|
||||
@@ -310,7 +272,6 @@ async function downloadCollection(req, res) {
|
||||
produceOpts: {
|
||||
'include-unsupported-proxy': includeUnsupportedProxy,
|
||||
},
|
||||
$options,
|
||||
});
|
||||
|
||||
// forward flow header from the first subscription in this collection
|
||||
|
||||
@@ -60,24 +60,6 @@ async function getFile(req, res) {
|
||||
mergeSources,
|
||||
ignoreFailedRemoteFile,
|
||||
} = req.query;
|
||||
let $options = {};
|
||||
if (req.query.$options) {
|
||||
try {
|
||||
// 支持 `#${encodeURIComponent(JSON.stringify({arg1: "1"}))}`
|
||||
$options = JSON.parse(decodeURIComponent(req.query.$options));
|
||||
} catch (e) {
|
||||
for (const pair of req.query.$options.split('&')) {
|
||||
const key = pair.split('=')[0];
|
||||
const value = pair.split('=')[1];
|
||||
// 部分兼容之前的逻辑 const value = pair.split('=')[1] || true;
|
||||
$options[key] =
|
||||
value == null || value === ''
|
||||
? true
|
||||
: decodeURIComponent(value);
|
||||
}
|
||||
}
|
||||
$.info(`传入 $options: ${JSON.stringify($options)}`);
|
||||
}
|
||||
if (url) {
|
||||
url = decodeURIComponent(url);
|
||||
$.info(`指定远程文件 URL: ${url}`);
|
||||
@@ -119,7 +101,6 @@ async function getFile(req, res) {
|
||||
content,
|
||||
mergeSources,
|
||||
ignoreFailedRemoteFile,
|
||||
$options,
|
||||
});
|
||||
|
||||
try {
|
||||
|
||||
@@ -3,7 +3,6 @@ import $ from '@/core/app';
|
||||
import migrate from '@/utils/migration';
|
||||
import download from '@/utils/download';
|
||||
import { syncArtifacts } from '@/restful/sync';
|
||||
import { gistBackupAction } from '@/restful/miscs';
|
||||
|
||||
import registerSubscriptionRoutes from './subscriptions';
|
||||
import registerCollectionRoutes from './collections';
|
||||
@@ -45,81 +44,20 @@ export default function serve() {
|
||||
$app.start();
|
||||
|
||||
if ($.env.isNode) {
|
||||
// Deprecated: SUB_STORE_BACKEND_CRON
|
||||
const backend_sync_cron =
|
||||
eval('process.env.SUB_STORE_BACKEND_SYNC_CRON') ||
|
||||
eval('process.env.SUB_STORE_BACKEND_CRON');
|
||||
if (backend_sync_cron) {
|
||||
$.info(`[SYNC CRON] ${backend_sync_cron} enabled`);
|
||||
const backend_cron = eval('process.env.SUB_STORE_BACKEND_CRON');
|
||||
if (backend_cron) {
|
||||
$.info(`[CRON] ${backend_cron} enabled`);
|
||||
const { CronJob } = eval(`require("cron")`);
|
||||
new CronJob(
|
||||
backend_sync_cron,
|
||||
backend_cron,
|
||||
async function () {
|
||||
try {
|
||||
$.info(`[SYNC CRON] ${backend_sync_cron} started`);
|
||||
$.info(`[CRON] ${backend_cron} started`);
|
||||
await syncArtifacts();
|
||||
$.info(`[SYNC CRON] ${backend_sync_cron} finished`);
|
||||
$.info(`[CRON] ${backend_cron} finished`);
|
||||
} catch (e) {
|
||||
$.error(
|
||||
`[SYNC CRON] ${backend_sync_cron} error: ${
|
||||
e.message ?? e
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}, // onTick
|
||||
null, // onComplete
|
||||
true, // start
|
||||
// 'Asia/Shanghai' // timeZone
|
||||
);
|
||||
}
|
||||
const backend_download_cron = eval(
|
||||
'process.env.SUB_STORE_BACKEND_DOWNLOAD_CRON',
|
||||
);
|
||||
if (backend_download_cron) {
|
||||
$.info(`[DOWNLOAD CRON] ${backend_download_cron} enabled`);
|
||||
const { CronJob } = eval(`require("cron")`);
|
||||
new CronJob(
|
||||
backend_download_cron,
|
||||
async function () {
|
||||
try {
|
||||
$.info(
|
||||
`[DOWNLOAD CRON] ${backend_download_cron} started`,
|
||||
);
|
||||
await gistBackupAction('download');
|
||||
$.info(
|
||||
`[DOWNLOAD CRON] ${backend_download_cron} finished`,
|
||||
);
|
||||
} catch (e) {
|
||||
$.error(
|
||||
`[DOWNLOAD CRON] ${backend_download_cron} error: ${
|
||||
e.message ?? e
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}, // onTick
|
||||
null, // onComplete
|
||||
true, // start
|
||||
// 'Asia/Shanghai' // timeZone
|
||||
);
|
||||
}
|
||||
const backend_upload_cron = eval(
|
||||
'process.env.SUB_STORE_BACKEND_UPLOAD_CRON',
|
||||
);
|
||||
if (backend_upload_cron) {
|
||||
$.info(`[UPLOAD CRON] ${backend_upload_cron} enabled`);
|
||||
const { CronJob } = eval(`require("cron")`);
|
||||
new CronJob(
|
||||
backend_upload_cron,
|
||||
async function () {
|
||||
try {
|
||||
$.info(`[UPLOAD CRON] ${backend_upload_cron} started`);
|
||||
await gistBackupAction('upload');
|
||||
$.info(`[UPLOAD CRON] ${backend_upload_cron} finished`);
|
||||
} catch (e) {
|
||||
$.error(
|
||||
`[UPLOAD CRON] ${backend_upload_cron} error: ${
|
||||
e.message ?? e
|
||||
}`,
|
||||
`[CRON] ${backend_cron} error: ${e.message ?? e}`,
|
||||
);
|
||||
}
|
||||
}, // onTick
|
||||
|
||||
@@ -80,72 +80,10 @@ async function refresh(_, res) {
|
||||
success(res);
|
||||
}
|
||||
|
||||
async function gistBackupAction(action) {
|
||||
// read token
|
||||
const { gistToken, syncPlatform } = $.read(SETTINGS_KEY);
|
||||
if (!gistToken) throw new Error('GitHub Token is required for backup!');
|
||||
|
||||
const gist = new Gist({
|
||||
token: gistToken,
|
||||
key: GIST_BACKUP_KEY,
|
||||
syncPlatform,
|
||||
});
|
||||
let content;
|
||||
const settings = $.read(SETTINGS_KEY);
|
||||
const updated = settings.syncTime;
|
||||
switch (action) {
|
||||
case 'upload':
|
||||
// update syncTime
|
||||
settings.syncTime = new Date().getTime();
|
||||
$.write(settings, SETTINGS_KEY);
|
||||
content = $.read('#sub-store');
|
||||
if ($.env.isNode) content = JSON.stringify($.cache, null, ` `);
|
||||
$.info(`上传备份中...`);
|
||||
try {
|
||||
await gist.upload({
|
||||
[GIST_BACKUP_FILE_NAME]: { content },
|
||||
});
|
||||
$.info(`上传备份完成`);
|
||||
} catch (err) {
|
||||
// restore syncTime if upload failed
|
||||
settings.syncTime = updated;
|
||||
$.write(settings, SETTINGS_KEY);
|
||||
throw err;
|
||||
}
|
||||
break;
|
||||
case 'download':
|
||||
$.info(`还原备份中...`);
|
||||
content = await gist.download(GIST_BACKUP_FILE_NAME);
|
||||
try {
|
||||
if (Object.keys(JSON.parse(content).settings).length === 0) {
|
||||
throw new Error('备份文件应该至少包含 settings 字段');
|
||||
}
|
||||
} catch (err) {
|
||||
$.error(
|
||||
`Gist 备份文件校验失败, 无法还原\nReason: ${
|
||||
err.message ?? err
|
||||
}`,
|
||||
);
|
||||
throw new Error('Gist 备份文件校验失败, 无法还原');
|
||||
}
|
||||
// restore settings
|
||||
$.write(content, '#sub-store');
|
||||
if ($.env.isNode) {
|
||||
content = JSON.parse(content);
|
||||
$.cache = content;
|
||||
$.persistCache();
|
||||
}
|
||||
$.info(`perform migration after restoring from gist...`);
|
||||
migrate();
|
||||
$.info(`migration completed`);
|
||||
$.info(`还原备份完成`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
async function gistBackup(req, res) {
|
||||
const { action } = req.query;
|
||||
// read token
|
||||
const { gistToken } = $.read(SETTINGS_KEY);
|
||||
const { gistToken, syncPlatform } = $.read(SETTINGS_KEY);
|
||||
if (!gistToken) {
|
||||
failed(
|
||||
res,
|
||||
@@ -155,8 +93,68 @@ async function gistBackup(req, res) {
|
||||
),
|
||||
);
|
||||
} else {
|
||||
const gist = new Gist({
|
||||
token: gistToken,
|
||||
key: GIST_BACKUP_KEY,
|
||||
syncPlatform,
|
||||
});
|
||||
try {
|
||||
await gistBackupAction(action);
|
||||
let content;
|
||||
const settings = $.read(SETTINGS_KEY);
|
||||
const updated = settings.syncTime;
|
||||
switch (action) {
|
||||
case 'upload':
|
||||
// update syncTime
|
||||
settings.syncTime = new Date().getTime();
|
||||
$.write(settings, SETTINGS_KEY);
|
||||
content = $.read('#sub-store');
|
||||
if ($.env.isNode)
|
||||
content = JSON.stringify($.cache, null, ` `);
|
||||
$.info(`上传备份中...`);
|
||||
try {
|
||||
await gist.upload({
|
||||
[GIST_BACKUP_FILE_NAME]: { content },
|
||||
});
|
||||
} catch (err) {
|
||||
// restore syncTime if upload failed
|
||||
settings.syncTime = updated;
|
||||
$.write(settings, SETTINGS_KEY);
|
||||
throw err;
|
||||
}
|
||||
break;
|
||||
case 'download':
|
||||
$.info(`还原备份中...`);
|
||||
content = await gist.download(GIST_BACKUP_FILE_NAME);
|
||||
try {
|
||||
if (
|
||||
Object.keys(JSON.parse(content).settings).length ===
|
||||
0
|
||||
) {
|
||||
throw new Error(
|
||||
'备份文件应该至少包含 settings 字段',
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
$.error(
|
||||
`Gist 备份文件校验失败, 无法还原\nReason: ${
|
||||
err.message ?? err
|
||||
}`,
|
||||
);
|
||||
throw new Error('Gist 备份文件校验失败, 无法还原');
|
||||
}
|
||||
// restore settings
|
||||
$.write(content, '#sub-store');
|
||||
if ($.env.isNode) {
|
||||
content = JSON.parse(content);
|
||||
$.cache = content;
|
||||
$.persistCache();
|
||||
}
|
||||
$.info(`perform migration after restoring from gist...`);
|
||||
migrate();
|
||||
$.info(`migration completed`);
|
||||
$.info(`还原备份完成`);
|
||||
break;
|
||||
}
|
||||
success(res);
|
||||
} catch (err) {
|
||||
$.error(
|
||||
@@ -173,5 +171,3 @@ async function gistBackup(req, res) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export { gistBackupAction };
|
||||
|
||||
@@ -146,8 +146,7 @@ async function compareSub(req, res) {
|
||||
// add id
|
||||
original.forEach((proxy, i) => {
|
||||
proxy.id = i;
|
||||
proxy._subName = sub.name;
|
||||
proxy._subDisplayName = sub.displayName;
|
||||
proxy.subName = sub.name;
|
||||
});
|
||||
|
||||
// apply processors
|
||||
@@ -238,10 +237,8 @@ async function compareCollection(req, res) {
|
||||
.flat();
|
||||
|
||||
currentProxies.forEach((proxy) => {
|
||||
proxy._subName = sub.name;
|
||||
proxy._subDisplayName = sub.displayName;
|
||||
proxy._collectionName = collection.name;
|
||||
proxy._collectionDisplayName = collection.displayName;
|
||||
proxy.subName = sub.name;
|
||||
proxy.collectionName = collection.name;
|
||||
});
|
||||
|
||||
// apply processors
|
||||
@@ -279,8 +276,7 @@ async function compareCollection(req, res) {
|
||||
|
||||
original.forEach((proxy, i) => {
|
||||
proxy.id = i;
|
||||
proxy._collectionName = collection.name;
|
||||
proxy._collectionDisplayName = collection.displayName;
|
||||
proxy.collectionName = collection.name;
|
||||
});
|
||||
|
||||
const processed = await ProxyUtils.process(
|
||||
|
||||
@@ -37,7 +37,6 @@ async function produceArtifact({
|
||||
produceOpts = {},
|
||||
subscription,
|
||||
awaitCustomCache,
|
||||
$options,
|
||||
}) {
|
||||
platform = platform || 'JSON';
|
||||
|
||||
@@ -151,8 +150,7 @@ async function produceArtifact({
|
||||
.flat();
|
||||
|
||||
proxies.forEach((proxy) => {
|
||||
proxy._subName = sub.name;
|
||||
proxy._subDisplayName = sub.displayName;
|
||||
proxy.subName = sub.name;
|
||||
});
|
||||
// apply processors
|
||||
proxies = await ProxyUtils.process(
|
||||
@@ -160,7 +158,6 @@ async function produceArtifact({
|
||||
sub.process || [],
|
||||
platform,
|
||||
{ [sub.name]: sub },
|
||||
$options,
|
||||
);
|
||||
if (proxies.length === 0) {
|
||||
throw new Error(`订阅 ${name} 中不含有效节点`);
|
||||
@@ -253,10 +250,8 @@ async function produceArtifact({
|
||||
.flat();
|
||||
|
||||
currentProxies.forEach((proxy) => {
|
||||
proxy._subName = sub.name;
|
||||
proxy._subDisplayName = sub.displayName;
|
||||
proxy._collectionName = collection.name;
|
||||
proxy._collectionDisplayName = collection.displayName;
|
||||
proxy.subName = sub.name;
|
||||
proxy.collectionName = collection.name;
|
||||
});
|
||||
|
||||
// apply processors
|
||||
@@ -264,11 +259,7 @@ async function produceArtifact({
|
||||
currentProxies,
|
||||
sub.process || [],
|
||||
platform,
|
||||
{
|
||||
[sub.name]: sub,
|
||||
_collection: collection,
|
||||
$options,
|
||||
},
|
||||
{ [sub.name]: sub, _collection: collection },
|
||||
);
|
||||
results[name] = currentProxies;
|
||||
processed++;
|
||||
@@ -312,8 +303,7 @@ async function produceArtifact({
|
||||
);
|
||||
|
||||
proxies.forEach((proxy) => {
|
||||
proxy._collectionName = collection.name;
|
||||
proxy._collectionDisplayName = collection.displayName;
|
||||
proxy.collectionName = collection.name;
|
||||
});
|
||||
|
||||
// apply own processors
|
||||
@@ -322,7 +312,6 @@ async function produceArtifact({
|
||||
collection.process || [],
|
||||
platform,
|
||||
{ _collection: collection },
|
||||
$options,
|
||||
);
|
||||
if (proxies.length === 0) {
|
||||
throw new Error(`组合订阅 ${name} 中不含有效节点`);
|
||||
@@ -344,7 +333,6 @@ async function produceArtifact({
|
||||
}
|
||||
exist[proxy.name] = true;
|
||||
}
|
||||
console.log(proxies);
|
||||
return ProxyUtils.produce(proxies, platform, produceType, produceOpts);
|
||||
} else if (type === 'rule') {
|
||||
const allRules = $.read(RULES_KEY);
|
||||
@@ -472,10 +460,10 @@ async function produceArtifact({
|
||||
const processed =
|
||||
Array.isArray(file.process) && file.process.length > 0
|
||||
? await ProxyUtils.process(
|
||||
{ $files: files, $content: filesContent, $options },
|
||||
{ $files: files, $content: filesContent },
|
||||
file.process,
|
||||
)
|
||||
: { $content: filesContent, $files: files, $options };
|
||||
: { $content: filesContent, $files: files };
|
||||
|
||||
return processed?.$content ?? '';
|
||||
}
|
||||
|
||||
@@ -49,11 +49,6 @@ export default async function download(
|
||||
const requestTimeout = timeout || defaultTimeout;
|
||||
const id = hex_md5(userAgent + url);
|
||||
|
||||
if ($arguments?.cacheKey === true) {
|
||||
$.error(`使用自定义缓存时 cacheKey 的值不能为空`);
|
||||
$arguments.cacheKey = undefined;
|
||||
}
|
||||
|
||||
const customCacheKey = $arguments?.cacheKey
|
||||
? `#sub-store-cached-custom-${$arguments?.cacheKey}`
|
||||
: undefined;
|
||||
|
||||
@@ -111,12 +111,6 @@ function getRandomPort(portString) {
}
}

function numberToString(value) {
return Number.isSafeInteger(value)
? String(value)
: BigInt(value).toString();
}
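Example behaviour of the `numberToString` helper above (illustrative values): safe integers go through `String()`, while larger integer values are routed through `BigInt` so all digits are preserved instead of collapsing into scientific notation.

console.log(numberToString(443));  // '443'
console.log(numberToString(1e21)); // '1000000000000000000000' (String(1e21) would give '1e+21')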
export {
|
||||
ipAddress,
|
||||
isIPv4,
|
||||
@@ -129,5 +123,4 @@ export {
|
||||
// utf8ArrayToStr,
|
||||
getPolicyDescriptor,
|
||||
getRandomPort,
|
||||
numberToString,
|
||||
};
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
name: Sub-Store
|
||||
description: "支持 Surge 正式版的参数设置功能. 测落地功能 ability: http-client-policy, 同步配置的定时 cronexp: 55 23 * * *"
|
||||
description: '支持 Surge 正式版的参数设置功能. 测落地功能 ability: http-client-policy, 同步配置的定时 cronexp: 55 23 * * *'
|
||||
compat_arguments:
|
||||
ability: http-client-policy
|
||||
cronexp: 55 23 * * *
|
||||
sync: '"Sub-Store Sync"'
|
||||
timeout: "120"
|
||||
timeout: '120'
|
||||
engine: auto
|
||||
produce: '"# Sub-Store Produce"'
|
||||
produce_cronexp: 50 */6 * * *
|
||||
@@ -12,26 +12,26 @@ compat_arguments:
|
||||
produce_col: '"col1,col2"'
|
||||
compat_arguments_desc: '\n1️⃣ ability\n\n默认已开启测落地能力\n需要配合脚本操作\n如 https://raw.githubusercontent.com/Keywos/rule/main/cname.js\n填写任意其他值关闭\n\n2️⃣ cronexp\n\n同步配置定时任务\n默认为每天 23 点 55 分\n\n定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 ''同步'' 或 ''同步配置''\n\n3️⃣ sync\n\n自定义定时任务名\n便于在脚本编辑器中选择\n若设为 # 可取消定时任务\n\n4️⃣ timeout\n\n脚本超时, 单位为秒\n\n5️⃣ engine\n\n默认为自动使用 webview 引擎, 可设为指定 jsc, 但 jsc 容易爆内存\n\n6️⃣ produce\n\n自定义处理订阅的定时任务名\n一般用于定时处理耗时较长的订阅, 以更新缓存\n这样 Surge 中拉取的时候就能用到缓存, 不至于总是超时\n若设为 # 可取消此定时任务\n默认不开启\n\n7️⃣ produce_cronexp\n\n配置处理订阅的定时任务\n\n默认为每 6 小时\n\n9️⃣ produce_sub\n\n自定义需定时处理的单条订阅名\n多个用 , 连接\n\n🔟 produce_col\n\n自定义需定时处理的组合订阅名\n多个用 , 连接\n\n⚠️ 注意: 是 名称(name) 不是 显示名称(displayName)\n如果名称需要编码, 请编码后再用 , 连接\n顺序: 并发执行单条订阅, 然后并发执行组合订阅'
|
||||
scriptings:
|
||||
- http_request:
|
||||
name: Sub-Store Core
|
||||
match: ^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info)))
|
||||
script_url: https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js
|
||||
body_required: true
|
||||
- http_request:
|
||||
name: Sub-Store Simple
|
||||
match: ^https?:\/\/sub\.store
|
||||
script_url: https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js
|
||||
body_required: true
|
||||
- schedule:
|
||||
name: "{{{sync}}}"
|
||||
cron: "{{{cronexp}}}"
|
||||
script_url: https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js
|
||||
- schedule:
|
||||
name: "{{{produce}}}"
|
||||
cron: "{{{produce_cronexp}}}"
|
||||
script_url: https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js
|
||||
arguments:
|
||||
_compat.$argument: '"sub={{{produce_sub}}}&col={{{produce_col}}}"'
|
||||
- http_request:
|
||||
name: Sub-Store Core
|
||||
match: ^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info)))
|
||||
script_url: https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js
|
||||
body_required: true
|
||||
- http_request:
|
||||
name: Sub-Store Simple
|
||||
match: ^https?:\/\/sub\.store
|
||||
script_url: https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js
|
||||
body_required: true
|
||||
- schedule:
|
||||
name: '{{{sync}}}'
|
||||
cron: '{{{cronexp}}}'
|
||||
script_url: https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js
|
||||
- schedule:
|
||||
name: '{{{produce}}}'
|
||||
cron: '{{{produce_cronexp}}}'
|
||||
script_url: https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js
|
||||
arguments:
|
||||
_compat.$argument: '"sub={{{produce_sub}}}&col={{{produce_col}}}"'
|
||||
mitm:
|
||||
hostnames:
|
||||
- sub.store
|
||||
- sub.store
|
||||
|
||||
@@ -14,7 +14,7 @@ DOMAIN,sub-store.vercel.app,PROXY
|
||||
hostname=sub.store
|
||||
|
||||
[Script]
|
||||
http-request ^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))) script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js, requires-body=true, timeout=120, tag=Sub-Store Core
|
||||
http-request ^https?:\/\/sub\.store script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js, requires-body=true, timeout=120, tag=Sub-Store Simple
|
||||
http-request ^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))) script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js, requires-body=true, timeout=120, tag=Sub-Store Core
|
||||
http-request ^https?:\/\/sub\.store script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js, requires-body=true, timeout=120, tag=Sub-Store Simple
|
||||
|
||||
cron "55 23 * * *" script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js, timeout=120, tag=Sub-Store Sync
|
||||
cron "55 23 * * *" script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js, timeout=120, tag=Sub-Store Sync
|
||||
@@ -2,6 +2,6 @@
|
||||
"name": "Sub-Store",
|
||||
"description": "定时任务默认为每天 23 点 55 分. 定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'",
|
||||
"task": [
|
||||
"55 23 * * * https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js, tag=Sub-Store Sync, img-url=https://raw.githubusercontent.com/58xinian/icon/master/Sub-Store1.png"
|
||||
"55 23 * * * https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js, tag=Sub-Store Sync, img-url=https://raw.githubusercontent.com/58xinian/icon/master/Sub-Store1.png"
|
||||
]
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
hostname=sub.store
|
||||
|
||||
^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))) url script-analyze-echo-response https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js
|
||||
^https?:\/\/sub\.store url script-analyze-echo-response https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js
|
||||
^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))) url script-analyze-echo-response https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js
|
||||
^https?:\/\/sub\.store url script-analyze-echo-response https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js
|
||||
@@ -25,13 +25,13 @@ cron:
|
||||
|
||||
script-providers:
|
||||
sub-store-0:
|
||||
url: https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js
|
||||
url: https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js
|
||||
interval: 86400
|
||||
|
||||
sub-store-1:
|
||||
url: https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js
|
||||
url: https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js
|
||||
interval: 86400
|
||||
|
||||
cron-sync-artifacts:
|
||||
url: https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js
|
||||
url: https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js
|
||||
interval: 86400
|
||||
|
||||
@@ -8,10 +8,10 @@
|
||||
hostname = %APPEND% sub.store
|
||||
|
||||
[Script]
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js,requires-body=true,timeout={{{timeout}}},ability="{{{ability}}}",engine={{{engine}}}
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js,requires-body=true,timeout={{{timeout}}},ability="{{{ability}}}",engine={{{engine}}}
|
||||
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js,requires-body=true,timeout={{{timeout}}},engine={{{engine}}}
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js,requires-body=true,timeout={{{timeout}}},engine={{{engine}}}
|
||||
|
||||
{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js,engine={{{engine}}}
|
||||
{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}}
|
||||
|
||||
{{{produce}}}=type=cron,cronexp="{{{produce_cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js,engine={{{engine}}},argument="sub={{{produce_sub}}}&col={{{produce_col}}}"
|
||||
{{{produce}}}=type=cron,cronexp="{{{produce_cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}},argument="sub={{{produce_sub}}}&col={{{produce_col}}}"
|
||||
@@ -7,7 +7,7 @@ hostname = %APPEND% sub.store
|
||||
|
||||
[Script]
|
||||
# 主程序 已经去掉 Sub-Store Core 的参数 [,ability=http-client-policy] 不会爆内存,这个参数在 Surge 非常占用内存; 如果不需要使用指定节点功能 例如[加旗帜脚本或者cname脚本] 则可以使用此脚本
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js,requires-body=true,timeout=120
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js,requires-body=true,timeout=120
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js,requires-body=true,timeout=120
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js,requires-body=true,timeout=120
|
||||
|
||||
Sub-Store Sync=type=cron,cronexp=55 23 * * *,wake-system=1,timeout=120,script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js
|
||||
Sub-Store Sync=type=cron,cronexp=55 23 * * *,wake-system=1,timeout=120,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
hostname = %APPEND% sub.store
|
||||
|
||||
[Script]
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js,requires-body=true,timeout=120,ability=http-client-policy
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js,requires-body=true,timeout=120
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js,requires-body=true,timeout=120,ability=http-client-policy
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js,requires-body=true,timeout=120
|
||||
|
||||
Sub-Store Sync=type=cron,cronexp=55 23 * * *,wake-system=1,timeout=120,script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js
|
||||
Sub-Store Sync=type=cron,cronexp=55 23 * * *,wake-system=1,timeout=120,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js
|
||||
|
||||
@@ -8,10 +8,10 @@
|
||||
hostname = %APPEND% sub.store
|
||||
|
||||
[Script]
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-1.min.js,requires-body=true,timeout={{{timeout}}},ability="{{{ability}}}",engine={{{engine}}}
|
||||
Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api\/(preview|sync|(utils\/node-info))),script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-1.min.js,requires-body=true,timeout={{{timeout}}},ability="{{{ability}}}",engine={{{engine}}}
|
||||
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/sub-store-0.min.js,requires-body=true,timeout={{{timeout}}},engine={{{engine}}}
|
||||
Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js,requires-body=true,timeout={{{timeout}}},engine={{{engine}}}
|
||||
|
||||
{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js,engine={{{engine}}}
|
||||
{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}}
|
||||
|
||||
{{{produce}}}=type=cron,cronexp="{{{produce_cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://raw.githubusercontent.com/sub-store-org/Sub-Store/release/cron-sync-artifacts.min.js,engine={{{engine}}},argument="sub={{{produce_sub}}}&col={{{produce_col}}}"
|
||||
{{{produce}}}=type=cron,cronexp="{{{produce_cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}},argument="sub={{{produce_sub}}}&col={{{produce_col}}}"
|
||||
@@ -10,8 +10,8 @@ function operator(proxies = [], targetPlatform, context) {
|
||||
// 2. 域名解析后 会多一个 `_resolved` 字段, 表示是否解析成功
|
||||
// 3. 域名解析后会有`_IPv4`, `_IPv6`, `_IP`(若有多个步骤, 只取第一次成功的 v4 或 v6 数据), `_IP4P`(若解析类型为 IPv6 且符合 IP4P 类型, 将自动转换), `_domain` 字段, `_resolved_ips` 为解析出的所有 IP
|
||||
// 4. 节点字段 `exec` 为 `ssr-local` 路径, 默认 `/usr/local/bin/ssr-local`; 端口从 10000 开始递增(暂不支持配置)
|
||||
// 5. `_subName` 为单条订阅名, `_subDisplayName` 为单条订阅显示名
|
||||
// 6. `_collectionName` 为组合订阅名, `_collectionDisplayName` 为组合订阅显示名
|
||||
// 5. `_subName` 为单条订阅名
|
||||
// 6. `_collectionName` 为组合订阅名
|
||||
// 7. `tls-fingerprint` 为 tls 指纹
|
||||
// 8. `underlying-proxy` 为前置代理
|
||||
// 9. `trojan`, `tuic`, `hysteria`, `hysteria2`, `juicity` 会在解析时设置 `tls`: true (会使用 tls 类协议的通用逻辑), 输出时删除
|
||||
@@ -23,17 +23,6 @@ function operator(proxies = [], targetPlatform, context) {
|
||||
|
||||
// $arguments 为传入的脚本参数
|
||||
|
||||
// $options 为通过链接传入的参数
|
||||
// 例如: { arg1: 'a', arg2: 'b' }
|
||||
// 可这样传:
|
||||
// 先这样处理 encodeURIComponent(JSON.stringify({ arg1: 'a', arg2: 'b' }))
|
||||
// /api/file/foo?$options=%7B%22arg1%22%3A%22a%22%2C%22arg2%22%3A%22b%22%7D
|
||||
// 或这样传:
|
||||
// 先这样处理 encodeURIComponent('arg1=a&arg2=b')
|
||||
// /api/file/foo?$options=arg1%3Da%26arg2%3Db
|
||||
|
||||
// console.log($options)
|
||||
|
||||
// targetPlatform 为输出的目标平台
|
||||
|
||||
// lodash
|
||||
@@ -144,7 +133,7 @@ function operator(proxies = [], targetPlatform, context) {
|
||||
// yaml.proxies.unshift(...clashMetaProxies)
|
||||
// $content = ProxyUtils.yaml.dump(yaml)
|
||||
|
||||
// { $content, $files, $options } will be passed to the next operator
|
||||
// { $content, $files } will be passed to the next operator
|
||||
// $content is the final content of the file
|
||||
|
||||
// flowUtils 为机场订阅流量信息处理工具
|
||||
@@ -152,7 +141,7 @@ function operator(proxies = [], targetPlatform, context) {
|
||||
// 1. https://t.me/zhetengsha/948
|
||||
|
||||
// context 为传入的上下文
|
||||
// 其中 source 为 订阅和组合订阅的数据, 有三种情况, 按需判断 (若只需要取订阅/组合订阅名称 直接用 `_subName` `_subDisplayName` `_collectionName` `_collectionDisplayName` 即可)
|
||||
// 有三种情况, 按需判断
|
||||
|
||||
// 若存在 `source._collection` 且 `source._collection.subscriptions` 中的 key 在 `source` 上也存在, 说明输出结果为组合订阅, 但是脚本设置在单条订阅上