Mirror of https://github.com/sub-store-org/Sub-Store.git, synced 2025-08-10 00:52:40 +00:00
Compare commits
37 Commits
| SHA1 |
|---|
| b6f848a6e6 |
| 99d058bcf1 |
| 533103e765 |
| cf82764171 |
| 7b783c1fe3 |
| 372eff9a44 |
| d3b5a529d7 |
| 8049134bb5 |
| 3f620700a4 |
| 9e64a68481 |
| 9ce5916414 |
| 047c21fe70 |
| 47849dc6d0 |
| af06086c1b |
| 4a6a147667 |
| c6540d14cd |
| 3db71ec531 |
| cf156c2f17 |
| e28e2a78fb |
| b0a2c709e8 |
| 5dc2c8ced7 |
| d2a65ee0fe |
| 4dd4ae98ca |
| 0d41eb467f |
| ba1c91a7a5 |
| 30fa87c172 |
| 1eaa33948b |
| 619e256ed8 |
| b46209e164 |
| a1ba4e273e |
| bfc95ed92a |
| 32f591ec56 |
| cea16d8c44 |
| 93a1ba7b50 |
| e6d1aa1150 |
| 26e83798da |
| cf35afcab2 |
@@ -1,6 +1,6 @@
 {
     "name": "sub-store",
-    "version": "2.14.309",
+    "version": "2.14.331",
     "description": "Advanced Subscription Manager for QX, Loon, Surge, Stash and ShadowRocket.",
     "main": "src/main.js",
     "scripts": {
@@ -15,7 +15,7 @@ import $ from '@/core/app';
 import { FILES_KEY, MODULES_KEY } from '@/constants';
 import { findByName } from '@/utils/database';
 import { produceArtifact } from '@/restful/sync';
-import { getFlag, getISO, MMDB } from '@/utils/geo';
+import { getFlag, removeFlag, getISO, MMDB } from '@/utils/geo';
 import Gist from '@/utils/gist';

 function preprocess(raw) {

@@ -199,6 +199,10 @@ function produce(proxies, targetPlatform, type, opts = {}) {
     );

     proxies = proxies.map((proxy) => {
+        proxy._subName = proxy.subName;
+        proxy._collectionName = proxy.collectionName;
+        proxy._resolved = proxy.resolved;
+
         if (!isNotBlank(proxy.name)) {
             proxy.name = `${proxy.type} ${proxy.server}:${proxy.port}`;
         }

@@ -272,6 +276,7 @@ export const ProxyUtils = {
     isIP,
     yaml: YAML,
     getFlag,
+    removeFlag,
     getISO,
     MMDB,
     Gist,
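The newly exported `removeFlag` pairs with `getFlag`. A minimal script-operator sketch (node names are illustrative; `ProxyUtils` is the helper object injected into script operators, as the example script near the end of this diff shows):

```js
// Minimal sketch: normalize node names using the newly exported helpers.
// Assumes this runs as a Sub-Store script operator where ProxyUtils is injected.
function operator(proxies = []) {
    return proxies.map((proxy) => {
        const flag = ProxyUtils.getFlag(proxy.name); // emoji flag derived from the name
        const bare = ProxyUtils.removeFlag(proxy.name); // name with any existing flag stripped
        proxy.name = `${flag} ${bare}`;
        return proxy;
    });
}
```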
@@ -340,6 +340,8 @@ function URI_VMess() {
                 transportPath = Array.isArray(transportPath)
                     ? transportPath[0]
                     : transportPath;
+            } else {
+                transportPath = '/';
             }
         }
         if (transportPath || transportHost) {

@@ -851,6 +853,9 @@ function Clash_All() {
         }
     }

+    if (proxy['server-cert-fingerprint']) {
+        proxy['tls-fingerprint'] = proxy['server-cert-fingerprint'];
+    }
     if (proxy.fingerprint) {
         proxy['tls-fingerprint'] = proxy.fingerprint;
     }

@@ -22,6 +22,7 @@ function Base64Encoded() {
         'aHR0c', // htt
         'dmxlc3M=', // vless
         'aHlzdGVyaWEy', // hysteria2
+        'aHkyOi8v', // hy2://
         'd2lyZWd1YXJkOi8v', // wireguard://
         'd2c6Ly8=', // wg://
         'dHVpYzovLw==', // tuic://
@@ -2,7 +2,7 @@ import resourceCache from '@/utils/resource-cache';
 import scriptResourceCache from '@/utils/script-resource-cache';
 import { isIPv4, isIPv6 } from '@/utils';
 import { FULL } from '@/utils/logical';
-import { getFlag } from '@/utils/geo';
+import { getFlag, removeFlag } from '@/utils/geo';
 import lodash from 'lodash';
 import $ from '@/core/app';
 import { hex_md5 } from '@/vendor/md5';

@@ -462,7 +462,7 @@ const DOMAIN_RESOLVERS = {
         const cached = resourceCache.get(id);
         if (!noCache && cached) return cached;
         const resp = await $.http.get({
-            url: `http://223.6.6.6/resolve?name=${encodeURIComponent(
+            url: `http://223.6.6.6/resolve?edns_client_subnet=223.6.6.6/24&name=${encodeURIComponent(
                 domain,
             )}&type=${type === 'IPv6' ? 'AAAA' : 'A'}&short=1`,
             headers: {

@@ -482,7 +482,7 @@ const DOMAIN_RESOLVERS = {
         const cached = resourceCache.get(id);
         if (!noCache && cached) return cached;
         const resp = await $.http.get({
-            url: `http://119.28.28.28/d?type=${
+            url: `http://119.28.28.28/d?ip=119.28.28.28&type=${
                 type === 'IPv6' ? 'AAAA' : 'A'
             }&dn=${encodeURIComponent(domain)}`,
             headers: {

@@ -512,12 +512,17 @@ function ResolveDomainOperator({ provider, type: _type, filter, cache }) {
     return {
         name: 'Resolve Domain Operator',
         func: async (proxies) => {
+            proxies.forEach((p, i) => {
+                if (!p['_no-resolve'] && p['no-resolve']) {
+                    proxies[i]['_no-resolve'] = p['no-resolve'];
+                }
+            });
             const results = {};
             const limit = 15; // more than 20 concurrency may result in surge TCP connection shortage.
             const totalDomain = [
                 ...new Set(
                     proxies
-                        .filter((p) => !isIP(p.server) && !p['no-resolve'])
+                        .filter((p) => !isIP(p.server) && !p['_no-resolve'])
                         .map((c) => c.server),
                 ),
             ];

@@ -543,7 +548,7 @@ function ResolveDomainOperator({ provider, type: _type, filter, cache }) {
                 await Promise.all(currentBatch);
             }
             proxies.forEach((p) => {
-                if (!p['no-resolve']) {
+                if (!p['_no-resolve']) {
                     if (results[p.server]) {
                         if (_type === 'IP4P') {
                             const { server, port } = parseIP4P(

@@ -578,7 +583,7 @@ function ResolveDomainOperator({ provider, type: _type, filter, cache }) {

         return proxies.filter((p) => {
             if (filter === 'removeFailed') {
-                return isIP(p.server) || p['no-resolve'] || p.resolved;
+                return isIP(p.server) || p['_no-resolve'] || p.resolved;
             } else if (filter === 'IPOnly') {
                 return isIP(p.server);
             } else if (filter === 'IPv4Only') {

@@ -864,13 +869,6 @@ function clone(object) {
     return JSON.parse(JSON.stringify(object));
 }

-// remove flag
-function removeFlag(str) {
-    return str
-        .replace(/[\uD83C][\uDDE6-\uDDFF][\uD83C][\uDDE6-\uDDFF]|🏴☠️|🏳️🌈/g, '')
-        .trim();
-}
-
 function createDynamicFunction(name, script, $arguments) {
     const flowUtils = {
         getFlowField,
@@ -158,6 +158,7 @@ export default function Clash_Producer() {
             delete proxy.collectionName;
             delete proxy.id;
             delete proxy.resolved;
+            delete proxy['no-resolve'];
             for (const key in proxy) {
                 if (proxy[key] == null || /^_/i.test(key)) {
                     delete proxy[key];

@@ -173,6 +173,7 @@ export default function ClashMeta_Producer() {
             delete proxy.collectionName;
             delete proxy.id;
             delete proxy.resolved;
+            delete proxy['no-resolve'];
             for (const key in proxy) {
                 if (proxy[key] == null || /^_/i.test(key)) {
                     delete proxy[key];

@@ -176,6 +176,7 @@ export default function ShadowRocket_Producer() {
             delete proxy.collectionName;
             delete proxy.id;
             delete proxy.resolved;
+            delete proxy['no-resolve'];
             for (const key in proxy) {
                 if (proxy[key] == null || /^_/i.test(key)) {
                     delete proxy[key];

@@ -239,7 +239,7 @@ export default function Stash_Producer() {
                 delete proxy.tls;
             }
             if (proxy['tls-fingerprint']) {
-                proxy.fingerprint = proxy['tls-fingerprint'];
+                proxy['server-cert-fingerprint'] = proxy['tls-fingerprint'];
             }
             delete proxy['tls-fingerprint'];

@@ -265,6 +265,7 @@ export default function Stash_Producer() {
             delete proxy.collectionName;
             delete proxy.id;
             delete proxy.resolved;
+            delete proxy['no-resolve'];
             for (const key in proxy) {
                 if (proxy[key] == null || /^_/i.test(key)) {
                     delete proxy[key];

@@ -6,7 +6,7 @@ const targetPlatform = 'Surfboard';

 export default function Surfboard_Producer() {
     const produce = (proxy) => {
-        proxy.name = proxy.name.replace(/=/g, '');
+        proxy.name = proxy.name.replace(/=|,/g, '');
         switch (proxy.type) {
             case 'ss':
                 return shadowsocks(proxy);

@@ -14,13 +14,14 @@ const ipVersions = {

 export default function Surge_Producer() {
     const produce = (proxy, type, opts = {}) => {
+        proxy.name = proxy.name.replace(/=|,/g, '');
         switch (proxy.type) {
             case 'ss':
                 return shadowsocks(proxy);
             case 'trojan':
                 return trojan(proxy);
             case 'vmess':
-                return vmess(proxy);
+                return vmess(proxy, opts['include-unsupported-proxy']);
             case 'http':
                 return http(proxy);
             case 'socks5':

@@ -263,7 +264,7 @@ function trojan(proxy) {
     return result.toString();
 }

-function vmess(proxy) {
+function vmess(proxy, includeUnsupportedProxy) {
     const result = new Result(proxy);
     result.append(`${proxy.name}=${proxy.type},${proxy.server},${proxy.port}`);
     result.appendIfPresent(`,username=${proxy.uuid}`, 'uuid');

@@ -277,7 +278,7 @@ function vmess(proxy) {
     );

     // transport
-    handleTransport(result, proxy);
+    handleTransport(result, proxy, includeUnsupportedProxy);

     // AEAD
     if (isPresent(proxy, 'aead')) {

@@ -1012,7 +1013,7 @@ function hysteria2(proxy) {
     return result.toString();
 }

-function handleTransport(result, proxy) {
+function handleTransport(result, proxy, includeUnsupportedProxy) {
     if (isPresent(proxy, 'network')) {
         if (proxy.network === 'ws') {
             result.append(`,ws=true`);

@@ -1038,7 +1039,13 @@ function handleTransport(result, proxy) {
                 }
             }
         } else {
-            throw new Error(`network ${proxy.network} is unsupported`);
+            if (includeUnsupportedProxy && ['http'].includes(proxy.network)) {
+                $.info(
+                    `Include Unsupported Proxy: nework ${proxy.network} -> tcp`,
+                );
+            } else {
+                throw new Error(`network ${proxy.network} is unsupported`);
+            }
         }
     }
 }
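With the change above, Surge output can keep VMess nodes whose transport Surge does not handle natively (HTTP transport is logged and emitted as plain TCP instead of throwing). A hedged sketch of how the option could be supplied through the `produce()` signature shown earlier; passing `opts` through `ProxyUtils.produce` is an assumption based on that signature:

```js
// Sketch: ask the Surge producer to keep otherwise-unsupported VMess nodes.
const conf = ProxyUtils.produce(proxies, 'Surge', undefined, {
    'include-unsupported-proxy': true,
});
```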
@@ -10,6 +10,7 @@ export default function URI_Producer() {
         delete proxy.collectionName;
         delete proxy.id;
         delete proxy.resolved;
+        delete proxy['no-resolve'];
         for (const key in proxy) {
             if (proxy[key] == null || /^_/i.test(key)) {
                 delete proxy[key];

@@ -11,17 +11,65 @@ import { syncToGist } from '@/restful/artifacts';
 import { findByName } from '@/utils/database';

 !(async function () {
-    const settings = $.read(SETTINGS_KEY);
-    // if GitHub token is not configured
-    if (!settings.githubUser || !settings.gistToken) return;
+    let arg;
+    if (typeof $argument != 'undefined') {
+        arg = Object.fromEntries(
+            // eslint-disable-next-line no-undef
+            $argument.split('&').map((item) => item.split('=')),
+        );
+    } else {
+        arg = {};
+    }
+    let sub_names = (arg?.subscription ?? arg?.sub ?? '')
+        .split(/,|,/g)
+        .map((i) => i.trim())
+        .filter((i) => i.length > 0)
+        .map((i) => decodeURIComponent(i));
+    let col_names = (arg?.collection ?? arg?.col ?? '')
+        .split(/,|,/g)
+        .map((i) => i.trim())
+        .filter((i) => i.length > 0)
+        .map((i) => decodeURIComponent(i));
+    if (sub_names.length > 0 || col_names.length > 0) {
+        if (sub_names.length > 0)
+            await produceArtifacts(sub_names, 'subscription');
+        if (col_names.length > 0)
+            await produceArtifacts(col_names, 'collection');
+    } else {
+        const settings = $.read(SETTINGS_KEY);
+        // if GitHub token is not configured
+        if (!settings.githubUser || !settings.gistToken) return;

-    const artifacts = $.read(ARTIFACTS_KEY);
-    if (!artifacts || artifacts.length === 0) return;
+        const artifacts = $.read(ARTIFACTS_KEY);
+        if (!artifacts || artifacts.length === 0) return;

-    const shouldSync = artifacts.some((artifact) => artifact.sync);
-    if (shouldSync) await doSync();
+        const shouldSync = artifacts.some((artifact) => artifact.sync);
+        if (shouldSync) await doSync();
+    }
 })().finally(() => $.done());

+async function produceArtifacts(names, type) {
+    try {
+        if (names.length > 0) {
+            $.info(`produceArtifacts ${type} 开始: ${names.join(', ')}`);
+            await Promise.all(
+                names.map(async (name) => {
+                    try {
+                        await produceArtifact({
+                            type,
+                            name,
+                        });
+                    } catch (e) {
+                        $.error(`${type} ${name} error: ${e.message ?? e}`);
+                    }
+                }),
+            );
+            $.info(`produceArtifacts ${type} 完成: ${names.join(', ')}`);
+        }
+    } catch (e) {
+        $.error(`produceArtifacts error: ${e.message ?? e}`);
+    }
+}
 async function doSync() {
     console.log(
         `
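For reference, a standalone sketch of the `$argument` handling above, using an illustrative argument string of the shape the Surge modules further down construct (`sub=...&col=...`):

```js
// Standalone sketch of the argument parsing above (illustrative input).
const $argument = 'sub=sub1,sub2&col=col1';

const arg = Object.fromEntries(
    $argument.split('&').map((item) => item.split('=')),
); // { sub: 'sub1,sub2', col: 'col1' }

// The script above also splits on a full-width comma; a plain comma is used here.
const subNames = (arg.subscription ?? arg.sub ?? '')
    .split(',')
    .map((i) => i.trim())
    .filter((i) => i.length > 0)
    .map((i) => decodeURIComponent(i)); // ['sub1', 'sub2']

const colNames = (arg.collection ?? arg.col ?? '')
    .split(',')
    .map((i) => i.trim())
    .filter((i) => i.length > 0)
    .map((i) => decodeURIComponent(i)); // ['col1']
```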
@@ -69,6 +117,7 @@ async function doSync() {
                 await produceArtifact({
                     type: 'subscription',
                     name: subName,
+                    awaitCustomCache: true,
                 });
             } catch (e) {
                 // $.error(`${e.message ?? e}`);

@@ -55,7 +55,11 @@ async function downloadSubscription(req, res) {
     const platform =
         req.query.target || getPlatformFromHeaders(req.headers) || 'JSON';

-    $.info(`正在下载订阅:${name}`);
+    $.info(
+        `正在下载订阅:${name}\n请求 User-Agent: ${
+            req.headers['user-agent'] || req.headers['User-Agent']
+        }`,
+    );
     let {
         url,
         ua,

@@ -152,6 +156,7 @@ async function downloadSubscription(req, res) {
                 $arguments.flowUserAgent,
                 undefined,
                 sub.proxy,
+                $arguments.flowUrl,
             );
             if (flowInfo) {
                 res.set('subscription-userinfo', flowInfo);

@@ -228,7 +233,11 @@ async function downloadCollection(req, res) {
     const allCols = $.read(COLLECTIONS_KEY);
     const collection = findByName(allCols, name);

-    $.info(`正在下载组合订阅:${name}`);
+    $.info(
+        `正在下载组合订阅:${name}\n请求 User-Agent: ${
+            req.headers['user-agent'] || req.headers['User-Agent']
+        }`,
+    );

     let {
         ignoreFailedRemoteSub,

@@ -306,6 +315,7 @@ async function downloadCollection(req, res) {
                 $arguments.flowUserAgent,
                 undefined,
                 sub.proxy,
+                $arguments.flowUrl,
             );
             if (flowInfo) {
                 res.set('subscription-userinfo', flowInfo);

@@ -118,6 +118,7 @@ async function getFlowInfo(req, res) {
                 $arguments.flowUserAgent,
                 undefined,
                 sub.proxy,
+                $arguments.flowUrl,
             );
             if (!flowHeaders) {
                 failed(

@@ -36,6 +36,7 @@ async function produceArtifact({
     produceType,
     produceOpts = {},
     subscription,
+    awaitCustomCache,
 }) {
     platform = platform || 'JSON';

@@ -67,6 +68,8 @@
                     ua || sub.ua,
                     undefined,
                     sub.proxy,
+                    undefined,
+                    awaitCustomCache,
                 );
             } catch (err) {
                 errors[url] = err;

@@ -112,6 +115,8 @@ async function produceArtifact({
                     ua || sub.ua,
                     undefined,
                     sub.proxy,
+                    undefined,
+                    awaitCustomCache,
                 );
             } catch (err) {
                 errors[url] = err;

@@ -503,6 +508,7 @@ async function syncArtifacts() {
                 await produceArtifact({
                     type: 'subscription',
                     name: subName,
+                    awaitCustomCache: true,
                 });
             } catch (e) {
                 // $.error(`${e.message ?? e}`);
@@ -14,7 +14,14 @@ import $ from '@/core/app';

 const tasks = new Map();

-export default async function download(rawUrl, ua, timeout, proxy) {
+export default async function download(
+    rawUrl,
+    ua,
+    timeout,
+    proxy,
+    skipCustomCache,
+    awaitCustomCache,
+) {
     let $arguments = {};
     let url = rawUrl.replace(/#noFlow$/, '');
     const rawArgs = url.split('#');

@@ -35,10 +42,68 @@ export default async function download(rawUrl, ua, timeout, proxy) {
             }
         }
     }
+    const { isNode, isStash, isLoon, isShadowRocket, isQX } = ENV();
+    const { defaultUserAgent, defaultTimeout, cacheThreshold } =
+        $.read(SETTINGS_KEY);
+    const userAgent = ua || defaultUserAgent || 'clash.meta';
+    const requestTimeout = timeout || defaultTimeout;
+    const id = hex_md5(userAgent + url);
+
+    const customCacheKey = $arguments?.cacheKey
+        ? `#sub-store-cached-custom-${$arguments?.cacheKey}`
+        : undefined;
+
+    if (customCacheKey && !skipCustomCache) {
+        const customCached = $.read(customCacheKey);
+        const cached = resourceCache.get(id);
+        if (!$arguments?.noCache && cached) {
+            $.info(
+                `乐观缓存: URL ${url}\n存在有效的常规缓存\n使用常规缓存以避免重复请求`,
+            );
+            return cached;
+        }
+        if (customCached) {
+            if (awaitCustomCache) {
+                $.info(`乐观缓存: URL ${url}\n本次进行请求 尝试更新缓存`);
+                try {
+                    await download(
+                        rawUrl.replace(/(\?|&)cacheKey=.*?(&|$)/, ''),
+                        ua,
+                        timeout,
+                        proxy,
+                        true,
+                    );
+                } catch (e) {
+                    $.error(
+                        `乐观缓存: URL ${url} 更新缓存发生错误 ${
+                            e.message ?? e
+                        }`,
+                    );
+                    $.info('使用乐观缓存的数据刷新缓存, 防止后续请求');
+                    resourceCache.set(id, customCached);
+                }
+            } else {
+                $.info(
+                    `乐观缓存: URL ${url}\n本次返回自定义缓存 ${$arguments?.cacheKey}\n并进行请求 尝试异步更新缓存`,
+                );
+                download(
+                    rawUrl.replace(/(\?|&)cacheKey=.*?(&|$)/, ''),
+                    ua,
+                    timeout,
+                    proxy,
+                    true,
+                ).catch((e) => {
+                    $.error(
+                        `乐观缓存: URL ${url} 异步更新缓存发生错误 ${
+                            e.message ?? e
+                        }`,
+                    );
+                });
+            }
+            return customCached;
+        }
+    }

     // const downloadUrlMatch = url.match(/^\/api\/(file|module)\/(.+)/);
     // if (downloadUrlMatch) {
     //     let type = downloadUrlMatch?.[1];
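A sketch of how the optimistic cache above is driven from a subscription URL; the URL is illustrative, and `cacheKey` is read from the `#`-arguments parsed at the top of `download()`:

```js
// Illustrative URL: `cacheKey` selects a persistent custom cache entry.
const rawUrl = 'https://example.com/subscription#noFlow&cacheKey=mysub';

// download() will then, roughly:
//   1. return the regular resource cache entry if it is still valid;
//   2. otherwise return $.read('#sub-store-cached-custom-mysub') right away and
//      refresh it via a nested download() call (awaited when awaitCustomCache is
//      set, as the cron sync/produce script does, otherwise in the background);
//   3. pass skipCustomCache=true (and strip `cacheKey` from the URL) for that
//      nested call so it does not re-enter the custom-cache branch.
const body = await download(rawUrl, 'clash.meta');
```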
@@ -56,12 +121,6 @@ export default async function download(rawUrl, ua, timeout, proxy) {
     //         return item.content;
     //     }

-    const { isNode, isStash, isLoon, isShadowRocket, isQX } = ENV();
-    const { defaultUserAgent, defaultTimeout, cacheThreshold } =
-        $.read(SETTINGS_KEY);
-    const userAgent = ua || defaultUserAgent || 'clash.meta';
-    const requestTimeout = timeout || defaultTimeout;
-    const id = hex_md5(userAgent + url);
     if (!isNode && tasks.has(id)) {
         return tasks.get(id);
     }

@@ -84,6 +143,10 @@ export default async function download(rawUrl, ua, timeout, proxy) {
     if (!$arguments?.noCache && cached) {
         $.info(`使用缓存: ${url}`);
         result = cached;
+        if (customCacheKey) {
+            $.info(`URL ${url}\n写入自定义缓存 ${$arguments?.cacheKey}`);
+            $.write(cached, customCacheKey);
+        }
     } else {
         $.info(
             `Downloading...\nUser-Agent: ${userAgent}\nTimeout: ${requestTimeout}\nProxy: ${proxy}\nURL: ${url}`,

@@ -120,6 +183,9 @@ export default async function download(rawUrl, ua, timeout, proxy) {
                 if (shouldCache) {
                     resourceCache.set(id, body);
+                    if (customCacheKey) {
+                        $.info(
+                            `URL ${url}\n写入自定义缓存 ${$arguments?.cacheKey}`,
+                        );
+                        $.write(body, customCacheKey);
+                    }
                 }

@@ -151,6 +217,7 @@ export default async function download(rawUrl, ua, timeout, proxy) {
                         $arguments.flowUserAgent,
                         undefined,
                         proxy,
+                        $arguments.flowUrl,
                     ),
                 ),
             );

@@ -10,6 +10,7 @@ const {
     isShadowRocket,
     isLanceX,
     isEgern,
+    isGUIforCores,
 } = ENV();
 let backend = 'Node';
 if (isNode) backend = 'Node';

@@ -20,6 +21,7 @@ if (isStash) backend = 'Stash';
 if (isShadowRocket) backend = 'ShadowRocket';
 if (isEgern) backend = 'Egern';
 if (isLanceX) backend = 'LanceX';
+if (isGUIforCores) backend = 'GUI.for.Cores';

 let meta = {};

@@ -36,6 +38,10 @@ try {
         // eslint-disable-next-line no-undef
         meta.script = $script;
     }
+    if (typeof $Plugin !== 'undefined') {
+        // eslint-disable-next-line no-undef
+        meta.plugin = $Plugin;
+    }
     if (isNode) {
         meta.node = {
             version: eval('process.version'),

@@ -10,8 +10,8 @@ export function getFlowField(headers) {
     )[0];
     return headers[subkey];
 }
-export async function getFlowHeaders(rawUrl, ua, timeout, proxy) {
-    let url = rawUrl;
+export async function getFlowHeaders(rawUrl, ua, timeout, proxy, flowUrl) {
+    let url = flowUrl || rawUrl;
     let $arguments = {};
     const rawArgs = url.split('#');
     url = url.split('#')[0];

@@ -48,60 +48,76 @@ export async function getFlowHeaders(rawUrl, ua, timeout, proxy) {
         'Quantumult%20X/1.0.30 (iPhone14,2; iOS 15.6)';
     const requestTimeout = timeout || defaultTimeout;
     const http = HTTP();
-    try {
+    if (flowUrl) {
         $.info(
-            `使用 HEAD 方法获取流量信息: ${url}, User-Agent: ${
+            `使用 GET 方法从响应体获取流量信息: ${flowUrl}, User-Agent: ${
                 userAgent || ''
             }`,
         );
-        const { headers } = await http.head({
-            url: url
-                .split(/[\r\n]+/)
-                .map((i) => i.trim())
-                .filter((i) => i.length)[0],
-            headers: {
-                'User-Agent': userAgent,
-                ...(isStash && proxy
-                    ? {
-                          'X-Stash-Selected-Proxy':
-                              encodeURIComponent(proxy),
-                      }
-                    : {}),
-                ...(isShadowRocket && proxy
-                    ? { 'X-Surge-Policy': proxy }
-                    : {}),
-            },
-            timeout: requestTimeout,
-            ...(proxy ? { proxy } : {}),
-            ...(isLoon && proxy ? { node: proxy } : {}),
-            ...(isQX && proxy ? { opts: { policy: proxy } } : {}),
-            ...(proxy ? getPolicyDescriptor(proxy) : {}),
-        });
-        flowInfo = getFlowField(headers);
-    } catch (e) {
-        $.error(
-            `使用 HEAD 方法获取流量信息失败: ${url}, User-Agent: ${
-                userAgent || ''
-            }`: ${e.message ?? e}`,
-        );
-    }
-    if (!flowInfo) {
-        $.info(
-            `使用 GET 方法获取流量信息: ${url}, User-Agent: ${
-                userAgent || ''
-            }`,
-        );
-        const { headers } = await http.get({
-            url: url
-                .split(/[\r\n]+/)
-                .map((i) => i.trim())
-                .filter((i) => i.length)[0],
+        const { body } = await http.get({
+            url: flowUrl,
             headers: {
                 'User-Agent': userAgent,
             },
             timeout: requestTimeout,
         });
-        flowInfo = getFlowField(headers);
-    }
+        flowInfo = body;
+    } else {
+        try {
+            $.info(
+                `使用 HEAD 方法从响应头获取流量信息: ${url}, User-Agent: ${
+                    userAgent || ''
+                }`,
+            );
+            const { headers } = await http.head({
+                url: url
+                    .split(/[\r\n]+/)
+                    .map((i) => i.trim())
+                    .filter((i) => i.length)[0],
+                headers: {
+                    'User-Agent': userAgent,
+                    ...(isStash && proxy
+                        ? {
+                              'X-Stash-Selected-Proxy':
+                                  encodeURIComponent(proxy),
+                          }
+                        : {}),
+                    ...(isShadowRocket && proxy
+                        ? { 'X-Surge-Policy': proxy }
+                        : {}),
+                },
+                timeout: requestTimeout,
+                ...(proxy ? { proxy } : {}),
+                ...(isLoon && proxy ? { node: proxy } : {}),
+                ...(isQX && proxy ? { opts: { policy: proxy } } : {}),
+                ...(proxy ? getPolicyDescriptor(proxy) : {}),
+            });
+            flowInfo = getFlowField(headers);
+        } catch (e) {
+            $.error(
+                `使用 HEAD 方法从响应头获取流量信息失败: ${url}, User-Agent: ${
+                    userAgent || ''
+                }`: ${e.message ?? e}`,
+            );
+        }
+        if (!flowInfo) {
+            $.info(
+                `使用 GET 方法获取流量信息: ${url}, User-Agent: ${
+                    userAgent || ''
+                }`,
+            );
+            const { headers } = await http.get({
+                url: url
+                    .split(/[\r\n]+/)
+                    .map((i) => i.trim())
+                    .filter((i) => i.length)[0],
+                headers: {
+                    'User-Agent': userAgent,
+                },
+                timeout: requestTimeout,
+            });
+            flowInfo = getFlowField(headers);
+        }
+    }
     if (flowInfo) {
         headersResourceCache.set(url, flowInfo);
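The new `flowUrl` parameter changes where the traffic information comes from. A sketch of the call shape used by the RESTful handlers earlier in this diff (names mirror those call sites):

```js
// Sketch: with flowUrl set, getFlowHeaders() issues a plain GET against it and
// uses the response body as the subscription-userinfo string; without it, the
// original HEAD-then-GET header probing path is used.
const flowInfo = await getFlowHeaders(
    url,                      // rawUrl for the header-probing path
    $arguments.flowUserAgent, // optional custom User-Agent
    undefined,                // timeout: fall back to defaultTimeout
    sub.proxy,                // optional proxy / policy for the request
    $arguments.flowUrl,       // new: endpoint whose body already is the flow info
);
```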
@@ -430,6 +430,13 @@ export function getISO(name) {
     return ISOFlags[getFlag(name)]?.[0];
 }

+// remove flag
+export function removeFlag(str) {
+    return str
+        .replace(/[\uD83C][\uDDE6-\uDDFF][\uD83C][\uDDE6-\uDDFF]|🏴☠️|🏳️🌈/g, '')
+        .trim();
+}
+
 export class MMDB {
     constructor({ country, asn } = {}) {
         if ($.env.isNode) {

@@ -438,11 +445,21 @@ export class MMDB {
             const countryFile =
                 country || eval('process.env.SUB_STORE_MMDB_COUNTRY_PATH');
             const asnFile = asn || eval('process.env.SUB_STORE_MMDB_ASN_PATH');
+            // $.info(
+            //     `GeoLite2 Country MMDB: ${countryFile}, exists: ${fs.existsSync(
+            //         countryFile,
+            //     )}`,
+            // );
             if (countryFile) {
                 this.countryReader = Reader.openBuffer(
                     fs.readFileSync(countryFile),
                 );
             }
+            // $.info(
+            //     `GeoLite2 ASN MMDB: ${asnFile}, exists: ${fs.existsSync(
+            //         asnFile,
+            //     )}`,
+            // );
             if (asnFile) {
                 if (!fs.existsSync(asnFile))
                     throw new Error('GeoLite2 ASN MMDB does not exist');
backend/src/vendor/express.js (vendored, 4 changes)
@@ -161,7 +161,7 @@ export default function express({ substore: $, port, host }) {

 function Response() {
     let statusCode = 200;
-    const { isQX, isLoon, isSurge } = ENV();
+    const { isQX, isLoon, isSurge, isGUIforCores } = ENV();
     const headers = DEFAULT_HEADERS;
     const STATUS_CODE_MAP = {
         200: 'HTTP/1.1 200 OK',

@@ -184,7 +184,7 @@ export default function express({ substore: $, port, host }) {
             body,
             headers,
         };
-        if (isQX) {
+        if (isQX || isGUIforCores) {
             $done(response);
         } else if (isLoon || isSurge) {
             $done({
backend/src/vendor/open-api.js (vendored, 48 changes)
@@ -8,6 +8,7 @@ const isStash =
 const isShadowRocket = 'undefined' !== typeof $rocket;
 const isEgern = 'object' == typeof egern;
 const isLanceX = 'undefined' != typeof $native;
+const isGUIforCores = typeof $Plugins !== 'undefined';

 export class OpenAPI {
     constructor(name = 'untitled', debug = false) {

@@ -48,7 +49,10 @@ export class OpenAPI {
             this.cache = JSON.parse($prefs.valueForKey(this.name) || '{}');
         if (isLoon || isSurge)
             this.cache = JSON.parse($persistentStore.read(this.name) || '{}');
-
+        if (isGUIforCores)
+            this.cache = JSON.parse(
+                $Plugins.SubStoreCache.get(this.name) || '{}',
+            );
         if (isNode) {
             // create a json for root cache
             const basePath =

@@ -86,6 +90,7 @@ export class OpenAPI {
         const data = JSON.stringify(this.cache, null, 2);
         if (isQX) $prefs.setValueForKey(data, this.name);
         if (isLoon || isSurge) $persistentStore.write(data, this.name);
+        if (isGUIforCores) $Plugins.SubStoreCache.set(this.name, data);
         if (isNode) {
             const basePath =
                 eval('process.env.SUB_STORE_DATA_BASE_PATH') || '.';

@@ -118,6 +123,9 @@ export class OpenAPI {
             if (isNode) {
                 this.root[key] = data;
             }
+            if (isGUIforCores) {
+                return $Plugins.SubStoreCache.set(key, data);
+            }
         } else {
             this.cache[key] = data;
         }

@@ -137,6 +145,9 @@ export class OpenAPI {
             if (isNode) {
                 return this.root[key];
             }
+            if (isGUIforCores) {
+                return $Plugins.SubStoreCache.get(key);
+            }
         } else {
             return this.cache[key];
         }

@@ -155,6 +166,9 @@ export class OpenAPI {
             if (isNode) {
                 delete this.root[key];
             }
+            if (isGUIforCores) {
+                return $Plugins.SubStoreCache.remove(key);
+            }
         } else {
             delete this.cache[key];
         }

@@ -220,6 +234,9 @@ export class OpenAPI {
                 });
             }
         }
+        if (isGUIforCores) {
+            $Plugins.Notify(title, subtitle + '\n' + content);
+        }
     }

     // other helper functions

@@ -240,7 +257,7 @@ export class OpenAPI {
     }

     done(value = {}) {
-        if (isQX || isLoon || isSurge) {
+        if (isQX || isLoon || isSurge || isGUIforCores) {
            $done(value);
         } else if (isNode) {
             if (typeof $context !== 'undefined') {

@@ -262,11 +279,12 @@ export function ENV() {
         isShadowRocket,
         isEgern,
         isLanceX,
+        isGUIforCores,
     };
 }

 export function HTTP(defaultOptions = { baseURL: '' }) {
-    const { isQX, isLoon, isSurge, isNode } = ENV();
+    const { isQX, isLoon, isSurge, isNode, isGUIforCores } = ENV();
     const methods = [
         'GET',
         'POST',

@@ -356,6 +374,30 @@ export function HTTP(defaultOptions = { baseURL: '' }) {
                 });
             });
         });
+    } else if (isGUIforCores) {
+        worker = new Promise(async (resolve, reject) => {
+            try {
+                const response = await $Plugins.Requests({
+                    method,
+                    url: options.url,
+                    headers: options.headers,
+                    body: options.body,
+                    options: {
+                        Proxy: options.proxy,
+                        Timeout: options.timeout
+                            ? options.timeout / 1000
+                            : 15,
+                    },
+                });
+                resolve({
+                    statusCode: response.status,
+                    headers: response.headers,
+                    body: response.body,
+                });
+            } catch (error) {
+                reject(error);
+            }
+        });
     }

     let timeoutid;
@@ -1,5 +1,5 @@
 #!name=Sub-Store
-#!desc=高级订阅管理工具. 定时任务默认为每天 23 点 55 分
+#!desc=高级订阅管理工具. 定时任务默认为每天 23 点 55 分. 定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'
 #!openUrl=https://sub.store
 #!author=Peng-YM
 #!homepage=https://github.com/sub-store-org/Sub-Store
@@ -1,6 +1,6 @@
 {
     "name": "Sub-Store",
-    "description": "定时任务默认为每天 23 点 55 分",
+    "description": "定时任务默认为每天 23 点 55 分. 定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'",
     "task": [
         "55 23 * * * https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js, tag=Sub-Store Sync, img-url=https://raw.githubusercontent.com/58xinian/icon/master/Sub-Store1.png"
     ]
@@ -6,17 +6,29 @@ Sub-Store Releases: [`https://github.com/sub-store-org/Sub-Store/releases`](http

 Telegram 频道: [`https://t.me/cool_scripts` ](https://t.me/cool_scripts)

-## 脚本配置:
+## 服务器/云平台/Docker/Android 版
+
+https://xream.notion.site/Sub-Store-abe6a96944724dc6a36833d5c9ab7c87
+
+## App 版

 ### 1. Loon
 安装使用 插件 [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Loon.plugin`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Loon.plugin) 即可。

 ### 2. Surge

+#### 关于 Surge 的格外说明
+
+Surge Mac 版如何支持 SSR, 如何去除 HTTP 传输层以支持 类似 VMess HTTP 节点等 请查看 [链接参数说明](https://github.com/sub-store-org/Sub-Store/wiki/%E9%93%BE%E6%8E%A5%E5%8F%82%E6%95%B0%E8%AF%B4%E6%98%8E)
+
+定时处理订阅 功能, 避免 App 内拉取超时, 请查看 [定时处理订阅](https://t.me/zhetengsha/1449)
+
 0. 最新 Surge iOS TestFlight 版本 可使用 Beta 版(支持最新 Surge iOS TestFlight 版本的特性): [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-Beta.sgmodule`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-Beta.sgmodule)

 1. 官方默认版模块(支持 App 内使用编辑参数): [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge.sgmodule`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge.sgmodule)

+> 最新版 Surge 已删除 `ability: http-client-policy` 参数, 模块暂不做修改, 对测落地功能无影响
+
 2. 经典版, 不支持编辑参数, 固定带 ability 参数版本, 使用 jsc 引擎时, 可能会爆内存, 如果需要使用指定节点功能 例如[加旗帜脚本或者cname脚本] 请使用此带 ability 参数版本: [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-ability.sgmodule`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-ability.sgmodule)

 3. 经典版, 不支持编辑参数, 固定不带 ability 参数版本: [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-Noability.sgmodule`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-Noability.sgmodule)

@@ -31,9 +43,17 @@ Telegram 频道: [`https://t.me/cool_scripts` ](https://t.me/cool_scripts)
 安装使用 覆写 [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Stash.stoverride`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Stash.stoverride) 即可。

 ### 5. Shadowrocket
-安装使用 模块 [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge.sgmodule`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge.sgmodule) 即可。
+安装使用 模块 [`https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-Noability.sgmodule`](https://raw.githubusercontent.com/sub-store-org/Sub-Store/master/config/Surge-Noability.sgmodule) 即可。

+## 使用 Sub-Store
+1. 使用 Safari 打开这个 https://sub.store 如网页正常打开并且未弹出任何错误提示,说明 Sub-Store 已经配置成功。
+2. 可以把 Sub-Store 添加到主屏幕,即可获得类似于 APP 的使用体验。
+3. 更详细的使用指南请参考[文档](https://www.notion.so/Sub-Store-6259586994d34c11a4ced5c406264b46)。
+
+## 链接参数说明
+
+https://github.com/sub-store-org/Sub-Store/wiki/%E9%93%BE%E6%8E%A5%E5%8F%82%E6%95%B0%E8%AF%B4%E6%98%8E
+
 ## 脚本使用说明

 https://github.com/sub-store-org/Sub-Store/wiki/%E8%84%9A%E6%9C%AC%E4%BD%BF%E7%94%A8%E8%AF%B4%E6%98%8E
@@ -1,5 +1,5 @@
 name: Sub-Store
-desc: 高级订阅管理工具 @Peng-YM. 定时任务默认为每天 23 点 55 分
+desc: 高级订阅管理工具 @Peng-YM. 定时任务默认为每天 23 点 55 分. 定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'
 icon: https://raw.githubusercontent.com/cc63/ICON/main/Sub-Store.png

 http:
@@ -1,8 +1,8 @@
 #!name=Sub-Store(β)
 #!desc=支持 Surge 正式版的参数设置功能. 测落地功能 ability: http-client-policy, 同步配置的定时 cronexp: 55 23 * * *
 #!category=订阅管理
-#!arguments=ability:http-client-policy,cronexp:55 23 * * *,sync:"Sub-Store Sync",timeout:120,engine:auto
-#!arguments-desc=\n1️⃣ ability\n\n默认已开启测落地能力\n需要配合脚本操作\n如 https://raw.githubusercontent.com/Keywos/rule/main/cname.js\n填写任意其他值关闭\n\n2️⃣ cronexp\n\n同步配置定时任务\n默认为每天 23 点 55 分\n\n3️⃣ sync\n\n自定义定时任务名\n便于在脚本编辑器中选择\n若设为 # 可取消定时任务\n\n4️⃣ timeout\n\n超时, 单位为秒\n\n5️⃣ engine\n\n默认为自动使用 webview 引擎, 可设为指定 jsc, 但 jsc 容易爆内存
+#!arguments=ability:http-client-policy,cronexp:55 23 * * *,sync:"Sub-Store Sync",timeout:120,engine:auto,produce:"# Sub-Store Produce",produce_cronexp:50 */6 * * *,produce_sub:"sub1,sub2",produce_col:"col1,col2"
+#!arguments-desc=\n1️⃣ ability\n\n默认已开启测落地能力\n需要配合脚本操作\n如 https://raw.githubusercontent.com/Keywos/rule/main/cname.js\n填写任意其他值关闭\n\n2️⃣ cronexp\n\n同步配置定时任务\n默认为每天 23 点 55 分\n\n定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'\n\n3️⃣ sync\n\n自定义定时任务名\n便于在脚本编辑器中选择\n若设为 # 可取消定时任务\n\n4️⃣ timeout\n\n脚本超时, 单位为秒\n\n5️⃣ engine\n\n默认为自动使用 webview 引擎, 可设为指定 jsc, 但 jsc 容易爆内存\n\n6️⃣ produce\n\n自定义处理订阅的定时任务名\n一般用于定时处理耗时较长的订阅, 以更新缓存\n这样 Surge 中拉取的时候就能用到缓存, 不至于总是超时\n若设为 # 可取消此定时任务\n默认不开启\n\n7️⃣ produce_cronexp\n\n配置处理订阅的定时任务\n\n默认为每 6 小时\n\n9️⃣ produce_sub\n\n自定义需定时处理的单条订阅名\n多个用 , 连接\n\n🔟 produce_col\n\n自定义需定时处理的组合订阅名\n多个用 , 连接\n\n⚠️ 注意: 是 名称(name) 不是 显示名称(displayName)\n如果名称需要编码, 请编码后再用 , 连接\n顺序: 并发执行单条订阅, 然后并发执行组合订阅

 [MITM]
 hostname = %APPEND% sub.store

@@ -12,4 +12,6 @@ Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api

 Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js,requires-body=true,timeout={{{timeout}}},engine={{{engine}}}

-{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}}
+{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}}
+
+{{{produce}}}=type=cron,cronexp="{{{produce_cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}},argument="sub={{{produce_sub}}}&col={{{produce_col}}}"
@@ -1,5 +1,5 @@
 #!name=Sub-Store
-#!desc=高级订阅管理工具 @Peng-YM 无 ability 参数版本,不会爆内存, 如果需要使用指定节点功能 例如[加旗帜脚本或者cname脚本] 可以用带 ability 参数. 定时任务默认为每天 23 点 55 分
+#!desc=高级订阅管理工具 @Peng-YM 无 ability 参数版本,不会爆内存, 如果需要使用指定节点功能 例如[加旗帜脚本或者cname脚本] 可以用带 ability 参数. 定时任务默认为每天 23 点 55 分. 定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'
 #!category=订阅管理

 [MITM]
@@ -1,5 +1,5 @@
 #!name=Sub-Store
-#!desc=高级订阅管理工具 @Peng-YM 带 ability 参数版本, 使用 jsc 引擎时, 可能会爆内存, 如果不需要使用指定节点功能 例如[加旗帜脚本或者cname脚本] 可以用不带 ability 参数版本. 定时任务默认为每天 23 点 55 分
+#!desc=高级订阅管理工具 @Peng-YM 带 ability 参数版本, 使用 jsc 引擎时, 可能会爆内存, 如果不需要使用指定节点功能 例如[加旗帜脚本或者cname脚本] 可以用不带 ability 参数版本. 定时任务默认为每天 23 点 55 分. 定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'
 #!category=订阅管理

 [MITM]
@@ -1,8 +1,8 @@
 #!name=Sub-Store
 #!desc=支持 Surge 正式版的参数设置功能. 测落地功能 ability: http-client-policy, 同步配置的定时 cronexp: 55 23 * * *
 #!category=订阅管理
-#!arguments=ability:http-client-policy,cronexp:55 23 * * *,sync:"Sub-Store Sync",timeout:120,engine:auto
-#!arguments-desc=\n1️⃣ ability\n\n默认已开启测落地能力\n需要配合脚本操作\n如 https://raw.githubusercontent.com/Keywos/rule/main/cname.js\n填写任意其他值关闭\n\n2️⃣ cronexp\n\n同步配置定时任务\n默认为每天 23 点 55 分\n\n3️⃣ sync\n\n自定义定时任务名\n便于在脚本编辑器中选择\n若设为 # 可取消定时任务\n\n4️⃣ timeout\n\n超时, 单位为秒\n\n5️⃣ engine\n\n默认为自动使用 webview 引擎, 可设为指定 jsc, 但 jsc 容易爆内存
+#!arguments=ability:http-client-policy,cronexp:55 23 * * *,sync:"Sub-Store Sync",timeout:120,engine:auto,produce:"# Sub-Store Produce",produce_cronexp:50 */6 * * *,produce_sub:"sub1,sub2",produce_col:"col1,col2"
+#!arguments-desc=\n1️⃣ ability\n\n默认已开启测落地能力\n需要配合脚本操作\n如 https://raw.githubusercontent.com/Keywos/rule/main/cname.js\n填写任意其他值关闭\n\n2️⃣ cronexp\n\n同步配置定时任务\n默认为每天 23 点 55 分\n\n定时任务指定时将订阅/文件上传到私有 Gist. 在前端, 叫做 '同步' 或 '同步配置'\n\n3️⃣ sync\n\n自定义定时任务名\n便于在脚本编辑器中选择\n若设为 # 可取消定时任务\n\n4️⃣ timeout\n\n脚本超时, 单位为秒\n\n5️⃣ engine\n\n默认为自动使用 webview 引擎, 可设为指定 jsc, 但 jsc 容易爆内存\n\n6️⃣ produce\n\n自定义处理订阅的定时任务名\n一般用于定时处理耗时较长的订阅, 以更新缓存\n这样 Surge 中拉取的时候就能用到缓存, 不至于总是超时\n若设为 # 可取消此定时任务\n默认不开启\n\n7️⃣ produce_cronexp\n\n配置处理订阅的定时任务\n\n默认为每 6 小时\n\n9️⃣ produce_sub\n\n自定义需定时处理的单条订阅名\n多个用 , 连接\n\n🔟 produce_col\n\n自定义需定时处理的组合订阅名\n多个用 , 连接\n\n⚠️ 注意: 是 名称(name) 不是 显示名称(displayName)\n如果名称需要编码, 请编码后再用 , 连接\n顺序: 并发执行单条订阅, 然后并发执行组合订阅

 [MITM]
 hostname = %APPEND% sub.store

@@ -12,4 +12,6 @@ Sub-Store Core=type=http-request,pattern=^https?:\/\/sub\.store\/((download)|api

 Sub-Store Simple=type=http-request,pattern=^https?:\/\/sub\.store,script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/sub-store-0.min.js,requires-body=true,timeout={{{timeout}}},engine={{{engine}}}

-{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}}
+{{{sync}}}=type=cron,cronexp="{{{cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}}
+
+{{{produce}}}=type=cron,cronexp="{{{produce_cronexp}}}",wake-system=1,timeout={{{timeout}}},script-path=https://github.com/sub-store-org/Sub-Store/releases/latest/download/cron-sync-artifacts.min.js,engine={{{engine}}},argument="sub={{{produce_sub}}}&col={{{produce_col}}}"
@@ -7,10 +7,12 @@ function operator(proxies = [], targetPlatform, context) {
     // proxies 为传入的内部节点数组
     // 结构大致参考了 Clash.Meta(mihomo) 有私货
     // 可在预览界面点击节点查看 JSON 结构 或查看 `target=JSON` 的通用订阅
-    // 1. `no-resolve` 为不解析域名
-    // 2. 域名解析后 会多一个 `resolved` 字段
+    // 1. `_no-resolve` 为不解析域名
+    // 2. 域名解析后 会多一个 `_resolved` 字段
     // 3. 域名解析后会有`_IPv4`, `_IPv6`, `_IP`(若有多个步骤, 只取第一次成功的 v4 或 v6 数据), `_domain` 字段
     // 4. 节点字段 `exec` 为 `ssr-local` 路径, 默认 `/usr/local/bin/ssr-local`; 端口从 10000 开始递增(暂不支持配置)
+    // 5. `_subName` 为单条订阅名
+    // 6. `_collectionName` 为组合订阅名

     // $arguments 为传入的脚本参数

@@ -38,12 +40,15 @@ function operator(proxies = [], targetPlatform, context) {
     // isIP,
     // yaml, // yaml 解析和生成
     // getFlag, // 获取 emoji 旗帜
+    // removeFlag, // 移除 emoji 旗帜
     // getISO, // 获取 ISO 3166-1 alpha-2 代码
     // Gist, // Gist 类
     // }

     // 示例: 给节点名添加前缀
     // $server.name = `[${ProxyUtils.getISO($server.name)}] ${$server.name}`
+    // 示例: 给节点名添加旗帜
+    // $server.name = `[${ProxyUtils.getFlag($server.name).replace(/🇹🇼/g, '🇼🇸')}] ${ProxyUtils.removeFlag($server.name)}`

     // 示例: 从 sni 文件中读取内容并进行节点操作
     // const sni = await produceArtifact({

@@ -96,40 +101,9 @@ function operator(proxies = [], targetPlatform, context) {
     // })

     // 4. 一个比较折腾的方案: 在脚本操作中, 把内容同步到另一个 gist
-
-    // async function operator(proxies = []) {
-    //     const $ = $substore
-    //     const GITHUB_TOKEN = 'ghp_xxxxxxxxxxxxxxxxxxxxx'
-    //     const GIST_NAME = 'share'
-    //     const FILENAME = 'mihomo.yaml'
-    //     let files = {}
-    //     let content = await produceArtifact({
-    //         type: 'subscription',
-    //         subscription: {},
-    //         content: 'proxies:\n' + proxies.map((proxy) => ' - ' + JSON.stringify(proxy) + '\n').join(''),
-    //         platform: 'ClashMeta',
-    //     })
-    //     const manager = new ProxyUtils.Gist({
-    //         token: GITHUB_TOKEN,
-    //         key: GIST_NAME,
-    //     });
-    //     files[encodeURIComponent(FILENAME)] = {
-    //         content,
-    //     };
-    //     const res = await manager.upload(files);
-    //     let body = {};
-    //     try {
-    //         body = JSON.parse(res.body);
-    //         // eslint-disable-next-line no-empty
-    //     } catch (e) {}
-    //     const raw_url =
-    //         body.files[encodeURIComponent(FILENAME)]?.raw_url;
-    //     console.log(raw_url)
-    //     const new_url = raw_url?.replace(/\/raw\/[^/]*\/(.*)/, '/raw/$1');
-    //     console.log(new_url)
-    //     $.notify('🌍 Sub-Store', `更新到 Gist: ${new_url}`);
-    //     return proxies
-    // }
+    // 见 https://t.me/zhetengsha/1428
+    //
+    // const content = ProxyUtils.produce(proxies, platform)

     // // YAML
     // ProxyUtils.yaml.load('YAML String')
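Building on the `_subName` / `_collectionName` fields documented above, a minimal operator sketch (illustrative; these fields are only populated when the node was produced from a subscription or collection):

```js
// Sketch: prefix node names with the subscription / collection they came from.
function operator(proxies = []) {
    return proxies.map((proxy) => {
        const source = proxy._collectionName || proxy._subName;
        if (source) {
            proxy.name = `[${source}] ${proxy.name}`;
        }
        return proxy;
    });
}
```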