Refactor: migrate to undici fetch

SukkaW committed Jan 9, 2025
1 parent c94e28b commit f51cea7

Showing 5 changed files with 83 additions and 84 deletions.
3 changes: 2 additions & 1 deletion Build/lib/fetch-retry.ts
@@ -105,7 +105,8 @@ setGlobalDispatcher(agent.compose(
   }),
   interceptors.cache({
     store: new BetterSqlite3CacheStore({
-      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db')
+      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db'),
+      maxEntrySize: 1024 * 1024 * 50 // 50 MiB
     })
   })
 ));
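For context, the cache interceptor being tuned here is composed onto undici's global dispatcher roughly as in the sketch below. This is a minimal sketch, not the repository's exact fetch-retry.ts: the retry options and the import path for BetterSqlite3CacheStore are assumptions, while `maxEntrySize` is the option this hunk actually adds.

```ts
import path from 'node:path';
import { Agent, interceptors, setGlobalDispatcher } from 'undici';
// Assumed import path -- the module that provides BetterSqlite3CacheStore is not shown in this hunk.
import { BetterSqlite3CacheStore } from 'undici-cache-store-better-sqlite3';

const agent = new Agent();

setGlobalDispatcher(agent.compose(
  // Retry options here are illustrative, not the repository's actual values.
  interceptors.retry({ maxRetries: 5 }),
  interceptors.cache({
    store: new BetterSqlite3CacheStore({
      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db'),
      // New in this commit: never cache a response body larger than 50 MiB,
      // so a single oversized download cannot bloat the SQLite cache file.
      maxEntrySize: 1024 * 1024 * 50
    })
  })
));
```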
5 changes: 2 additions & 3 deletions Build/lib/fetch-text-by-line.ts
@@ -8,7 +8,6 @@ import { TextLineStream } from './text-line-transform-stream';
 import type { ReadableStream } from 'node:stream/web';
 import { TextDecoderStream } from 'node:stream/web';
 import { processLine, ProcessLineStream } from './process-line';
-import type { NodeFetchResponse } from './make-fetch-happen';
 import { $$fetch } from './fetch-retry';
 import type { UndiciResponseData } from './fetch-retry';
 import type { Response as UnidiciWebResponse } from 'undici';
@@ -38,7 +37,7 @@ export async function readFileByLineNew(file: string): Promise<AsyncIterable<str
   return fsp.open(file, 'r').then(fdReadLines);
 }

-function ensureResponseBody<T extends NodeFetchResponse | UndiciResponseData | UnidiciWebResponse>(resp: T): NonNullable<T['body']> {
+function ensureResponseBody<T extends UndiciResponseData | UnidiciWebResponse>(resp: T): NonNullable<T['body']> {
   if (resp.body == null) {
     throw new Error('Failed to fetch remote text');
   }
@@ -48,7 +47,7 @@ function ensureResponseBody<T extends NodeFetchResponse | UndiciResponseData | U
   return resp.body;
 }

-export const createReadlineInterfaceFromResponse: ((resp: NodeFetchResponse | UndiciResponseData | UnidiciWebResponse, processLine?: boolean) => ReadableStream<string>) = (resp, processLine = false) => {
+export const createReadlineInterfaceFromResponse: ((resp: UndiciResponseData | UnidiciWebResponse, processLine?: boolean) => ReadableStream<string>) = (resp, processLine = false) => {
   const stream = ensureResponseBody(resp);

   const webStream: ReadableStream<Uint8Array> = 'getReader' in stream
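The hunk cuts off right at the `'getReader' in stream` branch. That check exists because the two remaining response shapes carry different body types: undici's dispatcher-level `UndiciResponseData` exposes `body` as a Node.js `Readable`, while the WHATWG `Response` from undici's `fetch` exposes a web `ReadableStream`, and both must be normalized before piping through `TextDecoderStream` and `TextLineStream`. A minimal sketch of that normalization (not the repository's exact code):

```ts
import { Readable } from 'node:stream';
import type { ReadableStream } from 'node:stream/web';

// Web ReadableStreams have getReader(); Node.js Readables do not,
// so the presence check is enough to tell the two apart.
function toWebStream(body: Readable | ReadableStream<Uint8Array>): ReadableStream<Uint8Array> {
  return 'getReader' in body
    ? body
    : Readable.toWeb(body) as ReadableStream<Uint8Array>;
}
```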
148 changes: 74 additions & 74 deletions Build/lib/make-fetch-happen.ts
@@ -1,86 +1,86 @@
 import path from 'node:path';
-import fs from 'node:fs';
-import makeFetchHappen from 'make-fetch-happen';
-import type { FetchOptions } from 'make-fetch-happen';
-import cacache from 'cacache';
-import picocolors from 'picocolors';
-// eslint-disable-next-line @typescript-eslint/no-restricted-imports -- type only
-import type { Response as NodeFetchResponse } from 'node-fetch';
-import { task } from '../trace';
-import { bytes } from 'xbits';
+import fsp from 'node:fs/promises';
+// import makeFetchHappen from 'make-fetch-happen';
+// import type { FetchOptions } from 'make-fetch-happen';
+// import cacache from 'cacache';
+// import picocolors from 'picocolors';

-export type { NodeFetchResponse };
+import { task } from '../trace';
+// import { bytes } from 'xbits';

 const cachePath = path.resolve(__dirname, '../../.cache/__make_fetch_happen__');
-fs.mkdirSync(cachePath, { recursive: true });
+// fs.mkdirSync(cachePath, { recursive: true });

-interface CacacheVerifyStats {
-  startTime: Date,
-  endTime: Date,
-  runTime: {
-    markStartTime: 0,
-    fixPerms: number,
-    garbageCollect: number,
-    rebuildIndex: number,
-    cleanTmp: number,
-    writeVerifile: number,
-    markEndTime: number,
-    total: number
-  },
-  verifiedContent: number,
-  reclaimedCount: number,
-  reclaimedSize: number,
-  badContentCount: number,
-  keptSize: number,
-  missingContent: number,
-  rejectedEntries: number,
-  totalEntries: number
-}
+// interface CacacheVerifyStats {
+//   startTime: Date,
+//   endTime: Date,
+//   runTime: {
+//     markStartTime: 0,
+//     fixPerms: number,
+//     garbageCollect: number,
+//     rebuildIndex: number,
+//     cleanTmp: number,
+//     writeVerifile: number,
+//     markEndTime: number,
+//     total: number
+//   },
+//   verifiedContent: number,
+//   reclaimedCount: number,
+//   reclaimedSize: number,
+//   badContentCount: number,
+//   keptSize: number,
+//   missingContent: number,
+//   rejectedEntries: number,
+//   totalEntries: number
+// }

 export const cacheGc = task(require.main === module, __filename)(
-  (span) => span
-    .traceChildAsync('cacache gc', () => cacache.verify(cachePath, { concurrency: 64 }))
-    .then((stats: CacacheVerifyStats) => {
-      // console.log({ stats });
-      console.log(picocolors.green('[cacheGc] running gc on cache:'), cachePath);
-      console.log(picocolors.green('[cacheGc] content verified:'), stats.verifiedContent, '(' + bytes(stats.keptSize) + ')');
-      console.log(picocolors.green('[cacheGc] reclaimed:'), stats.reclaimedCount, '(' + bytes(stats.reclaimedSize) + ')');
-    })
+  () => fsp.rm(cachePath, { recursive: true, force: true })
+  // span
+  //   .traceChildAsync('cacache gc', () => cacache.verify(cachePath, { concurrency: 64 }))
+  //   .then((stats: CacacheVerifyStats) => {
+  //     // console.log({ stats });
+  //     console.log(picocolors.green('[cacheGc] running gc on cache:'), cachePath);
+  //     console.log(picocolors.green('[cacheGc] content verified:'), stats.verifiedContent, '(' + bytes(stats.keptSize) + ')');
+  //     console.log(picocolors.green('[cacheGc] reclaimed:'), stats.reclaimedCount, '(' + bytes(stats.reclaimedSize) + ')');
+  //   });
 );

-const _fetch = makeFetchHappen.defaults({
-  cachePath,
-  maxSockets: 32, /**
-                   * They said 15 is a good default that prevents knocking out others' routers,
-                   * I disagree. 32 is a good number.
-                   */
-  headers: {
-    'User-Agent': 'curl/8.9.1 (https://github.com/SukkaW/Surge)'
-  },
-  retry: {
-    retries: 5,
-    randomize: true
-  }
-});
+// const _fetch = makeFetchHappen.defaults({
+//   cachePath,
+//   maxSockets: 32, /**
+//                    * They said 15 is a good default that prevents knocking out others' routers,
+//                    * I disagree. 32 is a good number.
+//                    */
+//   headers: {
+//     'User-Agent': 'curl/8.9.1 (https://github.com/SukkaW/Surge)'
+//   },
+//   retry: {
+//     retries: 5,
+//     randomize: true
+//   }
+// });

+// export function $fetch(uriOrRequest: string | Request, opts?: FetchOptions) {
+//   return _fetch(uriOrRequest, opts).then((resp) => {
+//     printResponseStatus(resp);
+//     return resp;
+//   });
+// }

-export function $fetch(uriOrRequest: string | Request, opts?: FetchOptions) {
-  return _fetch(uriOrRequest, opts).then((resp) => {
-    printResponseStatus(resp);
-    return resp;
-  });
-}
+// export async function $delete(resp: NodeFetchResponse) {
+//   const cacheKey = resp.headers.get('X-Local-Cache-Key');
+//   if (cacheKey) {
+//     await cacache.rm.entry(cachePath, cacheKey);
+//     await cacache.verify(cachePath, { concurrency: 64 });
+//   }
+// }

-export async function $delete(resp: NodeFetchResponse) {
-  const cacheKey = resp.headers.get('X-Local-Cache-Key');
-  if (cacheKey) {
-    await cacache.rm.entry(cachePath, cacheKey);
-    await cacache.verify(cachePath, { concurrency: 64 });
-  }
-}
+// export function printResponseStatus(resp: NodeFetchResponse) {
+//   const status = resp.headers.get('X-Local-Cache-Status');
+//   if (status) {
+//     console.log('[$fetch cache]', { status }, picocolors.gray(resp.url));
+//   }
+// }

-export function printResponseStatus(resp: NodeFetchResponse) {
-  const status = resp.headers.get('X-Local-Cache-Status');
-  if (status) {
-    console.log('[$fetch cache]', { status }, picocolors.gray(resp.url));
-  }
-}
+// export { type Response as NodeFetchResponse } from 'node-fetch';
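Stripped of the commented-out remains, the surviving module after this hunk amounts to little more than a cleanup task for the now-unused cacache directory, roughly:

```ts
import path from 'node:path';
import fsp from 'node:fs/promises';
import { task } from '../trace';

const cachePath = path.resolve(__dirname, '../../.cache/__make_fetch_happen__');

// The cacache verify/gc pass is gone along with make-fetch-happen itself;
// "gc" now simply deletes the stale cache directory left over by earlier builds.
export const cacheGc = task(require.main === module, __filename)(
  () => fsp.rm(cachePath, { recursive: true, force: true })
);
```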
3 changes: 1 addition & 2 deletions Build/lib/parse-dnsmasq.ts
@@ -4,7 +4,6 @@ import { createReadlineInterfaceFromResponse } from './fetch-text-by-line';
 // In short, single label domain suffix is ignored due to the size optimization, so no isIcann
 // import tldts from 'tldts-experimental';
 import tldts from 'tldts';
-import type { NodeFetchResponse } from './make-fetch-happen';
 import type { UndiciResponseData } from './fetch-retry';
 import type { Response } from 'undici';

@@ -20,7 +19,7 @@ export function extractDomainsFromFelixDnsmasq(line: string): string | null {
   return null;
 }

-export async function parseFelixDnsmasqFromResp(resp: NodeFetchResponse | UndiciResponseData | Response): Promise<string[]> {
+export async function parseFelixDnsmasqFromResp(resp: UndiciResponseData | Response): Promise<string[]> {
   const results: string[] = [];

   for await (const line of createReadlineInterfaceFromResponse(resp, true)) {
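With the node-fetch response shape gone, the parser only accepts undici responses. A hedged usage sketch (the URL is a placeholder, not necessarily one the repository fetches):

```ts
import { $$fetch } from './fetch-retry';
import { parseFelixDnsmasqFromResp } from './parse-dnsmasq';

async function fetchFelixDomains(): Promise<string[]> {
  // Any dnsmasq-format "server=/domain/ip" list works here; the URL is illustrative.
  const resp = await $$fetch('https://example.com/accelerated-domains.china.conf');
  return parseFelixDnsmasqFromResp(resp);
}
```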
8 changes: 4 additions & 4 deletions Build/validate-gfwlist.ts
@@ -6,8 +6,8 @@ import { parse } from 'csv-parse/sync';
 import { readFileByLine } from './lib/fetch-text-by-line';
 import path from 'node:path';
 import { OUTPUT_SURGE_DIR } from './constants/dir';
-import { $fetch } from './lib/make-fetch-happen';
 import { createRetrieKeywordFilter as createKeywordFilter } from 'foxts/retrie';
+import { $$fetch } from './lib/fetch-retry';

 export async function parseGfwList() {
   const whiteSet = new Set<string>();
@@ -22,7 +22,7 @@ export async function parseGfwList() {
     '?'
   ]);

-  const text = await (await $fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
+  const text = await (await $$fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
   for (const l of atob(text).split('\n')) {
     const line = processLine(l);
     if (!line) continue;
@@ -60,11 +60,11 @@ export async function parseGfwList() {
       continue;
     }
   }
-  for (const l of (await (await $fetch('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
+  for (const l of (await (await $$fetch('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
     trie.add(l);
   }

-  const res = await (await $fetch('https://litter.catbox.moe/sqmgyn.csv', {
+  const res = await (await $$fetch('https://litter.catbox.moe/sqmgyn.csv', {
     headers: {
       accept: '*/*',
       'user-agent': 'curl/8.9.1'
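The call sites keep the same await-twice shape; only the helper changes, and with it where retries and caching live. A minimal sketch, assuming `$$fetch` resolves to a Response-like object as the surrounding code implies:

```ts
import { $$fetch } from './lib/fetch-retry';

// Retries and the SQLite-backed HTTP cache now come from the interceptors
// composed onto undici's global dispatcher in fetch-retry.ts, instead of
// make-fetch-happen's cacache store.
async function fetchGfwListText(): Promise<string> {
  const resp = await $$fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt');
  return resp.text();
}
```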