@@ -17,57 +17,64 @@ const puppeteerArgs = [
   "--disable-gpu-shader-disk-cache",
   "--media-cache-size=0",
   "--disk-cache-size=0",
+  "--disable-dev-shm-usage",
 ];
 
 const browser = await puppeteer.launch({ headless: "new", args: puppeteerArgs });
 
-const args = Bun.argv.slice(2);
+try {
+  const args = Bun.argv.slice(2);
 
-const rawData = args[0];
+  const rawData = args[0];
 
-const input = decode(rawData) as Task;
+  const input = decode(rawData) as Task;
 
-switch (input.type) {
-  case "scrape":
-    let results: ScraperResult[] = [];
-    const mangaseeSeries = input.data.filter((item) => item.source === SeriesSource.MangaSee);
-    const asuraSeries = input.data.filter((item) => item.source === SeriesSource.AsuraScans);
-    const reaperSeries = input.data.filter((item) => item.source === SeriesSource.ReaperScans);
+  switch (input.type) {
+    case "scrape":
+      let results: ScraperResult[] = [];
+      const mangaseeSeries = input.data.filter((item) => item.source === SeriesSource.MangaSee);
+      const asuraSeries = input.data.filter((item) => item.source === SeriesSource.AsuraScans);
+      const reaperSeries = input.data.filter((item) => item.source === SeriesSource.ReaperScans);
 
-    if (mangaseeSeries.length > 0) {
-      const mangaseeResults = await mangasee.scrape({
-        browser,
-        urls: mangaseeSeries.map((item) => item.url),
-      });
+      if (mangaseeSeries.length > 0) {
+        const mangaseeResults = await mangasee.scrape({
+          browser,
+          urls: mangaseeSeries.map((item) => item.url),
+        });
 
-      results.push(...mangaseeResults);
-    }
+        results.push(...mangaseeResults);
+      }
 
-    if (asuraSeries.length > 0) {
-      const asuraResults = await asura.scrape({
-        browser,
-        urls: asuraSeries.map((item) => item.url),
-      });
+      if (asuraSeries.length > 0) {
+        const asuraResults = await asura.scrape({
+          browser,
+          urls: asuraSeries.map((item) => item.url),
+        });
 
-      results.push(...asuraResults);
-    }
+        results.push(...asuraResults);
+      }
 
-    if (reaperSeries.length > 0) {
-      const reaperResults = await reaper.scrape({
-        browser,
-        urls: reaperSeries.map((item) => item.url),
-      });
+      if (reaperSeries.length > 0) {
+        const reaperResults = await reaper.scrape({
+          browser,
+          urls: reaperSeries.map((item) => item.url),
+        });
 
-      results.push(...reaperResults);
-    }
-    Bun.write(Bun.stdout, encode(results));
-    break;
-  case "checkId":
-    const asuraId = await asura.getLatestId({ browser });
-    Bun.write(Bun.stdout, encode(asuraId));
-    break;
-  default:
-    break;
+        results.push(...reaperResults);
+      }
+      Bun.write(Bun.stdout, encode(results));
+      break;
+    case "checkId":
+      const asuraId = await asura.getLatestId({ browser });
+      Bun.write(Bun.stdout, encode(asuraId));
+      break;
+    default:
+      break;
+  }
+} catch (error) {
+  console.error(error);
 }
 
+browser.close();
+
 process.exit(0);
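
The hunk makes two changes: it adds `--disable-dev-shm-usage` (a common workaround for headless Chromium crashing when `/dev/shm` is small, as in default Docker containers), and it wraps the task handling in `try`/`catch` so a failed scrape is logged instead of killing the worker unhandled. One gap remains: `browser.close()` sits outside the `try` and is not awaited, so an early throw still relies on `process.exit(0)` to tear Chromium down. A minimal sketch of a `finally`-based variant, illustrative only and not the PR's code:

```ts
import puppeteer from "puppeteer";

// Same launch flags as the script above.
const puppeteerArgs = [
  "--disable-gpu-shader-disk-cache",
  "--media-cache-size=0",
  "--disk-cache-size=0",
  "--disable-dev-shm-usage",
];

const browser = await puppeteer.launch({ headless: "new", args: puppeteerArgs });

try {
  // ... decode the task and run the scrape/checkId switch as above ...
} catch (error) {
  console.error(error);
} finally {
  // Runs on both the success and the error path; awaiting close()
  // lets Puppeteer disconnect from Chromium before the process exits.
  await browser.close();
}

process.exit(0);
```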
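
For context on the worker's I/O contract: it reads one encoded `Task` from its CLI arguments (`Bun.argv.slice(2)[0]`) and writes an encoded result to stdout. Assuming `encode`/`decode` are a symmetric string codec (their implementation is outside this hunk), a hypothetical parent process could drive the worker with `Bun.spawn`; the `scraper-worker.ts` path, the `./encoding` module, and the task payload here are placeholders:

```ts
// Hypothetical caller sketch; the script path, payload shape, and the
// "./encoding" module are assumptions, not part of the diff above.
import { encode, decode } from "./encoding";

const task = { type: "checkId" };

// Pass the encoded task as the worker's single CLI argument and
// capture its stdout.
const proc = Bun.spawn(["bun", "scraper-worker.ts", encode(task)], {
  stdout: "pipe",
});

// The worker writes encode(result) to stdout; read and decode it.
const raw = await new Response(proc.stdout).text();
const latestAsuraId = decode(raw);
console.log("latest Asura id:", latestAsuraId);
```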