@@ -19,40 +19,72 @@ export default async function run(config, storage, user, query) {
 		await Promise.all(cleanupTasks);
 
 		logger.info("Starting batch job");
+
 		await storage.updateJobStatus(query, 'running');
 
-		const context = config.processingContext(user);
+		const jobfolder = storage.getJobFolder(job._id);
+		await fse.ensureDir(path.dirname(jobfolder));
+
+		const context = config.processingContext(user, job);
 		const pg = new ProcessGraph(job.process, context, logger);
 		await pg.execute();
 
-		const computeTasks = pg.getResults().map(async (datacube) => {
-			const response = await GeeResults.retrieve(context, datacube, logger);
-			const params = datacube.getOutputFormatParameters();
-			const filename = (params.name || String(Utils.generateHash())) + GeeResults.getFileExtension(datacube, config);
-			const filepath = storage.getJobFile(job._id, filename);
-			logger.debug("Storing result to: " + filepath);
-			await fse.ensureDir(path.dirname(filepath));
-			await new Promise((resolve, reject) => {
-				const writer = fse.createWriteStream(filepath);
-				response.data.pipe(writer);
-				writer.on('error', reject);
-				writer.on('close', resolve);
-			});
-			return { filepath, datacube };
+		const computeTasks = pg.getResults().map(async (dc) => {
+			const format = config.getOutputFormat(dc.getOutputFormat());
+			const datacube = format.preprocess(GeeResults.BATCH, context, dc, logger);
+
+			if (format.canExport()) {
+				const tasks = await format.export(context.ee, dc, context.getResource());
+				storage.addTasks(job, tasks);
+				context.startTaskMonitor();
+				const filepath = await new Promise((resolve, reject) => {
+					setInterval(async () => {
+						const updatedJob = await storage.getById(job._id, job.user_id);
+						if (!updatedJob) {
+							reject(new Error("Job was deleted"));
+						}
+						if (['canceled', 'error', 'finished'].includes(updatedJob.status)) {
+							// todo: resolve google drive URLs
+							resolve(job.googleDriveResults);
+						}
+					}, 10000);
+				});
+				return { filepath, datacube };
+			}
+			else {
+				const response = await format.retrieve(context.ee, dc);
+				const params = datacube.getOutputFormatParameters();
+				const filename = (params.name || String(Utils.generateHash())) + GeeResults.getFileExtension(datacube, config);
+				const filepath = storage.getJobFile(job._id, filename);
+				await new Promise((resolve, reject) => {
+					const writer = fse.createWriteStream(filepath);
+					response.data.pipe(writer);
+					writer.on('error', reject);
+					writer.on('close', resolve);
+				});
+				return { filepath, datacube };
+			}
 		});
 
 		await Promise.all(computeTasks);
 
 		const results = [];
 		for (const task of computeTasks) {
-			results.push(await task);
+			const { filepath, datacube } = await task;
+			if (Array.isArray(filepath)) {
+				filepath.forEach(fp => results.push({ filepath: fp, datacube }));
+			}
+			else {
+				results.push({ filepath, datacube });
+			}
 		}
 
 		const item = await createSTAC(storage, job, results);
 		const stacpath = storage.getJobFile(job._id, 'stac.json');
 		await fse.writeJSON(stacpath, item, { spaces: 2 });
 
 		logger.info("Finished");
+		// todo: set to error if any task failed
 		storage.updateJobStatus(query, 'finished');
 	} catch (e) {
 		logger.error(e);
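
Note on the export branch above: as committed, the timer started by `setInterval` is never cleared, so the callback keeps polling every 10 seconds even after the promise settles, and `resolve(job.googleDriveResults)` reads from the stale `job` object rather than the freshly fetched `updatedJob`. A minimal sketch of a leak-free variant, assuming the `storage.getById(id, userId)` call shown in the diff; the helper name `waitForJobCompletion` and its `intervalMs` parameter are illustrative, not part of the codebase:

	// Sketch only: poll until the job reaches a terminal state, then stop the timer.
	function waitForJobCompletion(storage, job, intervalMs = 10000) {
		return new Promise((resolve, reject) => {
			const timer = setInterval(async () => {
				try {
					const updatedJob = await storage.getById(job._id, job.user_id);
					if (!updatedJob) {
						clearInterval(timer); // stop polling before rejecting
						return reject(new Error("Job was deleted"));
					}
					if (['canceled', 'error', 'finished'].includes(updatedJob.status)) {
						clearInterval(timer); // terminal state reached, stop polling
						return resolve(updatedJob.googleDriveResults);
					}
				} catch (e) {
					clearInterval(timer); // don't keep polling after a storage error
					reject(e);
				}
			}, intervalMs);
		});
	}

With such a helper, the inline promise in the `canExport()` branch collapses to `const filepath = await waitForJobCompletion(storage, job);`.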
@@ -78,17 +110,36 @@ async function createSTAC(storage, job, results) {
 	let endTime = null;
 	const extents = [];
 	for (const { filepath, datacube } of results) {
-		const filename = path.basename(filepath);
-		const stat = await fse.stat(filepath);
-		let asset = {
-			href: path.relative(folder, filepath),
-			roles: ["data"],
-			type: Utils.extensionToMediaType(filepath),
-			title: filename,
-			"file:size": stat.size,
-			created: stat.birthtime,
-			updated: stat.mtime
-		};
+		if (!filepath) {
+			continue;
+		}
+
+		let asset;
+		let filename;
+		if (Utils.isUrl(filepath)) {
+			let url = new URL(filepath);
+			console.log(url);
+			filename = path.basename(url.pathname || url.hash.substring(1));
+			asset = {
+				href: filepath,
+				roles: ["data"],
+				// type: Utils.extensionToMediaType(filepath),
+				title: filename
+			};
+		}
+		else {
+			filename = path.basename(filepath);
+			const stat = await fse.stat(filepath);
+			asset = {
+				href: path.relative(folder, filepath),
+				roles: ["data"],
+				type: Utils.extensionToMediaType(filepath),
+				title: filename,
+				"file:size": stat.size,
+				created: stat.birthtime,
+				updated: stat.mtime
+			};
+		}
 
 		if (datacube.hasT()) {
 			const t = datacube.dimT();
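
One detail worth double-checking in the URL branch of `createSTAC` above: a WHATWG `URL` always has a non-empty `pathname` (at least "/"), so the `url.hash` fallback in `path.basename(url.pathname || url.hash.substring(1))` never fires, and fragment-only links such as Google Drive folder URLs yield an empty title. A small illustration, using made-up URLs (the real hrefs come from the export tasks):

	import path from 'path';

	// A plain file URL behaves as intended:
	const fileUrl = new URL('https://example.com/results/output.tif');
	console.log(path.basename(fileUrl.pathname)); // "output.tif"

	// pathname is "/" (truthy), so the hash fallback is skipped
	// and path.basename("/") returns "" as the title:
	const driveUrl = new URL('https://drive.google.com/#folders/abc123');
	console.log(path.basename(driveUrl.pathname || driveUrl.hash.substring(1))); // ""

Testing the basename for emptiness before falling back to the fragment would cover this case; it also ties into the `// todo: resolve google drive URLs` left in the first hunk.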