@@ -10,6 +10,7 @@ import { request } from '~/lib/request'
import { getPodcastByPodcastIndexId } from '~/controllers/podcast'
import { Podcast } from '~/entities'
import { ValueTagOriginal } from 'podverse-shared'
+ import { chunkArray } from '~/lib/utility'
const shortid = require('shortid')
const sha1 = require('crypto-js/sha1')
const encHex = require('crypto-js/enc-hex')
@@ -174,16 +175,19 @@ export const addRecentlyUpdatedFeedUrlsToPriorityQueue = async (sinceTime?: numb
}
}
- // TODO: THIS TAKES A VERY LONG TIME TO COMPLETE,
- // AND IS ARBITRARILY LIMITED TO 10000...
- // const uniquePodcastIndexIds = [...new Set(recentlyUpdatedPodcastIndexIds)].slice(0, 10000)
-
- // console.log('unique recentlyUpdatedPodcastIndexIds count', uniquePodcastIndexIds.length)
-
- // Send the feedUrls with matching podcastIndexIds found in our database to
- // the priority parsing queue for immediate parsing.
- if (recentlyUpdatedPodcastIndexIds.length > 0) {
-   await addFeedUrlsByPodcastIndexId(recentlyUpdatedPodcastIndexIds)
+ const recentlyUpdatedPodcastIndexIdsChunks = chunkArray(recentlyUpdatedPodcastIndexIds, 500)
+ console.log('recentlyUpdatedPodcastIndexIdsChunks array count', recentlyUpdatedPodcastIndexIdsChunks.length)
+ let chunkIndex = 0
+ for (const chunk of recentlyUpdatedPodcastIndexIdsChunks) {
+   try {
+     chunkIndex++
+     console.log('sending feedUrls chunk to queue...', chunkIndex)
+     if (chunk.length > 0) {
+       await addFeedUrlsByPodcastIndexId(chunk)
+     }
+   } catch (error) {
+     console.log('addFeedUrlsByPodcastIndexId error:', error)
+   }
}
} catch (error) {
  console.log('addRecentlyUpdatedFeedUrlsToPriorityQueue', error)
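The removed code sent the full list of recently updated podcast index ids (previously capped at an arbitrary 10000) to the priority queue in one call; the new loop processes them in chunks of 500, logging progress and isolating errors per chunk. The chunkArray helper itself lives in ~/lib/utility and is not part of this diff; a minimal slice-based sketch with the shape implied by the call site (name and signature assumed, not the actual implementation) could look like:

// Hypothetical sketch only -- the real chunkArray in ~/lib/utility may differ.
// Splits an array into consecutive slices of at most `size` items.
export const chunkArray = <T>(arr: T[], size: number): T[][] => {
  const chunks: T[][] = []
  for (let i = 0; i < arr.length; i += size) {
    chunks.push(arr.slice(i, i + size))
  }
  return chunks
}

// Example: 1200 ids -> three chunks of 500, 500, and 200,
// each passed to addFeedUrlsByPodcastIndexId inside its own try/catch above.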