@@ -22,6 +22,12 @@ type allanimeResponse struct {
 	} `json:"data"`
 }
 
+type result struct {
+	index int
+	links []string
+	err   error
+}
+
 func decodeProviderID(encoded string) string {
 	// Split the string into pairs of characters (equivalent of 'sed s/../&\n/g')
 	re := regexp.MustCompile("..")
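The `result` struct moves from function scope inside `GetEpisodeURL` (removed in the next hunk) to package scope, so that the new background helper `collectRemainingResults` can name it in its signature. For orientation, here is a self-contained sketch of the fan-out/fan-in pattern this struct supports; the URLs and the fake fetch are placeholders, not the repository's real request code.

```go
package main

import "fmt"

type result struct {
	index int
	links []string
	err   error
}

func main() {
	urls := []string{"https://a.example", "https://b.example"}
	results := make(chan result, len(urls))

	// Fan out: one goroutine per URL, each tagged with its input index.
	for i, u := range urls {
		go func(idx int, url string) {
			results <- result{index: idx, links: []string{url + "/stream"}}
		}(i, u)
	}

	// Fan in: collect into an index-addressed slice so output order
	// matches input order regardless of completion order.
	ordered := make([][]string, len(urls))
	for range urls {
		res := <-results
		if res.err != nil {
			continue
		}
		ordered[res.index] = res.links
	}
	fmt.Println(ordered)
}
```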
@@ -152,11 +158,6 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 		return nil, err
 	}
 
-	type result struct {
-		index int
-		links []string
-		err   error
-	}
 
 	// Pre-count valid URLs and create slice to preserve order
 	validURLs := make([]string, 0)
@@ -174,11 +175,15 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 	results := make(chan result, len(validURLs))
 	orderedResults := make([][]string, len(validURLs))
 
+	// Add a channel for high priority links
+	highPriorityLink := make(chan []string, 1)
+
 	// Create rate limiter
 	rateLimiter := time.NewTicker(50 * time.Millisecond)
 	defer rateLimiter.Stop()
 
 	// Launch goroutines
+	remainingURLs := len(validURLs)
 	for i, sourceUrl := range validURLs {
 		go func(idx int, url string) {
 			<-rateLimiter.C // Rate limit the requests
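The shared `time.Ticker` spaces out the request launches: every worker blocks on `rateLimiter.C` before doing any work, so at most one request begins per 50 ms tick. A standalone sketch of the idiom (names and counts illustrative, not taken from this repository):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	rateLimiter := time.NewTicker(50 * time.Millisecond)
	defer rateLimiter.Stop()

	done := make(chan struct{})
	for i := 0; i < 3; i++ {
		go func(idx int) {
			<-rateLimiter.C // each receive consumes one tick, pacing the starts
			fmt.Println("request", idx, "started at", time.Now().Format("15:04:05.000"))
			done <- struct{}{}
		}(i)
	}
	for i := 0; i < 3; i++ {
		<-done
	}
}
```

Note that a `Ticker` drops ticks when no receiver is ready, so idle intervals are not banked up; workers that arrive late still wait for the next fresh tick.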
@@ -222,6 +227,21 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 				links = append(links, link)
 			}
 
+			// Check if any of the extracted links are high priority
+			for _, link := range links {
+				for _, domain := range LinkPriorities[:3] { // Check only top 3 priority domains
+					if strings.Contains(link, domain) {
+						// Found high priority link, send it immediately
+						select {
+						case highPriorityLink <- []string{link}:
+						default:
+							// Channel already has a high priority link
+						}
+						break
+					}
+				}
+			}
+
 			results <- result{
 				index: idx,
 				links: links,
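Each worker scans its extracted links against the first three entries of `LinkPriorities` (presumably ordered most-preferred first) and races to publish the first match. The `select` with a `default` branch is a non-blocking send into a capacity-1 channel: the first writer wins, and every later writer falls through without blocking. A minimal illustration of that first-writer-wins idiom:

```go
package main

import "fmt"

func main() {
	highPriorityLink := make(chan []string, 1) // capacity 1: holds only the first winner

	trySend := func(link string) bool {
		select {
		case highPriorityLink <- []string{link}:
			return true // buffer was empty; this link is the winner
		default:
			return false // buffer already holds an earlier link; drop ours
		}
	}

	fmt.Println(trySend("https://fast.example/ep1")) // true
	fmt.Println(trySend("https://also.example/ep1")) // false, first one kept
	fmt.Println(<-highPriorityLink)                  // [https://fast.example/ep1]
}
```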
@@ -234,6 +254,17 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 	var collectedErrors []error
 	successCount := 0
 
+	// First, try to get a high priority link
+	select {
+	case links := <-highPriorityLink:
+		// Continue extracting other links in background
+		go collectRemainingResults(results, orderedResults, &successCount, &collectedErrors, remainingURLs)
+		return links, nil
+	case <-time.After(2 * time.Second): // Wait only briefly for high priority link
+		// No high priority link found quickly, proceed with normal collection
+	}
+
+	// Continue with existing result collection logic
 	// Collect results maintaining order
 	for successCount < len(validURLs) {
 		select {
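Collection is now two-phase: a bounded wait races the priority channel against a 2-second `time.After`; on a hit the function returns immediately and hands the remaining drain to a background goroutine, otherwise it falls through to the original ordered loop. The shape of that bounded wait in isolation (names illustrative, not this repository's API):

```go
package main

import (
	"fmt"
	"time"
)

func waitForFastPath(fast <-chan []string, timeout time.Duration) ([]string, bool) {
	select {
	case links := <-fast:
		return links, true // fast path won the race
	case <-time.After(timeout):
		return nil, false // fall back to the ordinary collection loop
	}
}

func main() {
	fast := make(chan []string, 1)
	go func() {
		time.Sleep(100 * time.Millisecond)
		fast <- []string{"https://priority.example/ep1"}
	}()
	links, ok := waitForFastPath(fast, 2*time.Second)
	fmt.Println(links, ok) // [https://priority.example/ep1] true
}
```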
@@ -270,6 +301,25 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 	return allLinks, nil
 }
 
+// Helper function to collect remaining results in background
+func collectRemainingResults(results chan result, orderedResults [][]string, successCount *int, collectedErrors *[]error, remainingURLs int) {
+	for *successCount < remainingURLs {
+		select {
+		case res := <-results:
+			if res.err != nil {
+				Log(fmt.Sprintf("Error processing URL %d: %v", res.index+1, res.err))
+				*collectedErrors = append(*collectedErrors, fmt.Errorf("URL %d: %w", res.index+1, res.err))
+			} else {
+				orderedResults[res.index] = res.links
+				*successCount++
+				Log(fmt.Sprintf("Successfully processed URL %d/%d", res.index+1, remainingURLs))
+			}
+		case <-time.After(10 * time.Second):
+			return
+		}
+	}
+}
+
 // converts the ordered slice of link slices into a single slice
 func flattenResults(results [][]string) []string {
 	var totalLen int
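The hunk cuts `flattenResults` off at `var totalLen int`; given its doc comment and that declaration, it very likely pre-sizes the output before appending. A hedged reconstruction of its probable shape, not the verbatim upstream body:

```go
package main

import "fmt"

// flattenResults concatenates the ordered per-URL link slices into one slice,
// sizing the output once up front to avoid repeated reallocation.
func flattenResults(results [][]string) []string {
	var totalLen int
	for _, r := range results {
		totalLen += len(r)
	}
	flat := make([]string, 0, totalLen)
	for _, r := range results {
		flat = append(flat, r...)
	}
	return flat
}

func main() {
	fmt.Println(flattenResults([][]string{{"a", "b"}, nil, {"c"}}))
	// Output: [a b c]
}
```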