Commit 2b6d9e7

Feat: Use highest priority link if extracted early (#50)
* Fix: Remove unnecessary output at end
* Feat: Use highest priority link if extracted early
1 parent 5ff0e19 commit 2b6d9e7

2 files changed: 58 additions & 7 deletions


internal/curd.go

Lines changed: 3 additions & 2 deletions

```diff
@@ -151,8 +151,9 @@ func ExitCurd(err error) {
 	}
 
 	CurdOut("Have a great day!")
-	if err != nil {
-		CurdOut(err)
+	// If the error is not about the connection being refused, print it
+	if err != nil && !strings.Contains(err.Error(), "dial unix "+anime.Ep.Player.SocketPath+": connect: connection refused") {
+		CurdOut(fmt.Sprintf("Error: %v", err))
 		if runtime.GOOS == "windows" {
 			fmt.Println("Press Enter to exit")
 			var wait string
```
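The guard suppresses the expected "connection refused" error that the player's IPC socket returns once the player has already shut down, while still surfacing anything else. A minimal standalone sketch of that filtering idea, with a hypothetical socket path standing in for anime.Ep.Player.SocketPath:

```go
package main

import (
	"errors"
	"fmt"
	"strings"
)

func main() {
	// Hypothetical socket path; the real code reads anime.Ep.Player.SocketPath.
	socketPath := "/tmp/curd_mpvsocket"
	expected := "dial unix " + socketPath + ": connect: connection refused"

	errs := []error{
		errors.New(expected),            // expected once the player is gone: suppressed
		errors.New("episode not found"), // anything else: printed
	}
	for _, err := range errs {
		if err != nil && !strings.Contains(err.Error(), expected) {
			fmt.Printf("Error: %v\n", err)
		}
	}
}
```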

internal/episode_url.go

Lines changed: 55 additions & 5 deletions

```diff
@@ -22,6 +22,12 @@ type allanimeResponse struct {
 	} `json:"data"`
 }
 
+type result struct {
+	index int
+	links []string
+	err   error
+}
+
 func decodeProviderID(encoded string) string {
 	// Split the string into pairs of characters (.. equivalent of 'sed s/../&\n/g')
 	re := regexp.MustCompile("..")
```
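Moving result from function scope to package scope is what makes the rest of the change possible: the new collectRemainingResults helper added below needs chan result in its signature, which a type declared inside GetEpisodeURL could not provide.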
```diff
@@ -152,11 +158,6 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 		return nil, err
 	}
 
-	type result struct {
-		index int
-		links []string
-		err   error
-	}
 
 	// Pre-count valid URLs and create slice to preserve order
 	validURLs := make([]string, 0)
```
```diff
@@ -174,11 +175,15 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 	results := make(chan result, len(validURLs))
 	orderedResults := make([][]string, len(validURLs))
 
+	// Add a channel for high priority links
+	highPriorityLink := make(chan []string, 1)
+
 	// Create rate limiter
 	rateLimiter := time.NewTicker(50 * time.Millisecond)
 	defer rateLimiter.Stop()
 
 	// Launch goroutines
+	remainingURLs := len(validURLs)
 	for i, sourceUrl := range validURLs {
 		go func(idx int, url string) {
 			<-rateLimiter.C // Rate limit the requests
```
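Two small patterns meet here: a ticker that paces the goroutine launches, and a capacity-1 channel that lets whichever worker finds a priority link hand it over without blocking. A self-contained sketch of the ticker pacing, under illustrative names (url-1 etc. are not the project's):

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

func main() {
	urls := []string{"url-1", "url-2", "url-3"} // stand-ins for validURLs

	// One tick every 50ms; each goroutine blocks on the ticker before
	// doing any work, so requests start at most one per tick.
	rateLimiter := time.NewTicker(50 * time.Millisecond)
	defer rateLimiter.Stop()

	var wg sync.WaitGroup
	for i, u := range urls {
		wg.Add(1)
		go func(idx int, url string) {
			defer wg.Done()
			<-rateLimiter.C // rate limit the requests
			fmt.Printf("fetching %s (worker %d)\n", url, idx)
		}(i, u)
	}
	wg.Wait()
}
```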
```diff
@@ -222,6 +227,21 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 				links = append(links, link)
 			}
 
+			// Check if any of the extracted links are high priority
+			for _, link := range links {
+				for _, domain := range LinkPriorities[:3] { // Check only top 3 priority domains
+					if strings.Contains(link, domain) {
+						// Found high priority link, send it immediately
+						select {
+						case highPriorityLink <- []string{link}:
+						default:
+							// Channel already has a high priority link
+						}
+						break
+					}
+				}
+			}
+
 			results <- result{
 				index: idx,
 				links: links,
```
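Inside each worker, the scan compares every extracted link against the top three entries of the priority list and performs a non-blocking send, so only the first high-priority hit across all workers is queued. A standalone sketch of that pattern with hypothetical domain names (the real list lives in LinkPriorities):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical priority domains, for illustration only.
	priorities := []string{"fast-cdn.example", "mirror.example", "backup.example"}
	links := []string{
		"https://slow-host.example/ep1.mp4",
		"https://fast-cdn.example/ep1.mp4", // matches a top-3 domain
	}

	highPriorityLink := make(chan []string, 1)
	for _, link := range links {
		for _, domain := range priorities[:3] {
			if strings.Contains(link, domain) {
				select {
				case highPriorityLink <- []string{link}: // first hit wins
				default: // a link is already queued; drop this one
				}
				break
			}
		}
	}
	fmt.Println(<-highPriorityLink) // [https://fast-cdn.example/ep1.mp4]
}
```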
```diff
@@ -234,6 +254,17 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 	var collectedErrors []error
 	successCount := 0
 
+	// First, try to get a high priority link
+	select {
+	case links := <-highPriorityLink:
+		// Continue extracting other links in background
+		go collectRemainingResults(results, orderedResults, &successCount, &collectedErrors, remainingURLs)
+		return links, nil
+	case <-time.After(2 * time.Second): // Wait only briefly for high priority link
+		// No high priority link found quickly, proceed with normal collection
+	}
+
+	// Continue with existing result collection logic
 	// Collect results maintaining order
 	for successCount < len(validURLs) {
 		select {
```
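On the consuming side, the select races the fast-path channel against a 2-second timer: a priority link triggers an early return while a background goroutine keeps collecting, and a timeout falls through to the ordered collection loop. A minimal sketch of that race, with simulated extraction latency:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	highPriorityLink := make(chan []string, 1)

	// Simulated worker that finds a priority link after 500ms.
	go func() {
		time.Sleep(500 * time.Millisecond)
		select {
		case highPriorityLink <- []string{"https://fast-cdn.example/ep1.mp4"}:
		default:
		}
	}()

	select {
	case links := <-highPriorityLink:
		fmt.Println("early return with:", links) // wins here
	case <-time.After(2 * time.Second):
		fmt.Println("no fast result; fall back to ordered collection")
	}
}
```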
```diff
@@ -270,6 +301,25 @@ func GetEpisodeURL(config CurdConfig, id string, epNo int) ([]string, error) {
 	return allLinks, nil
 }
 
+// Helper function to collect remaining results in background
+func collectRemainingResults(results chan result, orderedResults [][]string, successCount *int, collectedErrors *[]error, remainingURLs int) {
+	for *successCount < remainingURLs {
+		select {
+		case res := <-results:
+			if res.err != nil {
+				Log(fmt.Sprintf("Error processing URL %d: %v", res.index+1, res.err))
+				*collectedErrors = append(*collectedErrors, fmt.Errorf("URL %d: %w", res.index+1, res.err))
+			} else {
+				orderedResults[res.index] = res.links
+				*successCount++
+				Log(fmt.Sprintf("Successfully processed URL %d/%d", res.index+1, remainingURLs))
+			}
+		case <-time.After(10 * time.Second):
+			return
+		}
+	}
+}
+
 // converts the ordered slice of link slices into a single slice
 func flattenResults(results [][]string) []string {
 	var totalLen int
```
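A note on the design: after the early return, this helper keeps draining results through pointer-shared state that the caller no longer reads, and the 10-second case appears intended to bound how long it lingers once results stop arriving, so the fast path does not leave the collector goroutine running indefinitely.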
