
feat(LocalFeedRepository): throttle feed extraction
Throttles the local feed extraction in order to avoid rate limiting.
This is done, similarly to NewPipe, by introducing an artificial
delay of on average 1 second after every 50 fetched feeds. These
values may have to be changed in the future.

Ref: #6941 (comment)
FineFindus committed Jan 11, 2025
1 parent 755f24b commit d241f30
Showing 1 changed file with 17 additions and 0 deletions.
17 changes: 17 additions & 0 deletions app/src/main/java/com/github/libretube/repo/LocalFeedRepository.kt
@@ -12,13 +12,15 @@ import com.github.libretube.extensions.parallelMap
import com.github.libretube.helpers.NewPipeExtractorInstance
import com.github.libretube.helpers.PreferenceHelper
import com.github.libretube.ui.dialogs.ShareDialog.Companion.YOUTUBE_FRONTEND_URL
import kotlinx.coroutines.delay
import org.schabi.newpipe.extractor.channel.ChannelInfo
import org.schabi.newpipe.extractor.channel.tabs.ChannelTabInfo
import org.schabi.newpipe.extractor.channel.tabs.ChannelTabs
import org.schabi.newpipe.extractor.feed.FeedInfo
import org.schabi.newpipe.extractor.stream.StreamInfoItem
import java.time.Duration
import java.time.Instant
import java.util.concurrent.atomic.AtomicInteger

class LocalFeedRepository : FeedRepository {
private val relevantTabs =
@@ -59,6 +61,7 @@ class LocalFeedRepository {
}

private suspend fun refreshFeed(channelIds: List<String>, minimumDateMillis: Long) {
val extractionCount = AtomicInteger()
for (channelIdChunk in channelIds.chunked(CHUNK_SIZE)) {
val collectedFeedItems = channelIdChunk.parallelMap { channelId ->
try {
@@ -70,6 +73,12 @@
}.filterNotNull().flatten().map(StreamItem::toFeedItem)

DatabaseHolder.Database.feedDao().insertAll(collectedFeedItems)

// throttle feed extraction to avoid rate limiting
val count = extractionCount.getAndIncrement()
if (count != 0 && count % BATCH_SIZE == 0) {
delay(BATCH_DELAY.random())
}
}
}

@@ -106,6 +115,14 @@

companion object {
private const val CHUNK_SIZE = 2
/**
* Maximum amount of feeds that should be fetched together, before a delay should be applied.
*/
private const val BATCH_SIZE = 50
/**
* Millisecond delay between two consecutive batches to avoid throttling.
*/
private val BATCH_DELAY = (500L..1500L)
private const val MAX_FEED_AGE_DAYS = 30L // 30 days
}
}
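
As a minimal, self-contained sketch of the throttling pattern (the constants mirror the diff above; the feed fetch is stubbed out with a println, and the channel IDs and main entry point are hypothetical):

import kotlinx.coroutines.delay
import kotlinx.coroutines.runBlocking
import java.util.concurrent.atomic.AtomicInteger

// Constants mirror the diff above.
private const val CHUNK_SIZE = 2
private const val BATCH_SIZE = 50
private val BATCH_DELAY = 500L..1500L // milliseconds, ~1 s on average

fun main() = runBlocking {
    val channelIds = (1..300).map { "channel-$it" } // hypothetical IDs
    val extractionCount = AtomicInteger()

    for (channelIdChunk in channelIds.chunked(CHUNK_SIZE)) {
        channelIdChunk.forEach { id ->
            println("fetching feed for $id") // stand-in for the NewPipe extraction
        }

        // Throttle: after every BATCH_SIZE processed chunks, pause for a
        // random 500-1500 ms before continuing, to avoid rate limiting.
        val count = extractionCount.getAndIncrement()
        if (count != 0 && count % BATCH_SIZE == 0) {
            delay(BATCH_DELAY.random())
        }
    }
}

With these values, delay(BATCH_DELAY.random()) runs after every 50 processed chunks, and the random 500-1500 ms window averages out to roughly one second per pause.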
