Skip to content

Commit

Permalink
feat(LocalFeedRepository): throttle feed extraction
Browse files Browse the repository at this point in the history
Throttles the local feed extraction in order to avoid rate-limiting.
This is done, similarly to NewPipe, by introducing an artificial
delay of on average 1 second after every 50 fetched feeds. These values
may have to be changed in the future.

Ref: #6941 (comment)
  • Loading branch information
FineFindus committed Jan 13, 2025
1 parent 83d257d commit e9cb3d4
Showing 1 changed file with 19 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,15 @@ import com.github.libretube.extensions.toID
import com.github.libretube.helpers.NewPipeExtractorInstance
import com.github.libretube.helpers.PreferenceHelper
import com.github.libretube.ui.dialogs.ShareDialog.Companion.YOUTUBE_FRONTEND_URL
import kotlinx.coroutines.delay
import org.schabi.newpipe.extractor.channel.ChannelInfo
import org.schabi.newpipe.extractor.channel.tabs.ChannelTabInfo
import org.schabi.newpipe.extractor.channel.tabs.ChannelTabs
import org.schabi.newpipe.extractor.feed.FeedInfo
import org.schabi.newpipe.extractor.stream.StreamInfoItem
import java.time.Duration
import java.time.Instant
import java.util.concurrent.atomic.AtomicInteger

class LocalFeedRepository : FeedRepository {
private val relevantTabs =
Expand Down Expand Up @@ -60,14 +62,22 @@ class LocalFeedRepository : FeedRepository {
}

private suspend fun refreshFeed(channelIds: List<String>, minimumDateMillis: Long) {
val extractionCount = AtomicInteger()
for (channelIdChunk in channelIds.chunked(CHUNK_SIZE)) {
val collectedFeedItems = channelIdChunk.parallelMap { channelId ->
try {
val feed = try {
getRelatedStreams(channelId, minimumDateMillis)
} catch (e: Exception) {
Log.e(channelId, e.stackTraceToString())
null
}
// throttle feed extraction to avoid rate limiting
val count = extractionCount.getAndIncrement()
if (count != 0 && count >= BATCH_SIZE) {
delay(BATCH_DELAY.random())
extractionCount.set(0)
}
feed
}.filterNotNull().flatten().map(StreamItem::toFeedItem)

DatabaseHolder.Database.feedDao().insertAll(collectedFeedItems)
Expand Down Expand Up @@ -114,6 +124,14 @@ class LocalFeedRepository : FeedRepository {

companion object {
// Number of channel feeds extracted concurrently per chunk.
private const val CHUNK_SIZE = 2
/**
 * Maximum number of feeds that should be fetched together before an
 * artificial delay is applied, in order to avoid being rate-limited.
 */
private const val BATCH_SIZE = 50
/**
 * Range of delays in milliseconds applied between two consecutive
 * batches; a value is drawn at random from this range (via `random()`)
 * so the pause averages about one second.
 */
private val BATCH_DELAY = (500L..1500L)
// Feed items older than this are not kept.
private const val MAX_FEED_AGE_DAYS = 30L // 30 days
}
}

0 comments on commit e9cb3d4

Please sign in to comment.