Change indexing jobs to use database replica (#26692)

Eugen Rochko 2023-08-27 22:38:01 +02:00 committed by GitHub
parent 5694e24bbf
commit e263db276f
2 changed files with 13 additions and 4 deletions
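Both indexing workers gain the DatabaseHelper concern: AddToPublicStatusesIndexWorker keeps its Account.find on the primary via with_primary and moves the index update into with_read_replica, while Scheduler::IndexingScheduler wraps its batched type.import!(ids) calls in with_read_replica so the bulk Elasticsearch imports read from the database replica instead of the primary.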

app/workers/add_to_public_statuses_index_worker.rb

@@ -2,15 +2,20 @@
 
 class AddToPublicStatusesIndexWorker
   include Sidekiq::Worker
+  include DatabaseHelper
 
   sidekiq_options queue: 'pull'
 
   def perform(account_id)
-    account = Account.find(account_id)
+    with_primary do
+      @account = Account.find(account_id)
+    end
 
-    return unless account.indexable?
+    return unless @account.indexable?
 
-    account.add_to_public_statuses_index!
+    with_read_replica do
+      @account.add_to_public_statuses_index!
+    end
   rescue ActiveRecord::RecordNotFound
     true
   end

app/workers/scheduler/indexing_scheduler.rb

@@ -3,6 +3,7 @@
 class Scheduler::IndexingScheduler
   include Sidekiq::Worker
   include Redisable
+  include DatabaseHelper
 
   sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
 
@@ -15,7 +16,10 @@ class Scheduler::IndexingScheduler
     indexes.each do |type|
       with_redis do |redis|
         redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
-          type.import!(ids)
+          with_read_replica do
+            type.import!(ids)
+          end
           redis.srem("chewy:queue:#{type.name}", ids)
         end
       end
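Net effect: the read-heavy queries issued while building Elasticsearch documents (the per-account add_to_public_statuses_index! above and the batched type.import!(ids) here) are routed to the read replica, taking that load off the primary. Only the initial Account.find is pinned to the primary, presumably to avoid missing a row that has not replicated yet, and the Redis queue bookkeeping (sscan_each / srem) is unaffected by which database connection is in use.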