mastodon/app/services/backup_service.rb

# frozen_string_literal: true

require 'zip'
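
# Builds a downloadable archive of an account's data (ActivityPub actor,
# outbox, likes, bookmarks and media attachments) as a zip file and attaches
# the result to the given Backup record.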
class BackupService < BaseService
  include Payloadable
  include ContextHelper

  attr_reader :account, :backup

  def call(backup)
    @backup  = backup
    @account = backup.user.account

    build_archive!
  end

  private
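
  # Streams the account's statuses into the given IO as an ActivityPub
  # OrderedCollection. The collection skeleton is serialized with a
  # placeholder item, split around that placeholder, and the real items are
  # written in batches in between, so the whole outbox is never held in
  # memory at once.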
  def build_outbox_json!(file)
    skeleton = serialize(collection_presenter, ActivityPub::CollectionSerializer)

    skeleton[:@context] = full_context
    skeleton[:orderedItems] = ['!PLACEHOLDER!']
    skeleton = Oj.dump(skeleton)
    prepend, append = skeleton.split('"!PLACEHOLDER!"')
    add_comma = false

    file.write(prepend)

    account.statuses.with_includes.reorder(nil).find_in_batches do |statuses|
      file.write(',') if add_comma
      add_comma = true

      file.write(statuses.map do |status|
        item = serialize_payload(ActivityPub::ActivityPresenter.from_status(status), ActivityPub::ActivitySerializer, allow_local_only: true)
        item.delete('@context')

        unless item[:type] == 'Announce' || item[:object][:attachment].blank?
          item[:object][:attachment].each do |attachment|
            attachment[:url] = Addressable::URI.parse(attachment[:url]).path.delete_prefix('/system/')
          end
        end

        Oj.dump(item)
      end.join(','))

      GC.start
    end

    file.write(append)
  end
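
  # Writes every part of the export into a temporary zip file, then attaches
  # it to the backup record and marks the backup as processed. The tempfile
  # is always closed and unlinked afterwards.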
  def build_archive!
    tmp_file = Tempfile.new(%w(archive .zip))

    Zip::File.open(tmp_file, create: true) do |zipfile|
      dump_outbox!(zipfile)
      dump_media_attachments!(zipfile)
      dump_likes!(zipfile)
      dump_bookmarks!(zipfile)
      dump_actor!(zipfile)
    end

    archive_filename = "#{['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(16)].join('-')}.zip"

    @backup.dump      = ActionDispatch::Http::UploadedFile.new(tempfile: tmp_file, filename: archive_filename)
    @backup.processed = true
    @backup.save!
  ensure
    tmp_file.close
    tmp_file.unlink
  end
  def dump_media_attachments!(zipfile)
    MediaAttachment.attached.where(account: account).reorder(nil).find_in_batches do |media_attachments|
      media_attachments.each do |m|
        path = m.file&.path
        next unless path

        path = path.gsub(/\A.*\/system\//, '')
        path = path.gsub(/\A\/+/, '')
        download_to_zip(zipfile, m.file, path)
      end

      GC.start
    end
  end
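
  # Streams the generated outbox collection into outbox.json inside the zip.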
  def dump_outbox!(zipfile)
    zipfile.get_output_stream('outbox.json') do |io|
      build_outbox_json!(io)
    end
  end
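
  # Serializes the account's actor document, rewriting avatar, header and
  # collection references to point at files bundled in the archive, and
  # copies the avatar and header images alongside it.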
  def dump_actor!(zipfile)
    actor = serialize(account, ActivityPub::ActorSerializer)

    actor[:icon][:url]  = "avatar#{File.extname(actor[:icon][:url])}" if actor[:icon]
    actor[:image][:url] = "header#{File.extname(actor[:image][:url])}" if actor[:image]
    actor[:outbox]      = 'outbox.json'
    actor[:likes]       = 'likes.json'
    actor[:bookmarks]   = 'bookmarks.json'

    download_to_zip(zipfile, account.avatar, "avatar#{File.extname(account.avatar.path)}") if account.avatar.exists?
    download_to_zip(zipfile, account.header, "header#{File.extname(account.header.path)}") if account.header.exists?

    json = Oj.dump(actor)

    zipfile.get_output_stream('actor.json') do |io|
      io.write(json)
    end
  end
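
  # Writes likes.json as an OrderedCollection of the URIs of every status the
  # account has favourited, streamed in batches around a serialized skeleton.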
  def dump_likes!(zipfile)
    skeleton = serialize(ActivityPub::CollectionPresenter.new(id: 'likes.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)
    skeleton.delete(:totalItems)
    skeleton[:orderedItems] = ['!PLACEHOLDER!']
    skeleton = Oj.dump(skeleton)
    prepend, append = skeleton.split('"!PLACEHOLDER!"')

    zipfile.get_output_stream('likes.json') do |io|
      io.write(prepend)

      add_comma = false

      Status.reorder(nil).joins(:favourites).includes(:account).merge(account.favourites).find_in_batches do |statuses|
        io.write(',') if add_comma
        add_comma = true

        io.write(statuses.map do |status|
          Oj.dump(ActivityPub::TagManager.instance.uri_for(status))
        end.join(','))

        GC.start
      end

      io.write(append)
    end
  end
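
  # Writes bookmarks.json, mirroring dump_likes! but for bookmarked statuses.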
  def dump_bookmarks!(zipfile)
    skeleton = serialize(ActivityPub::CollectionPresenter.new(id: 'bookmarks.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)
    skeleton.delete(:totalItems)
    skeleton[:orderedItems] = ['!PLACEHOLDER!']
    skeleton = Oj.dump(skeleton)
    prepend, append = skeleton.split('"!PLACEHOLDER!"')

    zipfile.get_output_stream('bookmarks.json') do |io|
      io.write(prepend)

      add_comma = false

      Status.reorder(nil).joins(:bookmarks).includes(:account).merge(account.bookmarks).find_in_batches do |statuses|
        io.write(',') if add_comma
        add_comma = true

        io.write(statuses.map do |status|
          Oj.dump(ActivityPub::TagManager.instance.uri_for(status))
        end.join(','))

        GC.start
      end

      io.write(append)
    end
  end
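
  # Presenter for the outbox collection skeleton used by build_outbox_json!.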
  def collection_presenter
    ActivityPub::CollectionPresenter.new(
      id: 'outbox.json',
      type: :ordered,
      size: account.statuses_count,
      items: []
    )
  end
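
  # Serializes an object with the ActivityPub adapter, keeping local-only
  # posts in the export.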
  def serialize(object, serializer)
    ActiveModelSerializers::SerializableResource.new(
      object,
      serializer: serializer,
      adapter: ActivityPub::Adapter,
      allow_local_only: true
    ).as_json
  end

  CHUNK_SIZE = 1.megabyte
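
  # Streams a Paperclip attachment into the zip in CHUNK_SIZE pieces; missing
  # files and network errors from remote storage are logged and skipped.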
  def download_to_zip(zipfile, attachment, filename)
    adapter = Paperclip.io_adapters.for(attachment)

    zipfile.get_output_stream(filename) do |io|
      while (buffer = adapter.read(CHUNK_SIZE))
        io.write(buffer)
      end
    end
  rescue Errno::ENOENT, Seahorse::Client::NetworkingError => e
    Rails.logger.warn "Could not backup file #{filename}: #{e}"
  end
end