# frozen_string_literal: true

require 'zip'

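# Builds a ZIP archive of an account's exportable data (ActivityPub actor
# document, outbox, likes, bookmarks and media attachments) and attaches it
# to the given Backup record.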
class BackupService < BaseService
  include Payloadable
  include ContextHelper

  attr_reader :account, :backup

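  # Entry point: remembers the backup and its owning account, then builds
  # the archive.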
  def call(backup)
    @backup = backup
    @account = backup.user.account

    build_archive!
  end

  private

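  # Streams the account's statuses into `file` as an ActivityPub
  # OrderedCollection. The collection skeleton is serialized once with a
  # placeholder item, split around that placeholder, and the activities are
  # written between the two halves in batches to keep memory usage bounded.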
  def build_outbox_json!(file)
    skeleton = serialize(collection_presenter, ActivityPub::CollectionSerializer)
    skeleton[:@context] = full_context
    skeleton[:orderedItems] = ['!PLACEHOLDER!']
    skeleton = Oj.dump(skeleton)
    prepend, append = skeleton.split('"!PLACEHOLDER!"')
    add_comma = false

    file.write(prepend)

    account.statuses.with_includes.reorder(nil).find_in_batches do |statuses|
      file.write(',') if add_comma
      add_comma = true

      file.write(statuses.map do |status|
        item = serialize_payload(ActivityPub::ActivityPresenter.from_status(status), ActivityPub::ActivitySerializer, allow_local_only: true)
        item.delete('@context')

        unless item[:type] == 'Announce' || item[:object][:attachment].blank?
          item[:object][:attachment].each do |attachment|
            attachment[:url] = Addressable::URI.parse(attachment[:url]).path.delete_prefix('/system/')
          end
        end

        Oj.dump(item)
      end.join(','))

      GC.start
    end

    file.write(append)
  end

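  # Builds the ZIP archive in a tempfile, then attaches it to the backup
  # under a timestamped, random filename and marks the backup as processed.
  # The tempfile is closed and unlinked even if archiving fails.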
  def build_archive!
    tmp_file = Tempfile.new(%w(archive .zip))

    Zip::File.open(tmp_file, create: true) do |zipfile|
      dump_outbox!(zipfile)
      dump_media_attachments!(zipfile)
      dump_likes!(zipfile)
      dump_bookmarks!(zipfile)
      dump_actor!(zipfile)
    end

    archive_filename = "#{['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(16)].join('-')}.zip"

    @backup.dump = ActionDispatch::Http::UploadedFile.new(tempfile: tmp_file, filename: archive_filename)
    @backup.processed = true
    @backup.save!
  ensure
    tmp_file.close
    tmp_file.unlink
  end

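  # Copies each of the account's attached media files into the archive,
  # preserving the path relative to the system/ storage prefix.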
  def dump_media_attachments!(zipfile)
    MediaAttachment.attached.where(account: account).reorder(nil).find_in_batches do |media_attachments|
      media_attachments.each do |m|
        path = m.file&.path
        next unless path

        path = path.gsub(%r{\A.*/system/}, '')
        path = path.gsub(%r{\A/+}, '')
        download_to_zip(zipfile, m.file, path)
      end

      GC.start
    end
  end

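  # Writes the outbox collection to outbox.json inside the archive.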
  def dump_outbox!(zipfile)
    zipfile.get_output_stream('outbox.json') do |io|
      build_outbox_json!(io)
    end
  end

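  # Writes the account's actor document to actor.json, rewriting the avatar,
  # header and collection references to point at files bundled in the
  # archive, and copies the avatar and header images themselves.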
  def dump_actor!(zipfile)
    actor = serialize(account, ActivityPub::ActorSerializer)

    actor[:icon][:url] = "avatar#{File.extname(actor[:icon][:url])}" if actor[:icon]
    actor[:image][:url] = "header#{File.extname(actor[:image][:url])}" if actor[:image]
    actor[:outbox] = 'outbox.json'
    actor[:likes] = 'likes.json'
    actor[:bookmarks] = 'bookmarks.json'

    download_to_zip(zipfile, account.avatar, "avatar#{File.extname(account.avatar.path)}") if account.avatar.exists?
    download_to_zip(zipfile, account.header, "header#{File.extname(account.header.path)}") if account.header.exists?

    json = Oj.dump(actor)

    zipfile.get_output_stream('actor.json') do |io|
      io.write(json)
    end
  end

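  # Streams the URIs of statuses the account has favourited into likes.json,
  # using the same placeholder-splitting approach as the outbox so items are
  # written in batches.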
  def dump_likes!(zipfile)
    skeleton = serialize(ActivityPub::CollectionPresenter.new(id: 'likes.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)
    skeleton.delete(:totalItems)
    skeleton[:orderedItems] = ['!PLACEHOLDER!']
    skeleton = Oj.dump(skeleton)
    prepend, append = skeleton.split('"!PLACEHOLDER!"')

    zipfile.get_output_stream('likes.json') do |io|
      io.write(prepend)

      add_comma = false

      Status.reorder(nil).joins(:favourites).includes(:account).merge(account.favourites).find_in_batches do |statuses|
        io.write(',') if add_comma
        add_comma = true

        io.write(statuses.map do |status|
          Oj.dump(ActivityPub::TagManager.instance.uri_for(status))
        end.join(','))

        GC.start
      end

      io.write(append)
    end
  end

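  # Same as dump_likes!, but for the account's bookmarked statuses, written
  # to bookmarks.json.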
  def dump_bookmarks!(zipfile)
    skeleton = serialize(ActivityPub::CollectionPresenter.new(id: 'bookmarks.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)
    skeleton.delete(:totalItems)
    skeleton[:orderedItems] = ['!PLACEHOLDER!']
    skeleton = Oj.dump(skeleton)
    prepend, append = skeleton.split('"!PLACEHOLDER!"')

    zipfile.get_output_stream('bookmarks.json') do |io|
      io.write(prepend)

      add_comma = false

      Status.reorder(nil).joins(:bookmarks).includes(:account).merge(account.bookmarks).find_in_batches do |statuses|
        io.write(',') if add_comma
        add_comma = true

        io.write(statuses.map do |status|
          Oj.dump(ActivityPub::TagManager.instance.uri_for(status))
        end.join(','))

        GC.start
      end

      io.write(append)
    end
  end

  def collection_presenter
    ActivityPub::CollectionPresenter.new(
      id: 'outbox.json',
      type: :ordered,
      size: account.statuses_count,
      items: []
    )
  end

  def serialize(object, serializer)
    ActiveModelSerializers::SerializableResource.new(
      object,
      serializer: serializer,
      adapter: ActivityPub::Adapter,
      allow_local_only: true
    ).as_json
  end

  CHUNK_SIZE = 1.megabyte

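  # Streams a Paperclip attachment into the archive in CHUNK_SIZE pieces;
  # missing local files and remote-storage networking errors are logged and
  # skipped rather than aborting the backup.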
  def download_to_zip(zipfile, attachment, filename)
    adapter = Paperclip.io_adapters.for(attachment)

    zipfile.get_output_stream(filename) do |io|
      while (buffer = adapter.read(CHUNK_SIZE))
        io.write(buffer)
      end
    end
  rescue Errno::ENOENT, Seahorse::Client::NetworkingError => e
    Rails.logger.warn "Could not backup file #{filename}: #{e}"
  end
end