feat: import firefish renote and reply from export, import self-reply from mastodon export

老周部落 2024-03-02 14:42:20 +08:00
parent 64581d2088
commit e4f9902945
No known key found for this signature in database
GPG Key ID: C72181CD85C6B738
8 changed files with 169 additions and 19 deletions

View File

@@ -74,6 +74,34 @@ mentions: "Mentions"
directNotes: "Direct messages"
cw: "Content warning"
importAndExport: "Import/Export Data"
importAndExportWarn: "The Import/Export Data feature is an experimental feature and
implementation may change at any time without prior notice.\n
Due to differences in the exported data of different software versions, the actual
conditions of the import program, and the server health of the exported data link,
the imported data may be incomplete or the access permissions may not be set
correctly (for example, there is no access permission mark in the
Mastodon/Akkoma/Pleroma exported data, so all posts makes public after import),
so please be sure to check the imported data carefully integrity and configure
the correct access permissions for it."
importAndExportInfo: "Since some data cannot be obtained after the original account is
frozen or the original server goes offline, it is strongly recommendedthat you import
the data before the original account is frozen (migrated, logged out) or the original
server goes offline.\n
If the original account is frozen or the original server is offline but you have the
original images, you can try uploading them to the network disk before importing the
data, which may help with data import.\n
Since some data is obtained from its server using your current account when importing
data, data that the current account does not have permission to access will be regarded
as broken. Please make adjustments including but not limited to access permissions,
Manually following accounts and other methods allow the current account to obtain
relevant data, so that the import program can normally obtain the data it needs to
obtain to help you import.\n
Since it is impossible to confirm whether the broken link content posted by someone other
than you is posted by him/her, if there is broken link content posted by others in the
discussion thread, the related content and subsequent replies will not be imported.\n
Since data import is greatly affected by network communication, it is recommended that you
pay attention to data recovery after a period of time. If the data is still not restored,
you can try importing the same backup file again and try again."
import: "Import"
export: "Export"
files: "Files"

View File

@@ -61,6 +61,16 @@ mention: "提及"
mentions: "提及"
directNotes: "私信"
importAndExport: "导入 / 导出数据"
importAndExportWarn: "导入 / 导出数据功能是一项实验性功能,实现可能会随时变化而无预先通知。\n
由于不同软件不同版本的导出数据、导入程序实际情况以及导出数据链接的服务器运行状况不同,导入的数据可能会不完整或未被正确设置访问权限
(例如 Mastodon/Akkoma/Pleroma 导出数据内无访问权限标记,因此所有帖子导入后均为公开状态),因此请务必谨慎核对导入数据的完整性,
并为其配置正确的访问权限。"
importAndExportInfo: "由于原账号冻结或者原服务器下线后部分数据无法获取,因此强烈建议您在原账号冻结(迁移、注销)或原服务器下线前导入数据。\n
在原账号冻结或者原服务器下线但您拥有原始图片的情况下,可以尝试在导入数据之前将其上传到网盘上,可能对数据导入有所帮助。\n
由于导入数据时部分数据是使用您当前账号到其服务器上获取,因此当前账号无权访问的数据会视为断链。请通过包括但不限于访问权限调整、
手动关注账户等方式让当前帐号可以获取到相关数据,以便导入程序能够正常获取到需要获取的数据从而帮助您进行导入。\n
由于无法确认非您本人发表的断链内容的是否由其本人发表,因此如果讨论串内有其他人发表的断链内容,则相关内容以及后续回复不会被导入。\n
由于数据导入受网络通信影响较大,因此建议您一段时间之后再关注数据恢复情况。如果数据仍未恢复可以尝试再次导入同样的备份文件重试一次。"
import: "导入"
export: "导出"
files: "文件"

View File

@@ -337,6 +337,7 @@ export function createImportMastoPostJob(
user: ThinUser,
post: any,
signatureCheck: boolean,
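// already-imported note to use as the reply/renote parent, if known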
parent: Note | null = null,
) {
return dbQueue.add(
"importMastoPost",
@@ -344,6 +345,7 @@
user: user,
post: post,
signatureCheck: signatureCheck,
parent: parent,
},
{
removeOnComplete: true,

View File

@@ -10,6 +10,7 @@ import type { Note } from "@/models/entities/note.js";
import type { Poll } from "@/models/entities/poll.js";
import type { DbUserJobData } from "@/queue/types.js";
import { createTemp } from "@/misc/create-temp.js";
import config from "@/config/index.js";
const logger = queueLogger.createSubLogger("export-notes");
@@ -129,5 +130,6 @@ async function serialize(
visibility: note.visibility,
visibleUserIds: note.visibleUserIds,
localOnly: note.localOnly,
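// canonical URL of this note, so an import on another server can resolve reply and renote targets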
objectUrl: `${config.url}/notes/${note.id}`,
};
}

View File

@@ -1,12 +1,14 @@
import * as Post from "@/misc/post.js";
import create from "@/services/note/create.js";
import { Users } from "@/models/index.js";
import Resolver from "@/remote/activitypub/resolver.js";
import { DriveFiles, Users } from "@/models/index.js";
import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js";
import { uploadFromUrl } from "@/services/drive/upload-from-url.js";
import type { DriveFile } from "@/models/entities/drive-file.js";
import type Bull from "bull";
import { createImportCkPostJob } from "@/queue/index.js";
import { resolveNote } from "@/remote/activitypub/models/note.js";
import { Notes, NoteEdits } from "@/models/index.js";
import type { Note } from "@/models/entities/note.js";
import { genId } from "backend-rs";
@@ -23,20 +25,37 @@ export async function importCkPost(
return;
}
const post = job.data.post;
/*
if (post.replyId != null) {
done();
return;
const parent = job.data.parent;
const isRenote = post.renoteId !== null;
let reply: Note | null = null;
let renote: Note | null = null;
job.progress(20);
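// resolve the reply target: use the parent passed with the job if present, otherwise fetch the original note by its objectUrl and follow its inReplyTo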
if (!isRenote && post.replyId !== null) {
if (
!parent &&
typeof post.objectUrl !== "undefined" &&
post.objectUrl !== null
) {
const resolver = new Resolver();
const originalNote = await resolver.resolve(post.objectUrl);
reply = await resolveNote(originalNote.inReplyTo);
} else {
reply = post.replyId !== null ? parent : null;
}
}
if (post.renoteId != null) {
done();
return;
// a renote also needs its original note resolved
if (
isRenote &&
!parent &&
typeof post.objectUrl !== "undefined" &&
post.objectUrl !== null
) {
const resolver = new Resolver();
const originalNote = await resolver.resolve(post.objectUrl);
renote = await resolveNote(originalNote.quoteUrl);
} else {
renote = isRenote ? parent : null;
}
if (post.visibility !== "public") {
done();
return;
}
*/
const urls = (post.files || [])
.map((x: any) => x.url)
.filter((x: String) => x.startsWith("http"));
@@ -49,7 +68,17 @@
});
files.push(file);
} catch (e) {
logger.error(`Skipped adding file to drive: ${url}`);
// fall back to a file with the same md5 that is already in the user's drive
const md5 = post.files.find((x: any) => x.url === url)?.md5;
const existing = md5
? await DriveFiles.findOneBy({
md5: md5,
userId: user.id,
})
: null;
if (existing) {
files.push(existing);
} else {
logger.error(`Skipped adding file to drive: ${url}`);
}
}
}
const { text, cw, localOnly, createdAt, visibility } = Post.parse(post);
@@ -79,8 +108,8 @@
files: files.length == 0 ? undefined : files,
poll: undefined,
text: text || undefined,
reply: post.replyId ? job.data.parent : null,
renote: post.renoteId ? job.data.parent : null,
reply,
renote,
cw: cw,
localOnly,
visibility: visibility,

View File

@@ -10,6 +10,7 @@ import type { DriveFile } from "@/models/entities/drive-file.js";
import { Notes, NoteEdits } from "@/models/index.js";
import type { Note } from "@/models/entities/note.js";
import { genId } from "backend-rs";
import { createImportMastoPostJob } from "@/queue/index.js";
const logger = queueLogger.createSubLogger("import-masto-post");
@@ -23,12 +24,17 @@ export async function importMastoPost(
return;
}
const post = job.data.post;
const parent = job.data.parent;
const isRenote = post.type === "Announce";
let reply: Note | null = null;
let renote: Note | null = null;
job.progress(20);
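// prefer the parent note passed with the job (a self-reply within the same export); otherwise resolve the reply target remotely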
if (!isRenote && post.object.inReplyTo != null) {
reply = await resolveNote(post.object.inReplyTo);
if (parent == null) {
reply = await resolveNote(post.object.inReplyTo);
} else {
reply = parent;
}
}
// a renote also needs its original note resolved
if (isRenote) {
@@ -126,4 +132,14 @@ export async function importMastoPost(
done();
logger.succ("Imported");
if (post.childNotes) {
for (const child of post.childNotes) {
createImportMastoPostJob(
job.data.user,
child,
job.data.signatureCheck,
note,
);
}
}
}

View File

@@ -40,7 +40,10 @@ export async function importPosts(
file.url,
job.data.user.id,
);
for (const post of outbox.orderedItems) {
logger.info("Parsing mastodon style posts");
const arr = recreateChainForMastodon(outbox.orderedItems);
logger.debug(JSON.stringify(arr, null, 2));
for (const post of arr) {
createImportMastoPostJob(job.data.user, post, job.data.signatureCheck);
}
} catch (e) {
@@ -59,12 +62,15 @@ export async function importPosts(
if (parsed instanceof Array) {
logger.info("Parsing key style posts");
const arr = recreateChain(parsed);
logger.debug(JSON.stringify(arr, null, 2));
for (const post of arr) {
createImportCkPostJob(job.data.user, post, job.data.signatureCheck);
}
} else if (parsed instanceof Object) {
logger.info("Parsing animal style posts");
for (const post of parsed.orderedItems) {
const arr = recreateChainForMastodon(parsed.orderedItems);
logger.debug(JSON.stringify(arr, null, 2));
for (const post of arr) {
createImportMastoPostJob(job.data.user, post, job.data.signatureCheck);
}
}
@@ -94,9 +100,56 @@ function recreateChain(arr: any[]): any {
let parent = null;
if (note.replyId != null) {
parent = lookup[`${note.replyId}`];
// parent not in this export; keep the note as a root and let the import job resolve it from its URL
if (
!parent &&
typeof note.objectUrl !== "undefined" &&
note.objectUrl.startsWith("http")
) {
notesTree.push(note);
}
}
if (note.renoteId != null) {
parent = lookup[`${note.renoteId}`];
// renoted note not in this export; keep the note as a root and let the import job resolve it from its URL
if (
!parent &&
typeof note.objectUrl !== "undefined" &&
note.objectUrl.startsWith("http")
) {
notesTree.push(note);
}
}
if (parent) {
parent.childNotes.push(note);
}
}
return notesTree;
}
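// Rebuild reply trees from a Mastodon outbox: activities are keyed by id, roots are collected, and self-replies are nested under their parents via childNotes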
function recreateChainForMastodon(arr: any[]): any {
type NotesMap = {
[id: string]: any;
};
const notesTree: any[] = [];
const lookup: NotesMap = {};
for (const note of arr) {
lookup[`${note.id}`] = note;
note.childNotes = [];
if (note.object.inReplyTo == null) {
notesTree.push(note);
}
}
for (const note of arr) {
let parent = null;
if (note.object.inReplyTo != null) {
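// Mastodon outbox activity ids are the status URI plus "/activity", so map the inReplyTo status URI to its activity id before the lookup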
const inReplyToIdForLookup = `${note.object.inReplyTo}/activity`;
parent = lookup[`${inReplyToIdForLookup}`];
// reply target not in this export; keep the note as a root and let the import job resolve it with resolveNote
if (!parent && note.object.inReplyTo.startsWith("http")) {
notesTree.push(note);
}
}
if (parent) {

View File

@@ -1,5 +1,14 @@
<template>
<div class="_formRoot">
<FormSection>
<template #label>{{ i18n.ts.importAndExport }}</template>
<FormInfo warn class="_formBlock">{{
i18n.ts.importAndExportWarn
}}</FormInfo>
<FormInfo class="_formBlock">{{
i18n.ts.importAndExportInfo
}}</FormInfo>
</FormSection>
<FormSection>
<template #label>{{ i18n.ts._exportOrImport.allNotes }}</template>
<FormFolder>
@@ -177,6 +186,7 @@
<script lang="ts" setup>
import { ref } from "vue";
import FormInfo from "@/components/MkInfo.vue";
import MkButton from "@/components/MkButton.vue";
import FormSection from "@/components/form/section.vue";
import FormFolder from "@/components/form/folder.vue";