Compare commits

...

25 Commits

Author SHA1 Message Date
Nyan Helsing ec2f807ae7 Merge branch 'ft/use-env-for-dburls' into 'develop'
Use env config for db

Co-authored-by: naskya <m@naskya.net>
Co-authored-by: deathg.rip <josh@deathg.rip>

See merge request firefish/firefish!10706
2024-04-21 20:31:59 +00:00
naskya ce672f4edd
dev: add cargo test to pnpm scripts
The mocha tests have been unmaintained for a long time and are very broken :(
2024-04-21 22:36:05 +09:00
naskya 131b3686d4 Merge branch 'feat/drive-file-usage-hints' into 'develop'
feat: Add usageHint field to DriveFile

Co-authored-by: yumeko <yumeko@mainichi.social>

See merge request firefish/firefish!10750
2024-04-21 12:58:37 +00:00
naskya 6b008c651a
chore (backend): remove (technically) incorrect TypeORM decorator field 2024-04-21 11:09:18 +09:00
naskya d2dbfb37c7
chore (backend): reflect entity changes to the schema and repository 2024-04-21 10:59:02 +09:00
naskya 96481f1353
chore: update downgrade.sql 2024-04-21 10:48:31 +09:00
naskya c936102a4c
chore (backend-rs): regenerate entities and index.js/d.ts 2024-04-21 10:45:47 +09:00
naskya 43570a54aa
chore: format 2024-04-21 10:44:54 +09:00
naskya 4d34e14dd8
Merge branch 'develop' into feat/drive-file-usage-hints 2024-04-21 10:42:25 +09:00
naskya 28f7ac1acd
fix (backend): typo 2024-04-21 10:31:00 +09:00
naskya 9f3396af21
chore (backend): translate Japanese comments into English 2024-04-21 10:30:13 +09:00
naskya dac4043dd9
v20240421 2024-04-21 10:09:45 +09:00
naskya d1e898c0d0
docs: update changelog 2024-04-21 09:32:05 +09:00
mei23 dc02a07774
fix (backend): add Cache-Control to Bull Dashboard 2024-04-21 09:29:00 +09:00
naskya 2760e7feee
chore (minor): use ** in lieu of Math.pow 2024-04-21 06:40:53 +09:00
yumeko 6c46bb56fd
Switch DriveFile's usageHint field to an enum type 2024-04-19 18:24:48 +03:00
yumeko 968657d26e
Run format 2024-04-19 07:54:11 +03:00
yumeko 913de651db
When updating (remote) user avatar/banner, clear usageHint for the previous drivefile, if any 2024-04-19 07:25:42 +03:00
yumeko 4aeb0d95cc
Add DriveFile usageHint field to rust model as well 2024-04-19 07:03:09 +03:00
yumeko c0f93de94b
Set file usage hints on local avatar/banner uploads as well + export "valid" values as type 2024-04-19 06:29:28 +03:00
yumeko 4823abd3a9
Add usageHint field to DriveFile, and fill accordingly when operating on Persons 2024-04-19 03:41:36 +03:00
naskya bc3a773cae
Merge branch 'develop' into ft/use-env-for-dburls 2024-03-29 06:50:43 +09:00
deathg.rip 9f84a40e0b refine dockerignore 2024-03-23 16:40:29 -04:00
deathg.rip be1b861122 run linter 2024-03-23 14:20:05 -04:00
deathg.rip dad0365d08 Use env config for db 2024-03-23 14:07:34 -04:00
26 changed files with 249 additions and 68 deletions

View File

@@ -16,7 +16,12 @@ report.*.json
 coverage
 # config
-/.config
+/.config/LICENSE
+/.config/*.env
+/.config/ci.yml
+/.config/devenv.yml
+/.config/example.yml
+/.config/helm_values_example.yml
 # misskey
 built

View File

@@ -5,6 +5,10 @@ Critical security updates are indicated by the :warning: icon.
 - Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
 - Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
 
+## [v20240421](https://firefish.dev/firefish/firefish/-/merge_requests/10756/commits)
+
+- Fix bugs
+
 ## [v20240413](https://firefish.dev/firefish/firefish/-/merge_requests/10741/commits)
 - Add "Media" tab to user page

View File

@@ -1,6 +1,7 @@
 BEGIN;
 
 DELETE FROM "migrations" WHERE name IN (
+  'AddDriveFileUsage1713451569342',
   'ConvertCwVarcharToText1713225866247',
   'FixChatFileConstraint1712855579316',
   'DropTimeZone1712425488543',
@@ -23,7 +24,11 @@ DELETE FROM "migrations" WHERE name IN (
   'RemoveNativeUtilsMigration1705877093218'
 );
 
---convert-cw-varchar-to-text
+-- AddDriveFileUsage
+ALTER TABLE "drive_file" DROP COLUMN "usageHint";
+DROP TYPE "drive_file_usage_hint_enum";
+
+-- convert-cw-varchar-to-text
 DROP INDEX "IDX_8e3bbbeb3df04d1a8105da4c8f";
 ALTER TABLE "note" ALTER COLUMN "cw" TYPE character varying(512);
 CREATE INDEX "IDX_8e3bbbeb3df04d1a8105da4c8f" ON "note" USING "pgroonga" ("cw" pgroonga_varchar_full_text_search_ops_v2);

fly.toml (new file, 22 lines)
View File

@@ -0,0 +1,22 @@
+# fly.toml app configuration file generated for infinite-jetzt-firefish on 2024-03-21T21:20:45-04:00
+#
+# See https://fly.io/docs/reference/configuration/ for information about how to use this file.
+#
+
+app = 'example-fly-firefish'
+primary_region = 'bos'
+
+[build]
+
+[http_service]
+  internal_port = 3000
+  force_https = true
+  auto_stop_machines = true
+  auto_start_machines = true
+  min_machines_running = 0
+  processes = ['firefish']
+
+[[vm]]
+  memory = '1gb'
+  cpu_kind = 'shared'
+  cpus = 1

View File

@@ -1,6 +1,6 @@
 {
   "name": "firefish",
-  "version": "20240413",
+  "version": "20240421",
   "repository": {
     "type": "git",
     "url": "https://firefish.dev/firefish/firefish.git"
@@ -26,7 +26,9 @@
     "debug": "pnpm run build:debug && pnpm run start",
     "build:debug": "pnpm run clean && pnpm node ./scripts/dev-build.mjs && pnpm run gulp",
     "mocha": "pnpm --filter backend run mocha",
-    "test": "pnpm run mocha",
+    "test": "pnpm run test:ts && pnpm run test:rs",
+    "test:ts": "pnpm run mocha",
+    "test:rs": "cargo test",
     "format": "pnpm run format:ts; pnpm run format:rs",
     "format:ts": "pnpm -r --parallel run format",
     "format:rs": "cargo fmt --all --",

View File

@@ -348,6 +348,7 @@ export interface DriveFile {
   webpublicType: string | null
   requestHeaders: Json | null
   requestIp: string | null
+  usageHint: DriveFileUsageHintEnum | null
 }
 export interface DriveFolder {
   id: string
@@ -780,6 +781,10 @@ export enum AntennaSrcEnum {
   List = 'list',
   Users = 'users'
 }
+export enum DriveFileUsageHintEnum {
+  UserAvatar = 'userAvatar',
+  UserBanner = 'userBanner'
+}
 export enum MutedNoteReasonEnum {
   Manual = 'manual',
   Other = 'other',

View File

@@ -310,7 +310,7 @@ if (!nativeBinding) {
   throw new Error(`Failed to load native binding`)
 }
 
-const { readEnvironmentConfig, readServerConfig, stringToAcct, acctToString, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, AntennaSrcEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initIdGenerator, getTimestamp, genId, secureRndstr } = nativeBinding
+const { readEnvironmentConfig, readServerConfig, stringToAcct, acctToString, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initIdGenerator, getTimestamp, genId, secureRndstr } = nativeBinding
 
 module.exports.readEnvironmentConfig = readEnvironmentConfig
 module.exports.readServerConfig = readServerConfig
@@ -339,6 +339,7 @@ module.exports.decodeReaction = decodeReaction
 module.exports.countReactions = countReactions
 module.exports.toDbReaction = toDbReaction
 module.exports.AntennaSrcEnum = AntennaSrcEnum
+module.exports.DriveFileUsageHintEnum = DriveFileUsageHintEnum
 module.exports.MutedNoteReasonEnum = MutedNoteReasonEnum
 module.exports.NoteVisibilityEnum = NoteVisibilityEnum
 module.exports.NotificationTypeEnum = NotificationTypeEnum

View File

@@ -1,5 +1,6 @@
 //! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.15
 
+use super::sea_orm_active_enums::DriveFileUsageHintEnum;
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
@@ -52,6 +53,8 @@ pub struct Model {
     pub request_headers: Option<Json>,
     #[sea_orm(column_name = "requestIp")]
     pub request_ip: Option<String>,
+    #[sea_orm(column_name = "usageHint")]
+    pub usage_hint: Option<DriveFileUsageHintEnum>,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -23,6 +23,20 @@ pub enum AntennaSrcEnum {
 #[derive(Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum)]
 #[cfg_attr(not(feature = "napi"), derive(Clone))]
 #[cfg_attr(feature = "napi", napi_derive::napi(string_enum = "camelCase"))]
+#[sea_orm(
+    rs_type = "String",
+    db_type = "Enum",
+    enum_name = "drive_file_usage_hint_enum"
+)]
+pub enum DriveFileUsageHintEnum {
+    #[sea_orm(string_value = "userAvatar")]
+    UserAvatar,
+    #[sea_orm(string_value = "userBanner")]
+    UserBanner,
+}
+#[derive(Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum)]
+#[cfg_attr(not(feature = "napi"), derive(Clone))]
+#[cfg_attr(feature = "napi", napi_derive::napi(string_enum = "camelCase"))]
 #[sea_orm(
     rs_type = "String",
     db_type = "Enum",

View File

@@ -2,7 +2,9 @@ import { loadConfig } from "./built/config.js";
 import { createRedisConnection } from "./built/redis.js";
 
 const config = loadConfig();
-const redis = createRedisConnection(config);
+const redis = createRedisConnection({
+  ...config,
+});
 
 redis.on("connect", () => redis.disconnect());
 redis.on("error", (e) => {

View File

@@ -189,11 +189,17 @@ const log = process.env.NODE_ENV !== "production";
 export const db = new DataSource({
   type: "postgres",
-  host: config.db.host,
-  port: config.db.port,
-  username: config.db.user,
-  password: config.db.pass,
-  database: config.db.db,
+  ...(process.env.DATABASE_URL
+    ? {
+        url: process.env.DATABASE_URL,
+      }
+    : {
+        host: config.db.host,
+        port: config.db.port,
+        username: config.db.user,
+        password: config.db.pass,
+        database: config.db.db,
+      }),
   extra: {
     statement_timeout: 1000 * 30,
     ...config.db.extra,
@@ -203,16 +209,21 @@ export const db = new DataSource({
   cache: !config.db.disableCache
     ? {
         type: "ioredis",
-        options: {
-          host: config.redis.host,
-          port: config.redis.port,
-          family: config.redis.family == null ? 0 : config.redis.family,
-          username: config.redis.user ?? "default",
-          password: config.redis.pass,
-          keyPrefix: `${config.redis.prefix}:query:`,
-          db: config.redis.db || 0,
-          tls: config.redis.tls,
-        },
+        port: process.env.REDIS_URL, // typeorm passes "port" as the first argument to ioredis when using the "ioredis" cache driver so we can use it to pass the redis url
+        options: process.env.REDIS_URL
+          ? {
+              keyPrefix: `${config.redis.prefix}:query:`,
+            }
+          : {
+              host: config.redis.host,
+              port: config.redis.port,
+              family: config.redis.family == null ? 0 : config.redis.family,
+              username: config.redis.user ?? "default",
+              password: config.redis.pass,
+              keyPrefix: `${config.redis.prefix}:query:`,
+              db: config.redis.db || 0,
+              tls: config.redis.tls,
+            },
       }
     : false,
   logging: log,
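The database and cache blocks above follow the same pattern: when a single connection URL is present in the environment, it is passed through as-is, and the individual host/port/credential fields from the config file are only used as a fallback. A minimal standalone sketch of that conditional-spread pattern, assuming a hypothetical `DbConfig` shape (the `buildDbOptions` helper is illustrative, not part of the Firefish codebase):

// Sketch: prefer a single DATABASE_URL from the environment, otherwise
// fall back to discrete fields from a config object (hypothetical shape).
interface DbConfig {
  host: string;
  port: number;
  user: string;
  pass: string;
  db: string;
}

function buildDbOptions(config: DbConfig) {
  return {
    type: "postgres" as const,
    // If DATABASE_URL is set, the spread resolves to { url: ... } and the
    // config-file fields are skipped entirely.
    ...(process.env.DATABASE_URL
      ? { url: process.env.DATABASE_URL }
      : {
          host: config.host,
          port: config.port,
          username: config.user,
          password: config.pass,
          database: config.db,
        }),
  };
}

// Example: with DATABASE_URL=postgres://firefish:secret@db.example.com:5432/firefish
// the returned options contain only { type, url }.

The cache block reuses the same idea through the `port` slot: per the inline comment in the diff, TypeORM forwards that value to ioredis as the first constructor argument, and ioredis accepts a full connection URL in that position.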

View File

@@ -6,16 +6,18 @@ export function createConnection() {
   if (config.cacheServer) {
     source = config.cacheServer;
   }
-  return new Redis({
-    port: source.port,
-    host: source.host,
-    family: source.family ?? 0,
-    password: source.pass,
-    username: source.user ?? "default",
-    keyPrefix: `${source.prefix}:`,
-    db: source.db || 0,
-    tls: source.tls,
-  });
+  return new Redis(
+    process.env.REDIS_URL || {
+      port: source.port,
+      host: source.host,
+      family: source.family ?? 0,
+      password: source.pass,
+      username: source.user ?? "default",
+      keyPrefix: `${source.prefix}:`,
+      db: source.db || 0,
+      tls: source.tls,
+    },
+  );
 }
 
 export const subscriber = createConnection();

View File

@@ -0,0 +1,17 @@
+import type { MigrationInterface, QueryRunner } from "typeorm";
+
+export class AddDriveFileUsage1713451569342 implements MigrationInterface {
+  public async up(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(
+      `CREATE TYPE drive_file_usage_hint_enum AS ENUM ('userAvatar', 'userBanner')`,
+    );
+    await queryRunner.query(
+      `ALTER TABLE "drive_file" ADD "usageHint" drive_file_usage_hint_enum DEFAULT NULL`,
+    );
+  }
+
+  public async down(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(`ALTER TABLE "drive_file" DROP COLUMN "usageHint"`);
+    await queryRunner.query(`DROP TYPE drive_file_usage_hint_enum`);
+  }
+}

View File

@@ -16,6 +16,8 @@ import { DriveFolder } from "./drive-folder.js";
 import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
 import { NoteFile } from "./note-file.js";
 
+export type DriveFileUsageHint = "userAvatar" | "userBanner" | null;
+
 @Entity()
 @Index(["userId", "folderId", "id"])
 export class DriveFile {
@@ -177,6 +179,14 @@ export class DriveFile {
   })
   public isSensitive: boolean;
 
+  // Hint for what this file is used for
+  @Column({
+    type: "enum",
+    enum: ["userAvatar", "userBanner"],
+    nullable: true,
+  })
+  public usageHint: DriveFileUsageHint;
+
   /**
    * Whether or not this is a direct link to the () URL
   */

View File

@@ -152,6 +152,7 @@ export const DriveFileRepository = db.getRepository(DriveFile).extend({
       md5: file.md5,
       size: file.size,
       isSensitive: file.isSensitive,
+      usageHint: file.usageHint,
       blurhash: file.blurhash,
       properties: opts.self ? file.properties : this.getPublicProperties(file),
       url: opts.self ? file.url : this.getPublicUrl(file, false),
@@ -193,6 +194,7 @@ export const DriveFileRepository = db.getRepository(DriveFile).extend({
       md5: file.md5,
       size: file.size,
       isSensitive: file.isSensitive,
+      usageHint: file.usageHint,
       blurhash: file.blurhash,
       properties: opts.self ? file.properties : this.getPublicProperties(file),
       url: opts.self ? file.url : this.getPublicUrl(file, false),

View File

@@ -44,6 +44,12 @@ export const packedDriveFileSchema = {
       optional: false,
       nullable: false,
     },
+    usageHint: {
+      type: "string",
+      optional: false,
+      nullable: true,
+      enum: ["userAvatar", "userBanner"],
+    },
     blurhash: {
       type: "string",
       optional: false,
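With this schema change, a packed drive file exposes a non-optional but nullable `usageHint` alongside its existing fields. A small sketch of how an API consumer might branch on it; the `PackedDriveFile` interface below is a simplified stand-in for the full packed schema, not a type exported by Firefish:

type DriveFileUsageHint = "userAvatar" | "userBanner" | null;

// Simplified stand-in for the drive file object returned by the API.
interface PackedDriveFile {
  id: string;
  name: string;
  usageHint: DriveFileUsageHint;
}

function describeUsage(file: PackedDriveFile): string {
  switch (file.usageHint) {
    case "userAvatar":
      return `${file.name} is currently used as an avatar`;
    case "userBanner":
      return `${file.name} is currently used as a banner`;
    default:
      return `${file.name} has no recorded usage hint`;
  }
}

// Example: describeUsage({ id: "abc", name: "me.png", usageHint: "userAvatar" })
// -> "me.png is currently used as an avatar"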

View File

@@ -4,12 +4,18 @@ import { entities } from "./db/postgre.js";
 
 export default new DataSource({
   type: "postgres",
-  host: config.db.host,
-  port: config.db.port,
-  username: config.db.user,
-  password: config.db.pass,
-  database: config.db.db,
-  extra: config.db.extra,
+  ...(process.env.DATABASE_URL
+    ? {
+        url: process.env.DATABASE_URL,
+      }
+    : {
+        host: config.db.host,
+        port: config.db.port,
+        username: config.db.user,
+        password: config.db.pass,
+        database: config.db.db,
+      }),
+  ...(config.db && { extra: config.db.extra }),
   entities: entities,
   migrations: ["built/migration/*.js"],
 });

View File

@@ -2,16 +2,18 @@ import Bull from "bull";
 import config from "@/config/index.js";
 
 export function initialize<T>(name: string, limitPerSec = -1) {
-  return new Bull<T>(name, {
-    redis: {
-      port: config.redis.port,
-      host: config.redis.host,
-      family: config.redis.family == null ? 0 : config.redis.family,
-      username: config.redis.user ?? "default",
-      password: config.redis.pass,
-      db: config.redis.db || 0,
-      tls: config.redis.tls,
-    },
+  return new Bull<T>(name, process.env.REDIS_URL, {
+    redis: process.env.REDIS_URL
+      ? undefined
+      : {
+          port: config.redis.port,
+          host: config.redis.host,
+          family: config.redis.family == null ? 0 : config.redis.family,
+          username: config.redis.user ?? "default",
+          password: config.redis.pass,
+          db: config.redis.db || 0,
+          tls: config.redis.tls,
+        },
     prefix: config.redis.prefix ? `${config.redis.prefix}:queue` : "queue",
     limiter:
       limitPerSec > 0
@@ -34,7 +36,7 @@ export function initialize<T>(name: string, limitPerSec = -1) {
 function apBackoff(attemptsMade: number, err: Error) {
   const baseDelay = 60 * 1000; // 1min
   const maxBackoff = 8 * 60 * 60 * 1000; // 8hours
-  let backoff = (Math.pow(2, attemptsMade) - 1) * baseDelay;
+  let backoff = (2 ** attemptsMade - 1) * baseDelay;
   backoff = Math.min(backoff, maxBackoff);
   backoff += Math.round(backoff * Math.random() * 0.2);
   return backoff;
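The `apBackoff` change is purely cosmetic (`Math.pow(2, n)` becomes `2 ** n`); the delay schedule is unchanged. For reference, a small sketch of the base delays the formula produces before the up-to-20% random jitter is added, reusing the constants from the diff:

// Recompute the base delays of apBackoff for the first attempts.
const baseDelay = 60 * 1000; // 1 min
const maxBackoff = 8 * 60 * 60 * 1000; // 8 hours

for (let attemptsMade = 1; attemptsMade <= 10; attemptsMade++) {
  const backoff = Math.min((2 ** attemptsMade - 1) * baseDelay, maxBackoff);
  console.log(`attempt ${attemptsMade}: ${backoff / 60_000} min`);
}
// attempt 1: 1 min, attempt 2: 3 min, attempt 3: 7 min, attempt 4: 15 min, ...
// attempt 8: 255 min; from attempt 9 onward the delay is capped at 480 min (8 hours).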

View File

@@ -3,7 +3,10 @@ import type { CacheableRemoteUser } from "@/models/entities/user.js";
 import Resolver from "../resolver.js";
 import { fetchMeta } from "backend-rs";
 import { apLogger } from "../logger.js";
-import type { DriveFile } from "@/models/entities/drive-file.js";
+import type {
+  DriveFile,
+  DriveFileUsageHint,
+} from "@/models/entities/drive-file.js";
 import { DriveFiles } from "@/models/index.js";
 import { truncate } from "@/misc/truncate.js";
 import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
@@ -16,6 +19,7 @@ const logger = apLogger;
 export async function createImage(
   actor: CacheableRemoteUser,
   value: any,
+  usage: DriveFileUsageHint,
 ): Promise<DriveFile> {
   // Skip if author is frozen.
   if (actor.isSuspended) {
@@ -43,6 +47,7 @@ export async function createImage(
     sensitive: image.sensitive,
     isLink: !instance.cacheRemoteFiles,
     comment: truncate(image.name, DB_MAX_IMAGE_COMMENT_LENGTH),
+    usageHint: usage,
   });
 
   if (file.isLink) {
@@ -73,9 +78,10 @@ export async function createImage(
 export async function resolveImage(
   actor: CacheableRemoteUser,
   value: any,
+  usage: DriveFileUsageHint,
 ): Promise<DriveFile> {
   // TODO
   // Fetch from remote server and register
-  return await createImage(actor, value);
+  return await createImage(actor, value, usage);
 }

View File

@@ -213,7 +213,8 @@ export async function createNote(
     ? (
         await Promise.all(
           note.attachment.map(
-            (x) => limit(() => resolveImage(actor, x)) as Promise<DriveFile>,
+            (x) =>
+              limit(() => resolveImage(actor, x, null)) as Promise<DriveFile>,
           ),
         )
       ).filter((image) => image != null)
@@ -616,7 +617,7 @@ export async function updateNote(value: string | IObject, resolver?: Resolver) {
       fileList.map(
         (x) =>
           limit(async () => {
-            const file = await resolveImage(actor, x);
+            const file = await resolveImage(actor, x, null);
             const update: Partial<DriveFile> = {};
             const altText = truncate(x.name, DB_MAX_IMAGE_COMMENT_LENGTH);

View File

@@ -10,6 +10,7 @@ import {
   Followings,
   UserProfiles,
   UserPublickeys,
+  DriveFiles,
 } from "@/models/index.js";
 import type { IRemoteUser, CacheableUser } from "@/models/entities/user.js";
 import { User } from "@/models/entities/user.js";
@@ -362,10 +363,14 @@ export async function createPerson(
   //#region Fetch avatar and header image
   const [avatar, banner] = await Promise.all(
-    [person.icon, person.image].map((img) =>
+    [person.icon, person.image].map((img, index) =>
       img == null
         ? Promise.resolve(null)
-        : resolveImage(user!, img).catch(() => null),
+        : resolveImage(
+            user,
+            img,
+            index === 0 ? "userAvatar" : index === 1 ? "userBanner" : null,
+          ).catch(() => null),
     ),
   );
@@ -438,10 +443,14 @@ export async function updatePerson(
   // Fetch avatar and header image
   const [avatar, banner] = await Promise.all(
-    [person.icon, person.image].map((img) =>
+    [person.icon, person.image].map((img, index) =>
       img == null
         ? Promise.resolve(null)
-        : resolveImage(user, img).catch(() => null),
+        : resolveImage(
+            user,
+            img,
+            index === 0 ? "userAvatar" : index === 1 ? "userBanner" : null,
+          ).catch(() => null),
     ),
   );
@@ -561,10 +570,14 @@ export async function updatePerson(
   } as Partial<User>;
 
   if (avatar) {
+    if (user?.avatarId)
+      await DriveFiles.update(user.avatarId, { usageHint: null });
     updates.avatarId = avatar.id;
   }
 
   if (banner) {
+    if (user?.bannerId)
+      await DriveFiles.update(user.bannerId, { usageHint: null });
     updates.bannerId = banner.id;
   }

View File

@@ -13,6 +13,7 @@ import { normalizeForSearch } from "@/misc/normalize-for-search.js";
 import { verifyLink } from "@/services/fetch-rel-me.js";
 import { ApiError } from "@/server/api/error.js";
 import define from "@/server/api/define.js";
+import { DriveFile } from "@/models/entities/drive-file";
 
 export const meta = {
   tags: ["account"],
@@ -241,8 +242,9 @@ export default define(meta, paramDef, async (ps, _user, token) => {
   if (ps.emailNotificationTypes !== undefined)
     profileUpdates.emailNotificationTypes = ps.emailNotificationTypes;
 
+  let avatar: DriveFile | null = null;
   if (ps.avatarId) {
-    const avatar = await DriveFiles.findOneBy({ id: ps.avatarId });
+    avatar = await DriveFiles.findOneBy({ id: ps.avatarId });
 
     if (avatar == null || avatar.userId !== user.id)
       throw new ApiError(meta.errors.noSuchAvatar);
@@ -250,8 +252,9 @@ export default define(meta, paramDef, async (ps, _user, token) => {
       throw new ApiError(meta.errors.avatarNotAnImage);
   }
 
+  let banner: DriveFile | null = null;
   if (ps.bannerId) {
-    const banner = await DriveFiles.findOneBy({ id: ps.bannerId });
+    banner = await DriveFiles.findOneBy({ id: ps.bannerId });
 
     if (banner == null || banner.userId !== user.id)
       throw new ApiError(meta.errors.noSuchBanner);
@@ -328,6 +331,20 @@ export default define(meta, paramDef, async (ps, _user, token) => {
   updateUsertags(user, tags);
   //#endregion
 
+  // Update old/new avatar usage hints
+  if (avatar) {
+    if (user.avatarId)
+      await DriveFiles.update(user.avatarId, { usageHint: null });
+    await DriveFiles.update(avatar.id, { usageHint: "userAvatar" });
+  }
+
+  // Update old/new banner usage hints
+  if (banner) {
+    if (user.bannerId)
+      await DriveFiles.update(user.bannerId, { usageHint: null });
+    await DriveFiles.update(banner.id, { usageHint: "userBanner" });
+  }
+
   if (Object.keys(updates).length > 0) await Users.update(user.id, updates);
   if (Object.keys(profileUpdates).length > 0)
     await UserProfiles.update(user.id, profileUpdates);
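The new block above maintains a simple invariant: at most one of a user's files carries the `userAvatar` hint and at most one carries `userBanner`, so the previously tagged file is cleared before the new one is tagged. A hedged sketch of that bookkeeping in isolation; `update` below is a stand-in for `DriveFiles.update`, not the real repository method:

type UsageHint = "userAvatar" | "userBanner" | null;

// Stand-in for DriveFiles.update(id, { usageHint }).
async function update(
  fileId: string,
  patch: { usageHint: UsageHint },
): Promise<void> {
  console.log(`update ${fileId}:`, patch);
}

// Clear the hint on the previously used file (if any), then tag the new one.
async function swapUsageHint(
  previousFileId: string | null,
  newFileId: string,
  hint: Exclude<UsageHint, null>,
): Promise<void> {
  if (previousFileId) await update(previousFileId, { usageHint: null });
  await update(newFileId, { usageHint: hint });
}

// Example: the user switches their avatar from file "old123" to file "new456".
void swapUsageHint("old123", "new456", "userAvatar");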

View File

@@ -54,6 +54,10 @@ app.use(async (ctx, next) => {
   const url = decodeURI(ctx.path);
 
   if (url === bullBoardPath || url.startsWith(`${bullBoardPath}/`)) {
+    if (!url.startsWith(`${bullBoardPath}/static/`)) {
+      ctx.set("Cache-Control", "private, max-age=0, must-revalidate");
+    }
+
     const token = ctx.cookies.get("token");
     if (token == null) {
       ctx.status = 401;

View File

@@ -16,6 +16,8 @@ import {
   UserProfiles,
 } from "@/models/index.js";
 import { DriveFile } from "@/models/entities/drive-file.js";
+import type { DriveFileUsageHint } from "@/models/entities/drive-file.js";
 import type { IRemoteUser, User } from "@/models/entities/user.js";
 import { genId } from "backend-rs";
 import { isDuplicateKeyValueError } from "@/misc/is-duplicate-key-value-error.js";
@@ -65,6 +66,7 @@ function urlPathJoin(
  * @param type Content-Type for original
  * @param hash Hash for original
  * @param size Size for original
+ * @param usage Optional usage hint for file (f.e. "userAvatar")
  */
 async function save(
   file: DriveFile,
@@ -73,6 +75,7 @@ async function save(
   type: string,
   hash: string,
   size: number,
+  usage: DriveFileUsageHint = null,
 ): Promise<DriveFile> {
   // Generate thumbnail and webpublic versions if necessary
   const alts = await generateAlts(path, type, !file.uri);
@@ -161,6 +164,7 @@ async function save(
     file.md5 = hash;
     file.size = size;
     file.storedInternal = false;
+    file.usageHint = usage ?? null;
 
     return await DriveFiles.insert(file).then((x) =>
       DriveFiles.findOneByOrFail(x.identifiers[0]),
@@ -204,6 +208,7 @@ async function save(
     file.type = type;
     file.md5 = hash;
     file.size = size;
+    file.usageHint = usage ?? null;
 
     return await DriveFiles.insert(file).then((x) =>
       DriveFiles.findOneByOrFail(x.identifiers[0]),
@@ -450,6 +455,9 @@ type AddFileArgs = {
   requestIp?: string | null;
   requestHeaders?: Record<string, string> | null;
+
+  /** Whether this file has a known use case, like user avatar or instance icon */
+  usageHint?: DriveFileUsageHint;
 };
 
 /**
@@ -469,6 +477,7 @@ export async function addFile({
   sensitive = null,
   requestIp = null,
   requestHeaders = null,
+  usageHint = null,
 }: AddFileArgs): Promise<DriveFile> {
   const info = await getFileInfo(path);
   logger.info(`${JSON.stringify(info)}`);
@@ -581,6 +590,7 @@ export async function addFile({
   file.isLink = isLink;
   file.requestIp = requestIp;
   file.requestHeaders = requestHeaders;
+  file.usageHint = usageHint;
   file.isSensitive = user
     ? Users.isLocalUser(user) &&
       (instance!.markLocalFilesNsfwByDefault || profile!.alwaysMarkNsfw)
@@ -639,6 +649,7 @@ export async function addFile({
     info.type.mime,
     info.md5,
     info.size,
+    usageHint,
   );
 }

View File

@@ -3,7 +3,10 @@ import type { User } from "@/models/entities/user.js";
 import { createTemp } from "@/misc/create-temp.js";
 import { downloadUrl, isPrivateIp } from "@/misc/download-url.js";
 import type { DriveFolder } from "@/models/entities/drive-folder.js";
-import type { DriveFile } from "@/models/entities/drive-file.js";
+import type {
+  DriveFile,
+  DriveFileUsageHint,
+} from "@/models/entities/drive-file.js";
 import { DriveFiles } from "@/models/index.js";
 import { driveLogger } from "./logger.js";
 import { addFile } from "./add-file.js";
@@ -13,7 +16,11 @@ const logger = driveLogger.createSubLogger("downloader");
 type Args = {
   url: string;
-  user: { id: User["id"]; host: User["host"] } | null;
+  user: {
+    id: User["id"];
+    host: User["host"];
+    driveCapacityOverrideMb: User["driveCapacityOverrideMb"];
+  } | null;
   folderId?: DriveFolder["id"] | null;
   uri?: string | null;
   sensitive?: boolean;
@@ -22,6 +29,7 @@ type Args = {
   comment?: string | null;
   requestIp?: string | null;
   requestHeaders?: Record<string, string> | null;
+  usageHint?: DriveFileUsageHint;
 };
 
 export async function uploadFromUrl({
@@ -35,6 +43,7 @@ export async function uploadFromUrl({
   comment = null,
   requestIp = null,
   requestHeaders = null,
+  usageHint = null,
 }: Args): Promise<DriveFile> {
   const parsedUrl = new URL(url);
 
   if (
@@ -75,9 +84,10 @@ export async function uploadFromUrl({
     sensitive,
     requestIp,
     requestHeaders,
+    usageHint,
   });
 
   logger.succ(`Got: ${driveFile.id}`);
-  return driveFile!;
+  return driveFile;
 } catch (e) {
   logger.error(`Failed to create drive file:\n${inspect(e)}`);
   throw e;

View File

@@ -28,9 +28,9 @@ export default class Logger {
     if (config.syslog) {
       this.syslogClient = new SyslogPro.RFC5424({
-        applacationName: "Firefish",
+        applicationName: "Firefish",
         timestamp: true,
-        encludeStructuredData: true,
+        includeStructuredData: true,
         color: true,
         extendedColor: true,
         server: {
@@ -144,12 +144,12 @@ export default class Logger {
     }
   }
 
+  // Used when the process can't continue (fatal error)
   public error(
     x: string | Error,
     data?: Record<string, any> | null,
    important = false,
   ): void {
-    // 実行を継続できない状況で使う
     if (x instanceof Error) {
      data = data || {};
      data.e = x;
@@ -166,30 +166,30 @@ export default class Logger {
     }
   }
 
+  // Used when the process can continue but some action should be taken
   public warn(
     message: string,
     data?: Record<string, any> | null,
     important = false,
   ): void {
-    // 実行を継続できるが改善すべき状況で使う
     this.log("warning", message, data, important);
   }
 
+  // Used when something is successful
   public succ(
     message: string,
     data?: Record<string, any> | null,
     important = false,
   ): void {
-    // 何かに成功した状況で使う
     this.log("success", message, data, important);
   }
 
+  // Used for debugging (information necessary for developers but unnecessary for users)
   public debug(
     message: string,
     data?: Record<string, any> | null,
     important = false,
   ): void {
-    // Used for debugging (information necessary for developers but unnecessary for users)
     // Fixed if statement is ignored when logLevel includes debug
     if (
       config.logLevel?.includes("debug") ||
@@ -200,12 +200,12 @@ export default class Logger {
     }
   }
 
+  // Other generic logs
   public info(
     message: string,
     data?: Record<string, any> | null,
     important = false,
   ): void {
-    // それ以外
    this.log("info", message, data, important);
  }
} }