Compare commits

...

25 Commits

Author SHA1 Message Date
Linca 070da75672 Merge branch 'fix-dev-build' into 'develop'
fix: backend-rs not working in dev-build

Co-authored-by: Lhcfl <Lhcfl@outlook.com>

See merge request firefish/firefish!10780
2024-05-06 13:55:20 +00:00
naskya a3b156441a
ci: temporary fix for cargo test failure due to missing meta.json 2024-05-06 19:38:35 +09:00
naskya ecbd8a8724
ci: save node_modules and target 2024-05-06 19:23:43 +09:00
naskya 442dc33a34
ci: exec build & cargo test only for now 2024-05-06 19:08:28 +09:00
naskya c8372767fa
ci: attempt to fix permission 2024-05-06 19:00:34 +09:00
naskya 8e497b41cf
messed up 2024-05-06 18:44:28 +09:00
naskya bfdf73caeb
ci: fix permisson 2024-05-06 18:38:54 +09:00
naskya 5b18f9761c
ci: fix .git 2024-05-06 18:29:31 +09:00
naskya 641ff742bb
ci: add dependencies of sea-orm-cli 2024-05-06 18:26:50 +09:00
naskya e6121946aa
ci: another fix 2024-05-06 18:11:30 +09:00
naskya c6212ff8f4
ci: use CI_JOB_TOKEN 2024-05-06 18:06:56 +09:00
naskya d582a84c57
ci: install postgresql client 2024-05-06 17:58:26 +09:00
naskya a7978e2b08
ci: non-interactive shell option 2024-05-06 17:46:45 +09:00
naskya 766bac3dee
ci: give alias for services 2024-05-06 17:14:47 +09:00
naskya 7360736966
ci: fix typo 2024-05-06 17:07:42 +09:00
naskya e797849e9b
ci: attempt to add a CI task for merge requests 2024-05-06 17:00:36 +09:00
naskya 4e83dbd01f Merge branch 'refactor/remove-gulp' into 'develop'
refactor: replace gulp with a simple script


See merge request firefish/firefish!10791
2024-05-06 04:45:17 +00:00
naskya dd74eabae1
refactor (backend): port nodeinfo fetcher to backend-rs 2024-05-06 08:12:21 +09:00
naskya 711618b42c
test (backend-rs): add tests for nodeinfo (de)serialization 2024-05-06 05:20:13 +09:00
naskya 510207b101
refactor (backend-rs): separate nodeinfo generator and schema 2024-05-06 04:23:38 +09:00
naskya 49825853c1
refactor (backend): port nodeinfo generator to backend-rs 2024-05-06 03:01:55 +09:00
naskya fda81a9f91
chore: use absolute path for file operations 2024-05-05 21:04:20 +09:00
naskya c505c6df36
fix: remove old locale files 2024-05-05 20:59:26 +09:00
naskya 341b43ed71
refactor: replace gulp with a simple script 2024-05-05 02:19:58 +09:00
Lhcfl d17ed06708 fix: backend-rs not working in dev-build 2024-04-28 21:27:42 +08:00
23 changed files with 853 additions and 2830 deletions

View File

@ -1,195 +1,11 @@
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Firefish configuration
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# ┌─────┐
#───┘ URL └─────────────────────────────────────────────────────
# Final accessible URL seen by a user.
url: https://example.tld/
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# URL SETTINGS AFTER THAT!
# ┌───────────────────────┐
#───┘ Port and TLS settings └───────────────────────────────────
#
# Misskey requires a reverse proxy to support HTTPS connections.
#
# +----- https://example.tld/ ------------+
# +------+ |+-------------+ +----------------+|
# | User | ---> || Proxy (443) | ---> | Misskey (3000) ||
# +------+ |+-------------+ +----------------+|
# +---------------------------------------+
#
# You need to set up a reverse proxy. (e.g. nginx)
# An encrypted connection with HTTPS is highly recommended
# because tokens may be transferred in GET requests.
# The port that your Misskey server should listen on.
url: http://localhost:3000
port: 3000
# ┌──────────────────────────┐
#───┘ PostgreSQL configuration └────────────────────────────────
db:
host: postgres
port: 5432
# Database name
db: postgres
# Auth
user: postgres
pass: test
# Whether disable Caching queries
#disableCache: true
# Extra Connection options
#extra:
# ssl: true
# ┌─────────────────────┐
#───┘ Redis configuration └─────────────────────────────────────
db: firefish_db
user: firefish
pass: password
redis:
host: redis
port: 6379
#family: 0 # 0=Both, 4=IPv4, 6=IPv6
#pass: example-pass
#prefix: example-prefix
#db: 1
# ┌─────────────────────────────┐
#───┘ Elasticsearch configuration └─────────────────────────────
#elasticsearch:
# host: localhost
# port: 9200
# ssl: false
# user:
# pass:
# ┌───────────────┐
#───┘ ID generation └───────────────────────────────────────────
# You can select the ID generation method.
# You don't usually need to change this setting, but you can
# change it according to your preferences.
# Available methods:
# aid ... Short, Millisecond accuracy
# meid ... Similar to ObjectID, Millisecond accuracy
# ulid ... Millisecond accuracy
# objectid ... This is left for backward compatibility
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# ID SETTINGS AFTER THAT!
id: 'aid'
# ┌─────────────────────┐
#───┘ Other configuration └─────────────────────────────────────
# Max note length, should be < 8000.
#maxNoteLength: 3000
# Whether disable HSTS
#disableHsts: true
# Number of worker processes
#clusterLimit: 1
# Job concurrency per worker
# deliverJobConcurrency: 128
# inboxJobConcurrency: 16
# Job rate limiter
# deliverJobPerSec: 128
# inboxJobPerSec: 16
# Job attempts
# deliverJobMaxAttempts: 12
# inboxJobMaxAttempts: 8
# IP address family used for outgoing request (ipv4, ipv6 or dual)
#outgoingAddressFamily: ipv4
# Syslog option
#syslog:
# host: localhost
# port: 514
# Proxy for HTTP/HTTPS
#proxy: http://127.0.0.1:3128
#proxyBypassHosts: [
# 'example.com',
# '192.0.2.8'
#]
# Proxy for SMTP/SMTPS
#proxySmtp: http://127.0.0.1:3128 # use HTTP/1.1 CONNECT
#proxySmtp: socks4://127.0.0.1:1080 # use SOCKS4
#proxySmtp: socks5://127.0.0.1:1080 # use SOCKS5
# Media Proxy
#mediaProxy: https://example.com/proxy
# Proxy remote files (default: false)
#proxyRemoteFiles: true
#allowedPrivateNetworks: [
# '127.0.0.1/32'
#]
# Upload or download file size limits (bytes)
#maxFileSize: 262144000
# Managed hosting settings
# !!!!!!!!!!
# >>>>>> NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS! <<<<<<
# !!!!!!!!!!
# Each category is optional, but if each item in each category is mandatory!
# If you mess this up, that's on you, you've been warned...
#maxUserSignups: 100
#isManagedHosting: true
#deepl:
# managed: true
# authKey: ''
# isPro: false
#
#email:
# managed: true
# address: 'example@email.com'
# host: 'email.com'
# port: 587
# user: 'example@email.com'
# pass: ''
# useImplicitSslTls: false
#
#objectStorage:
# managed: true
# baseUrl: ''
# bucket: ''
# prefix: ''
# endpoint: ''
# region: ''
# accessKey: ''
# secretKey: ''
# useSsl: true
# connnectOverProxy: false
# setPublicReadOnUpload: true
# s3ForcePathStyle: true
# !!!!!!!!!!
# >>>>>> AGAIN, NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS, ABOVE SETTINGS ARE FOR MANAGED HOSTING ONLY! <<<<<<
# !!!!!!!!!!
# Seriously. Do NOT fill out the above settings if you're self-hosting.
# They're much better off being set from the control panel.

.gitlab-ci.yml Normal file
View File

@ -0,0 +1,48 @@
image: docker.io/node:18-alpine
services:
- name: docker.io/groonga/pgroonga:latest-alpine-12-slim
alias: postgres
- name: docker.io/redis:7-alpine
alias: redis
workflow:
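# Note: rules are evaluated top to bottom and the first matching rule wins.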
rules:
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
when: always
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main'
when: never
cache:
paths:
- node_modules/
- target/
stages:
- test
variables:
POSTGRES_DB: firefish_db
POSTGRES_USER: firefish
POSTGRES_PASSWORD: password
POSTGRES_HOST_AUTH_METHOD: trust
GIT_CLEAN_FLAGS: -ffdx --exclude node_modules/ --exclude target/
default:
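# Every job installs the build toolchain (native build deps, Rust, pnpm), copies the CI config into place, and creates the pgroonga extension before its script runs.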
before_script:
- apk add --update build-base linux-headers curl ca-certificates python3 perl postgresql-client
- curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
- . "${HOME}/.cargo/env"
- corepack enable
- corepack prepare pnpm@latest --activate
- cp .config/ci.yml .config/default.yml
- export PGPASSWORD="${POSTGRES_PASSWORD}"
- psql --host postgres --user "${POSTGRES_USER}" --dbname "${POSTGRES_DB}" --command 'CREATE EXTENSION pgroonga'
build_and_cargo_unit_test:
stage: test
script:
- pnpm install --frozen-lockfile
- pnpm run build:debug
- pnpm run migrate
- cargo test

View File

@ -45,7 +45,7 @@ COPY packages/backend-rs/index.js packages/backend-rs/built/index.js
# Copy in the rest of the files to compile
COPY . ./
RUN NODE_ENV='production' pnpm run --filter firefish-js build
RUN NODE_ENV='production' pnpm run --recursive --parallel --filter '!backend-rs' --filter '!firefish-js' build && pnpm run gulp
RUN NODE_ENV='production' pnpm run --recursive --parallel --filter '!backend-rs' --filter '!firefish-js' build && pnpm run build:assets
# Trim down the dependencies to only those for production
RUN find . -path '*/node_modules/*' -delete && pnpm install --prod --frozen-lockfile

View File

@ -326,7 +326,7 @@ cd ~/firefish
- To add custom locales, place them in the `./custom/locales/` directory. If you name your custom locale the same as an existing locale, it will overwrite it. If you give it a unique name, it will be added to the list. Also make sure that the first part of the filename matches the locale you're basing it on. (Example: `en-FOO.yml`)
- To add custom error images, place them in the `./custom/assets/badges` directory, replacing the files already there.
- To add custom sounds, place only mp3 files in the `./custom/assets/sounds` directory.
- To update custom assets without rebuilding, just run `pnpm run gulp`.
- To update custom assets without rebuilding, just run `pnpm run build:assets`.
- To block ChatGPT, CommonCrawl, or other crawlers from indexing your instance, uncomment the respective rules in `./custom/robots.txt`.
## Tips & Tricks

View File

@ -1,101 +0,0 @@
/**
* Gulp tasks
*/
const fs = require("fs");
const gulp = require("gulp");
const replace = require("gulp-replace");
const terser = require("gulp-terser");
const cssnano = require("gulp-cssnano");
const meta = require("./package.json");
gulp.task("copy:backend:views", () =>
gulp
.src("./packages/backend/src/server/web/views/**/*")
.pipe(gulp.dest("./packages/backend/built/server/web/views")),
);
gulp.task("copy:backend:custom", () =>
gulp
.src("./custom/assets/**/*")
.pipe(gulp.dest("./packages/backend/assets/")),
);
gulp.task("copy:client:fonts", () =>
gulp
.src("./packages/client/node_modules/three/examples/fonts/**/*")
.pipe(gulp.dest("./built/_client_dist_/fonts/")),
);
gulp.task("copy:client:locales", async (cb) => {
fs.mkdirSync("./built/_client_dist_/locales", { recursive: true });
const { default: locales } = await import("./locales/index.mjs");
const v = { _version_: meta.version };
for (const [lang, locale] of Object.entries(locales)) {
fs.writeFileSync(
`./built/_client_dist_/locales/${lang}.${meta.version}.json`,
JSON.stringify({ ...locale, ...v }),
"utf-8",
);
}
cb();
});
gulp.task("build:backend:script", async () => {
const { default: locales } = await import("./locales/index.mjs");
return gulp
.src([
"./packages/backend/src/server/web/boot.js",
"./packages/backend/src/server/web/bios.js",
"./packages/backend/src/server/web/cli.js",
])
.pipe(replace("SUPPORTED_LANGS", JSON.stringify(Object.keys(locales))))
.pipe(
terser({
toplevel: true,
}),
)
.pipe(gulp.dest("./packages/backend/built/server/web/"));
});
gulp.task("build:backend:style", () => {
return gulp
.src([
"./packages/backend/src/server/web/style.css",
"./packages/backend/src/server/web/bios.css",
"./packages/backend/src/server/web/cli.css",
])
.pipe(
cssnano({
zindex: false,
}),
)
.pipe(gulp.dest("./packages/backend/built/server/web/"));
});
gulp.task(
"build",
gulp.parallel(
"copy:client:locales",
"copy:backend:views",
"copy:backend:custom",
"build:backend:script",
"build:backend:style",
"copy:client:fonts",
),
);
gulp.task("default", gulp.task("build"));
gulp.task("watch", () => {
gulp.watch(
["./packages/*/src/**/*"],
{ ignoreInitial: false },
gulp.task("build"),
);
});

View File

@ -9,14 +9,15 @@
"private": true,
"scripts": {
"rebuild": "pnpm run clean && pnpm run build",
"build": "pnpm node ./scripts/build.mjs && pnpm run gulp",
"build": "pnpm node ./scripts/build.mjs && pnpm run build:assets",
"build:assets": "pnpm node ./scripts/copy-assets.mjs",
"build:debug": "pnpm run clean && pnpm node ./scripts/dev-build.mjs && pnpm run build:assets",
"start": "pnpm --filter backend run start",
"start:container": "pnpm run gulp && pnpm run migrate && pnpm run start",
"start:container": "pnpm run build:assets && pnpm run migrate && pnpm run start",
"start:test": "pnpm --filter backend run start:test",
"init": "pnpm run migrate",
"migrate": "pnpm --filter backend run migration:run",
"revertmigration": "pnpm --filter backend run migration:revert",
"gulp": "gulp build",
"watch": "pnpm run dev",
"dev": "pnpm node ./scripts/dev.mjs",
"dev:staging": "NODE_OPTIONS=--max_old_space_size=3072 NODE_ENV=development pnpm run build && pnpm run start",
@ -24,7 +25,6 @@
"lint:ts": "pnpm --filter !firefish-js -r --parallel run lint",
"lint:rs": "cargo clippy --fix --allow-dirty --allow-staged && cargo fmt --all --",
"debug": "pnpm run build:debug && pnpm run start",
"build:debug": "pnpm run clean && pnpm node ./scripts/dev-build.mjs && pnpm run gulp",
"mocha": "pnpm --filter backend run mocha",
"test": "pnpm run test:ts && pnpm run test:rs",
"test:ts": "pnpm run mocha",
@ -38,10 +38,6 @@
"clean-all": "pnpm run clean && pnpm run clean-cargo && pnpm run clean-npm"
},
"dependencies": {
"gulp": "4.0.2",
"gulp-cssnano": "2.1.3",
"gulp-replace": "1.1.4",
"gulp-terser": "2.1.0",
"js-yaml": "4.1.0"
},
"devDependencies": {

View File

@ -1155,6 +1155,106 @@ export interface Webhook {
latestStatus: number | null
}
export function initializeRustLogger(): void
export function fetchNodeinfo(host: string): Promise<Nodeinfo>
export function nodeinfo_2_1(): Promise<any>
export function nodeinfo_2_0(): Promise<any>
/** NodeInfo schema version 2.0. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.0 */
export interface Nodeinfo {
/** The schema version, must be 2.0. */
version: string
/** Metadata about server software in use. */
software: Software20
/** The protocols supported on this server. */
protocols: Array<Protocol>
/** The third party sites this server can connect to via their application API. */
services: Services
/** Whether this server allows open self-registration. */
openRegistrations: boolean
/** Usage statistics for this server. */
usage: Usage
/** Free form key value pairs for software specific values. Clients should not rely on any specific key present. */
metadata: Record<string, any>
}
/** Metadata about server software in use (version 2.0). */
export interface Software20 {
/** The canonical name of this server software. */
name: string
/** The version of this server software. */
version: string
}
export enum Protocol {
Activitypub = 'activitypub',
Buddycloud = 'buddycloud',
Dfrn = 'dfrn',
Diaspora = 'diaspora',
Libertree = 'libertree',
Ostatus = 'ostatus',
Pumpio = 'pumpio',
Tent = 'tent',
Xmpp = 'xmpp',
Zot = 'zot'
}
/** The third party sites this server can connect to via their application API. */
export interface Services {
/** The third party sites this server can retrieve messages from for combined display with regular traffic. */
inbound: Array<Inbound>
/** The third party sites this server can publish messages to on the behalf of a user. */
outbound: Array<Outbound>
}
/** The third party sites this server can retrieve messages from for combined display with regular traffic. */
export enum Inbound {
Atom1 = 'atom1',
Gnusocial = 'gnusocial',
Imap = 'imap',
Pnut = 'pnut',
Pop3 = 'pop3',
Pumpio = 'pumpio',
Rss2 = 'rss2',
Twitter = 'twitter'
}
/** The third party sites this server can publish messages to on the behalf of a user. */
export enum Outbound {
Atom1 = 'atom1',
Blogger = 'blogger',
Buddycloud = 'buddycloud',
Diaspora = 'diaspora',
Dreamwidth = 'dreamwidth',
Drupal = 'drupal',
Facebook = 'facebook',
Friendica = 'friendica',
Gnusocial = 'gnusocial',
Google = 'google',
Insanejournal = 'insanejournal',
Libertree = 'libertree',
Linkedin = 'linkedin',
Livejournal = 'livejournal',
Mediagoblin = 'mediagoblin',
Myspace = 'myspace',
Pinterest = 'pinterest',
Pnut = 'pnut',
Posterous = 'posterous',
Pumpio = 'pumpio',
Redmatrix = 'redmatrix',
Rss2 = 'rss2',
Smtp = 'smtp',
Tent = 'tent',
Tumblr = 'tumblr',
Twitter = 'twitter',
Wordpress = 'wordpress',
Xmpp = 'xmpp'
}
/** Usage statistics for this server. */
export interface Usage {
users: Users
localPosts: number | null
localComments: number | null
}
/** Statistics about the users of this server. */
export interface Users {
total: number | null
activeHalfyear: number | null
activeMonth: number | null
}
export function watchNote(watcherId: string, noteAuthorId: string, noteId: string): Promise<void>
export function unwatchNote(watcherId: string, noteId: string): Promise<void>
export function publishToChannelStream(channelId: string, userId: string): void
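
The new napi exports make the Rust nodeinfo code callable from TypeScript. A minimal usage sketch, assuming an async/ESM context (the host string and logging are illustrative, not part of this diff):

import { fetchNodeinfo, nodeinfo_2_0, type Nodeinfo } from "backend-rs";

// Resolve /.well-known/nodeinfo on a remote host and deserialize the advertised
// 2.0/2.1 document into the 2.0 shape.
const remote: Nodeinfo = await fetchNodeinfo("info.firefish.dev");
console.log(remote.software.name, remote.software.version, remote.openRegistrations);

// Generate this instance's own nodeinfo 2.0 document (cached on the Rust side for an hour).
const own = await nodeinfo_2_0();
console.log(own.usage?.users?.total);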

View File

@ -310,7 +310,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, watchNote, unwatchNote, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, fetchNodeinfo, nodeinfo_2_1, nodeinfo_2_0, Protocol, Inbound, Outbound, watchNote, unwatchNote, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding
module.exports.SECOND = SECOND
module.exports.MINUTE = MINUTE
@ -364,6 +364,12 @@ module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum
module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum
module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum
module.exports.initializeRustLogger = initializeRustLogger
module.exports.fetchNodeinfo = fetchNodeinfo
module.exports.nodeinfo_2_1 = nodeinfo_2_1
module.exports.nodeinfo_2_0 = nodeinfo_2_0
module.exports.Protocol = Protocol
module.exports.Inbound = Inbound
module.exports.Outbound = Outbound
module.exports.watchNote = watchNote
module.exports.unwatchNote = unwatchNote
module.exports.publishToChannelStream = publishToChannelStream

View File

@ -1,3 +1,4 @@
pub mod log;
pub mod nodeinfo;
pub mod note;
pub mod stream;

View File

@ -0,0 +1,161 @@
use crate::service::nodeinfo::schema::*;
use crate::util::http_client;
use isahc::AsyncReadResponseExt;
use serde::{Deserialize, Serialize};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Http client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
#[error("Http error: {0}")]
HttpErr(#[from] isahc::Error),
#[error("Bad status: {0}")]
BadStatus(String),
#[error("Failed to parse response body as text: {0}")]
ResponseErr(#[from] std::io::Error),
#[error("Failed to parse response body as json: {0}")]
JsonErr(#[from] serde_json::Error),
#[error("No nodeinfo provided")]
MissingNodeinfo,
}
#[derive(Deserialize, Serialize, Debug)]
pub struct NodeinfoLinks {
links: Vec<NodeinfoLink>,
}
#[derive(Deserialize, Serialize, Debug)]
pub struct NodeinfoLink {
rel: String,
href: String,
}
#[inline]
fn wellknown_nodeinfo_url(host: &str) -> String {
format!("https://{}/.well-known/nodeinfo", host)
}
async fn fetch_nodeinfo_links(host: &str) -> Result<NodeinfoLinks, Error> {
let client = http_client::client()?;
let wellknown_url = wellknown_nodeinfo_url(host);
let mut wellknown_response = client.get_async(&wellknown_url).await?;
if !wellknown_response.status().is_success() {
tracing::debug!("{:#?}", wellknown_response.body());
return Err(Error::BadStatus(format!(
"{} returned {}",
wellknown_url,
wellknown_response.status()
)));
}
Ok(serde_json::from_str(&wellknown_response.text().await?)?)
}
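// Pick the href of the first advertised link whose rel is the nodeinfo 2.1 or 2.0 schema.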
fn check_nodeinfo_link(links: NodeinfoLinks) -> Result<String, Error> {
for link in links.links {
if link.rel == "http://nodeinfo.diaspora.software/ns/schema/2.1"
|| link.rel == "http://nodeinfo.diaspora.software/ns/schema/2.0"
{
return Ok(link.href);
}
}
Err(Error::MissingNodeinfo)
}
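// Download the nodeinfo document from the resolved link and deserialize it into the 2.0 shape.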
async fn fetch_nodeinfo_impl(nodeinfo_link: &str) -> Result<Nodeinfo20, Error> {
let client = http_client::client()?;
let mut response = client.get_async(nodeinfo_link).await?;
if !response.status().is_success() {
tracing::debug!("{:#?}", response.body());
return Err(Error::BadStatus(format!(
"{} returned {}",
nodeinfo_link,
response.status()
)));
}
Ok(serde_json::from_str(&response.text().await?)?)
}
// for napi export
type Nodeinfo = Nodeinfo20;
#[crate::export]
pub async fn fetch_nodeinfo(host: &str) -> Result<Nodeinfo, Error> {
tracing::info!("fetching from {}", host);
let links = fetch_nodeinfo_links(host).await?;
let nodeinfo_link = check_nodeinfo_link(links)?;
fetch_nodeinfo_impl(&nodeinfo_link).await
}
#[cfg(test)]
mod unit_test {
use super::{check_nodeinfo_link, fetch_nodeinfo, NodeinfoLink, NodeinfoLinks};
use pretty_assertions::assert_eq;
#[test]
fn test_check_nodeinfo_link() {
let links_1 = NodeinfoLinks {
links: vec![
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.0".to_string(),
href: "https://example.com/dummy".to_string(),
},
NodeinfoLink {
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0".to_string(),
href: "https://example.com/real".to_string(),
},
],
};
assert_eq!(
check_nodeinfo_link(links_1).unwrap(),
"https://example.com/real"
);
let links_2 = NodeinfoLinks {
links: vec![
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.0".to_string(),
href: "https://example.com/dummy".to_string(),
},
NodeinfoLink {
rel: "http://nodeinfo.diaspora.software/ns/schema/2.1".to_string(),
href: "https://example.com/real".to_string(),
},
],
};
assert_eq!(
check_nodeinfo_link(links_2).unwrap(),
"https://example.com/real"
);
let links_3 = NodeinfoLinks {
links: vec![
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.0".to_string(),
href: "https://example.com/dummy/2.0".to_string(),
},
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.1".to_string(),
href: "https://example.com/dummy/2.1".to_string(),
},
],
};
check_nodeinfo_link(links_3).expect_err("No nodeinfo");
}
#[tokio::test]
async fn test_fetch_nodeinfo() {
assert_eq!(
fetch_nodeinfo("info.firefish.dev")
.await
.unwrap()
.software
.name,
"firefish"
);
}
}

View File

@ -0,0 +1,142 @@
use crate::config::CONFIG;
use crate::database::cache;
use crate::database::db_conn;
use crate::misc::meta::fetch_meta;
use crate::model::entity::{note, user};
use crate::service::nodeinfo::schema::*;
use sea_orm::{ColumnTrait, DbErr, EntityTrait, PaginatorTrait, QueryFilter};
use serde_json::json;
use std::collections::HashMap;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Database error: {0}")]
DbErr(#[from] DbErr),
#[error("Cache error: {0}")]
CacheErr(#[from] cache::Error),
#[error("Failed to serialize nodeinfo to JSON: {0}")]
JsonErr(#[from] serde_json::Error),
}
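// Returns (local_users, local_active_halfyear, local_active_month, local_posts), counted concurrently.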
async fn statistics() -> Result<(u64, u64, u64, u64), DbErr> {
let db = db_conn().await?;
let now = chrono::Local::now().naive_local();
const MONTH: chrono::TimeDelta = chrono::Duration::seconds(2_592_000); // 30 days
const HALF_YEAR: chrono::TimeDelta = chrono::Duration::seconds(15_552_000); // 180 days
let local_users = user::Entity::find()
.filter(user::Column::Host.is_null())
.count(db);
let local_active_halfyear = user::Entity::find()
.filter(user::Column::Host.is_null())
.filter(user::Column::LastActiveDate.gt(now - HALF_YEAR))
.count(db);
let local_active_month = user::Entity::find()
.filter(user::Column::Host.is_null())
.filter(user::Column::LastActiveDate.gt(now - MONTH))
.count(db);
let local_posts = note::Entity::find()
.filter(note::Column::UserHost.is_null())
.count(db);
tokio::try_join!(
local_users,
local_active_halfyear,
local_active_month,
local_posts
)
}
async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
let (local_users, local_active_halfyear, local_active_month, local_posts) =
statistics().await?;
let meta = fetch_meta(true).await?;
let metadata = HashMap::from([
(
"nodeName".to_string(),
json!(meta.name.unwrap_or(CONFIG.host.clone())),
),
("nodeDescription".to_string(), json!(meta.description)),
("repositoryUrl".to_string(), json!(meta.repository_url)),
(
"enableLocalTimeline".to_string(),
json!(!meta.disable_local_timeline),
),
(
"enableRecommendedTimeline".to_string(),
json!(!meta.disable_recommended_timeline),
),
(
"enableGlobalTimeline".to_string(),
json!(!meta.disable_global_timeline),
),
(
"enableGuestTimeline".to_string(),
json!(meta.enable_guest_timeline),
),
(
"maintainer".to_string(),
json!({"name":meta.maintainer_name,"email":meta.maintainer_email}),
),
("proxyAccountName".to_string(), json!(meta.proxy_account_id)),
(
"themeColor".to_string(),
json!(meta.theme_color.unwrap_or("#31748f".to_string())),
),
]);
Ok(Nodeinfo21 {
version: "2.1".to_string(),
software: Software21 {
name: "firefish".to_string(),
version: CONFIG.version.clone(),
repository: Some(meta.repository_url),
homepage: Some("https://firefish.dev/firefish/firefish".to_string()),
},
protocols: vec![Protocol::Activitypub],
services: Services {
inbound: vec![],
outbound: vec![Outbound::Atom1, Outbound::Rss2],
},
open_registrations: !meta.disable_registration,
usage: Usage {
users: Users {
total: Some(local_users as u32),
active_halfyear: Some(local_active_halfyear as u32),
active_month: Some(local_active_month as u32),
},
local_posts: Some(local_posts as u32),
local_comments: None,
},
metadata,
})
}
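// Serve nodeinfo 2.1 from the cache when available; otherwise generate it and cache it for one hour.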
pub async fn nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
const NODEINFO_2_1_CACHE_KEY: &str = "nodeinfo_2_1";
let cached = cache::get::<Nodeinfo21>(NODEINFO_2_1_CACHE_KEY)?;
if let Some(nodeinfo) = cached {
Ok(nodeinfo)
} else {
let nodeinfo = generate_nodeinfo_2_1().await?;
cache::set(NODEINFO_2_1_CACHE_KEY, &nodeinfo, 60 * 60)?;
Ok(nodeinfo)
}
}
pub async fn nodeinfo_2_0() -> Result<Nodeinfo20, Error> {
Ok(nodeinfo_2_1().await?.into())
}
#[crate::export(js_name = "nodeinfo_2_1")]
pub async fn nodeinfo_2_1_as_json() -> Result<serde_json::Value, Error> {
Ok(serde_json::to_value(nodeinfo_2_1().await?)?)
}
#[crate::export(js_name = "nodeinfo_2_0")]
pub async fn nodeinfo_2_0_as_json() -> Result<serde_json::Value, Error> {
Ok(serde_json::to_value(nodeinfo_2_0().await?)?)
}

View File

@ -0,0 +1,3 @@
pub mod fetch;
pub mod generate;
pub mod schema;

View File

@ -0,0 +1,263 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
// TODO: I want to use these macros but they don't work with rmp_serde
// - #[serde(skip_serializing_if = "Option::is_none")] (https://github.com/3Hren/msgpack-rust/issues/86)
// - #[serde(tag = "version", rename = "2.1")] (https://github.com/3Hren/msgpack-rust/issues/318)
/// NodeInfo schema version 2.1. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Nodeinfo21 {
/// The schema version, must be 2.1.
pub version: String,
/// Metadata about server software in use.
pub software: Software21,
/// The protocols supported on this server.
pub protocols: Vec<Protocol>,
/// The third party sites this server can connect to via their application API.
pub services: Services,
/// Whether this server allows open self-registration.
pub open_registrations: bool,
/// Usage statistics for this server.
pub usage: Usage,
/// Free form key value pairs for software specific values. Clients should not rely on any specific key present.
pub metadata: HashMap<String, serde_json::Value>,
}
/// NodeInfo schema version 2.0. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.0
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, js_name = "Nodeinfo")]
pub struct Nodeinfo20 {
/// The schema version, must be 2.0.
pub version: String,
/// Metadata about server software in use.
pub software: Software20,
/// The protocols supported on this server.
pub protocols: Vec<Protocol>,
/// The third party sites this server can connect to via their application API.
pub services: Services,
/// Whether this server allows open self-registration.
pub open_registrations: bool,
/// Usage statistics for this server.
pub usage: Usage,
/// Free form key value pairs for software specific values. Clients should not rely on any specific key present.
pub metadata: HashMap<String, serde_json::Value>,
}
/// Metadata about server software in use (version 2.1).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Software21 {
/// The canonical name of this server software.
pub name: String,
/// The version of this server software.
pub version: String,
/// The url of the source code repository of this server software.
pub repository: Option<String>,
/// The url of the homepage of this server software.
pub homepage: Option<String>,
}
/// Metadata about server software in use (version 2.0).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Software20 {
/// The canonical name of this server software.
pub name: String,
/// The version of this server software.
pub version: String,
}
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Protocol {
Activitypub,
Buddycloud,
Dfrn,
Diaspora,
Libertree,
Ostatus,
Pumpio,
Tent,
Xmpp,
Zot,
}
/// The third party sites this server can connect to via their application API.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Services {
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
pub inbound: Vec<Inbound>,
/// The third party sites this server can publish messages to on the behalf of a user.
pub outbound: Vec<Outbound>,
}
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Inbound {
#[serde(rename = "atom1.0")]
Atom1,
Gnusocial,
Imap,
Pnut,
#[serde(rename = "pop3")]
Pop3,
Pumpio,
#[serde(rename = "rss2.0")]
Rss2,
Twitter,
}
/// The third party sites this server can publish messages to on the behalf of a user.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Outbound {
#[serde(rename = "atom1.0")]
Atom1,
Blogger,
Buddycloud,
Diaspora,
Dreamwidth,
Drupal,
Facebook,
Friendica,
Gnusocial,
Google,
Insanejournal,
Libertree,
Linkedin,
Livejournal,
Mediagoblin,
Myspace,
Pinterest,
Pnut,
Posterous,
Pumpio,
Redmatrix,
#[serde(rename = "rss2.0")]
Rss2,
Smtp,
Tent,
Tumblr,
Twitter,
Wordpress,
Xmpp,
}
/// Usage statistics for this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Usage {
pub users: Users,
pub local_posts: Option<u32>,
pub local_comments: Option<u32>,
}
/// Statistics about the users of this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Users {
pub total: Option<u32>,
pub active_halfyear: Option<u32>,
pub active_month: Option<u32>,
}
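// Downgrading 2.1 to 2.0 drops the repository/homepage fields and rewrites the version string.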
impl From<Software21> for Software20 {
fn from(software: Software21) -> Self {
Self {
name: software.name,
version: software.version,
}
}
}
impl From<Nodeinfo21> for Nodeinfo20 {
fn from(nodeinfo: Nodeinfo21) -> Self {
Self {
version: "2.0".to_string(),
software: nodeinfo.software.into(),
protocols: nodeinfo.protocols,
services: nodeinfo.services,
open_registrations: nodeinfo.open_registrations,
usage: nodeinfo.usage,
metadata: nodeinfo.metadata,
}
}
}
#[cfg(test)]
mod unit_test {
use super::{Nodeinfo20, Nodeinfo21};
use pretty_assertions::assert_eq;
#[test]
fn parse_nodeinfo_2_0() {
let json_str_1 = r#"{"version":"2.0","software":{"name":"mastodon","version":"4.3.0-nightly.2024-04-30"},"protocols":["activitypub"],"services":{"outbound":[],"inbound":[]},"usage":{"users":{"total":1935016,"activeMonth":238223,"activeHalfyear":618795},"localPosts":90175135},"openRegistrations":true,"metadata":{"nodeName":"Mastodon","nodeDescription":"The original server operated by the Mastodon gGmbH non-profit"}}"#;
let parsed_1: Nodeinfo20 = serde_json::from_str(json_str_1).unwrap();
let serialized_1 = serde_json::to_string(&parsed_1).unwrap();
let reparsed_1: Nodeinfo20 = serde_json::from_str(&serialized_1).unwrap();
assert_eq!(parsed_1, reparsed_1);
assert_eq!(parsed_1.software.name, "mastodon");
assert_eq!(parsed_1.software.version, "4.3.0-nightly.2024-04-30");
let json_str_2 = r#"{"version":"2.0","software":{"name":"peertube","version":"5.0.0"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":false,"usage":{"users":{"total":5,"activeMonth":0,"activeHalfyear":2},"localPosts":1018,"localComments":1},"metadata":{"taxonomy":{"postsName":"Videos"},"nodeName":"Blender Video","nodeDescription":"Blender Foundation PeerTube instance.","nodeConfig":{"search":{"remoteUri":{"users":true,"anonymous":false}},"plugin":{"registered":[]},"theme":{"registered":[],"default":"default"},"email":{"enabled":false},"contactForm":{"enabled":true},"transcoding":{"hls":{"enabled":true},"webtorrent":{"enabled":true},"enabledResolutions":[1080]},"live":{"enabled":false,"transcoding":{"enabled":true,"enabledResolutions":[]}},"import":{"videos":{"http":{"enabled":true},"torrent":{"enabled":false}}},"autoBlacklist":{"videos":{"ofUsers":{"enabled":false}}},"avatar":{"file":{"size":{"max":4194304},"extensions":[".png",".jpeg",".jpg",".gif",".webp"]}},"video":{"image":{"extensions":[".png",".jpg",".jpeg",".webp"],"size":{"max":4194304}},"file":{"extensions":[".webm",".ogv",".ogg",".mp4",".mkv",".mov",".qt",".mqv",".m4v",".flv",".f4v",".wmv",".avi",".3gp",".3gpp",".3g2",".3gpp2",".nut",".mts",".m2ts",".mpv",".m2v",".m1v",".mpg",".mpe",".mpeg",".vob",".mxf",".mp3",".wma",".wav",".flac",".aac",".m4a",".ac3"]}},"videoCaption":{"file":{"size":{"max":20971520},"extensions":[".vtt",".srt"]}},"user":{"videoQuota":5368709120,"videoQuotaDaily":-1},"trending":{"videos":{"intervalDays":7}},"tracker":{"enabled":true}}}}"#;
let parsed_2: Nodeinfo20 = serde_json::from_str(json_str_2).unwrap();
let serialized_2 = serde_json::to_string(&parsed_2).unwrap();
let reparsed_2: Nodeinfo20 = serde_json::from_str(&serialized_2).unwrap();
assert_eq!(parsed_2, reparsed_2);
assert_eq!(parsed_2.software.name, "peertube");
assert_eq!(parsed_2.software.version, "5.0.0");
let json_str_3 = r#"{"metadata":{"nodeName":"pixelfed","software":{"homepage":"https://pixelfed.org","repo":"https://github.com/pixelfed/pixelfed"},"config":{"features":{"timelines":{"local":true,"network":true},"mobile_apis":true,"stories":true,"video":true,"import":{"instagram":false,"mastodon":false,"pixelfed":false},"label":{"covid":{"enabled":false,"org":"visit the WHO website","url":"https://www.who.int/emergencies/diseases/novel-coronavirus-2019/advice-for-public"}},"hls":{"enabled":false}}}},"protocols":["activitypub"],"services":{"inbound":[],"outbound":[]},"software":{"name":"pixelfed","version":"0.12.0"},"usage":{"localPosts":24059868,"localComments":0,"users":{"total":112832,"activeHalfyear":24366,"activeMonth":8921}},"version":"2.0","openRegistrations":true}"#;
let parsed_3: Nodeinfo20 = serde_json::from_str(json_str_3).unwrap();
let serialized_3 = serde_json::to_string(&parsed_3).unwrap();
let reparsed_3: Nodeinfo20 = serde_json::from_str(&serialized_3).unwrap();
assert_eq!(parsed_3, reparsed_3);
assert_eq!(parsed_3.software.name, "pixelfed");
assert_eq!(parsed_3.software.version, "0.12.0");
}
#[test]
fn parse_nodeinfo_2_1() {
let json_str_1 = r##"{"version":"2.1","software":{"name":"catodon","version":"24.04-dev.2","repository":"https://codeberg.org/catodon/catodon","homepage":"https://codeberg.org/catodon/catodon"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":true,"usage":{"users":{"total":294,"activeHalfyear":292,"activeMonth":139},"localPosts":22616,"localComments":0},"metadata":{"nodeName":"Catodon Social","nodeDescription":"🌎 Home of Catodon, a new platform for fedi communities, initially based on Iceshrimp/Firefish/Misskey. Be aware that our first release is not out yet, so things are still experimental.","maintainer":{"name":"admin","email":"redacted@example.com"},"langs":[],"tosUrl":"https://example.com/redacted","repositoryUrl":"https://codeberg.org/catodon/catodon","feedbackUrl":"https://codeberg.org/catodon/catodon/issues","disableRegistration":false,"disableLocalTimeline":false,"disableRecommendedTimeline":true,"disableGlobalTimeline":false,"emailRequiredForSignup":true,"postEditing":true,"postImports":false,"enableHcaptcha":true,"enableRecaptcha":false,"maxNoteTextLength":8000,"maxCaptionTextLength":1500,"enableGithubIntegration":false,"enableDiscordIntegration":false,"enableEmail":true,"themeColor":"#31748f"}}"##;
let parsed_1: Nodeinfo21 = serde_json::from_str(json_str_1).unwrap();
let serialized_1 = serde_json::to_string(&parsed_1).unwrap();
let reparsed_1: Nodeinfo21 = serde_json::from_str(&serialized_1).unwrap();
assert_eq!(parsed_1, reparsed_1);
assert_eq!(parsed_1.software.name, "catodon");
assert_eq!(parsed_1.software.version, "24.04-dev.2");
let json_str_2 = r#"{"version":"2.1","software":{"name":"meisskey","version":"10.102.699-m544","repository":"https://github.com/mei23/misskey"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":true,"usage":{"users":{"total":1123,"activeHalfyear":305,"activeMonth":89},"localPosts":268739,"localComments":0},"metadata":{"nodeName":"meisskey.one","nodeDescription":"ローカルタイムラインのないインスタンスなのだわ\n\n\n[通報・報告 (Report)](https://example.com/redacted)","name":"meisskey.one","description":"ローカルタイムラインのないインスタンスなのだわ\n\n\n[通報・報告 (Report)](https://example.com/redacted)","maintainer":{"name":"redacted","email":"redacted"},"langs":[],"announcements":[{"title":"問題・要望など","text":"問題・要望などは <a href=\"https://example.com/redacted\">#meisskeyone要望</a> で投稿してなのだわ"}],"relayActor":"https://example.com/redacted","relays":[],"disableRegistration":false,"disableLocalTimeline":true,"enableRecaptcha":true,"maxNoteTextLength":5000,"enableTwitterIntegration":false,"enableGithubIntegration":false,"enableDiscordIntegration":false,"enableServiceWorker":true,"proxyAccountName":"ghost"}}"#;
let parsed_2: Nodeinfo21 = serde_json::from_str(json_str_2).unwrap();
let serialized_2 = serde_json::to_string(&parsed_2).unwrap();
let reparsed_2: Nodeinfo21 = serde_json::from_str(&serialized_2).unwrap();
assert_eq!(parsed_2, reparsed_2);
assert_eq!(parsed_2.software.name, "meisskey");
assert_eq!(parsed_2.software.version, "10.102.699-m544");
let json_str_3 = r##"{"metadata":{"enableGlobalTimeline":true,"enableGuestTimeline":false,"enableLocalTimeline":true,"enableRecommendedTimeline":false,"maintainer":{"name":"Firefish dev team"},"nodeDescription":"","nodeName":"Firefish","repositoryUrl":"https://firefish.dev/firefish/firefish","themeColor":"#F25A85"},"openRegistrations":false,"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"software":{"homepage":"https://firefish.dev/firefish/firefish","name":"firefish","repository":"https://firefish.dev/firefish/firefish","version":"20240504"},"usage":{"localPosts":23857,"users":{"activeHalfyear":7,"activeMonth":7,"total":9}},"version":"2.1"}"##;
let parsed_3: Nodeinfo20 = serde_json::from_str(json_str_3).unwrap();
let serialized_3 = serde_json::to_string(&parsed_3).unwrap();
let reparsed_3: Nodeinfo20 = serde_json::from_str(&serialized_3).unwrap();
assert_eq!(parsed_3, reparsed_3);
assert_eq!(parsed_3.software.name, "firefish");
assert_eq!(parsed_3.software.version, "20240504");
}
}

View File

@ -18,6 +18,7 @@ pub fn client() -> Result<HttpClient, Error> {
.get_or_try_init(|| {
let mut builder = HttpClient::builder()
.timeout(Duration::from_secs(10))
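// Send the instance's configured user agent with every outgoing request.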
.default_header("user-agent", &CONFIG.user_agent)
.dns_cache(DnsCache::Timeout(Duration::from_secs(60 * 60)));
if let Some(proxy_url) = &CONFIG.proxy {

View File

@ -1,7 +1,7 @@
// https://gist.github.com/tkrotoff/a6baf96eb6b61b445a9142e5555511a0
import type { Primitive } from "type-fest";
type NullToUndefined<T> = T extends null
export type NullToUndefined<T> = T extends null
? undefined
: T extends Primitive | Function | Date | RegExp
? T
@ -15,7 +15,7 @@ type NullToUndefined<T> = T extends null
? { [K in keyof T]: NullToUndefined<T[K]> }
: unknown;
type UndefinedToNull<T> = T extends undefined
export type UndefinedToNull<T> = T extends undefined
? null
: T extends Primitive | Function | Date | RegExp
? T
@ -47,6 +47,16 @@ function _nullToUndefined<T>(obj: T): NullToUndefined<T> {
return obj as any;
}
/**
* Recursively converts all null values to undefined.
*
* @param obj object to convert
* @returns a copy of the object with all its null values converted to undefined
*/
export function fromRustObject<T>(obj: T) {
return _nullToUndefined(structuredClone(obj));
}
function _undefinedToNull<T>(obj: T): UndefinedToNull<T> {
if (obj === undefined) {
return null as any;
@ -71,6 +81,6 @@ function _undefinedToNull<T>(obj: T): UndefinedToNull<T> {
* @param obj object to convert
* @returns a copy of the object with all its undefined values converted to null
*/
export function undefinedToNull<T>(obj: T) {
export function toRustObject<T>(obj: T) {
return _undefinedToNull(structuredClone(obj));
}
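
The renamed helpers make the null/undefined conversion at the Rust/TypeScript boundary explicit in both directions. A minimal sketch of the intended round-trip (the sample object is illustrative):

import { fromRustObject, toRustObject } from "@/prelude/undefined-to-null.js";

// Values coming out of backend-rs use null for missing data; TypeScript code prefers undefined.
const fromRust = fromRustObject({ name: "example", themeColor: null });
// -> { name: "example", themeColor: undefined }

// Before handing an object back to backend-rs, convert undefined fields to null.
const forRust = toRustObject({ name: "example", themeColor: undefined });
// -> { name: "example", themeColor: null }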

View File

@ -1,9 +1,7 @@
import Router from "@koa/router";
import { config } from "@/config.js";
import { fetchMeta } from "backend-rs";
import { Users, Notes } from "@/models/index.js";
import { IsNull, MoreThan } from "typeorm";
import { Cache } from "@/misc/cache.js";
import { nodeinfo_2_0, nodeinfo_2_1 } from "backend-rs";
import { fromRustObject } from "@/prelude/undefined-to-null.js";
const router = new Router();
@ -22,101 +20,14 @@ export const links = [
},
];
const nodeinfo2 = async () => {
const now = Date.now();
const [meta, total, activeHalfyear, activeMonth, localPosts] =
await Promise.all([
fetchMeta(false),
Users.count({ where: { host: IsNull() } }),
Users.count({
where: {
host: IsNull(),
lastActiveDate: MoreThan(new Date(now - 15552000000)),
},
}),
Users.count({
where: {
host: IsNull(),
lastActiveDate: MoreThan(new Date(now - 2592000000)),
},
}),
Notes.count({ where: { userHost: IsNull() } }),
]);
const proxyAccount = meta.proxyAccountId
? await Users.pack(meta.proxyAccountId).catch(() => null)
: null;
return {
software: {
name: "firefish",
version: config.version,
repository: meta.repositoryUrl,
homepage: "https://firefish.dev/firefish/firefish",
},
protocols: ["activitypub"],
services: {
inbound: [] as string[],
outbound: ["atom1.0", "rss2.0"],
},
openRegistrations: !meta.disableRegistration,
usage: {
users: { total, activeHalfyear, activeMonth },
localPosts,
localComments: 0,
},
metadata: {
nodeName: meta.name,
nodeDescription: meta.description,
maintainer: {
name: meta.maintainerName,
email: meta.maintainerEmail,
},
langs: meta.langs,
tosUrl: meta.tosUrl,
repositoryUrl: meta.repositoryUrl,
feedbackUrl: meta.feedbackUrl,
disableRegistration: meta.disableRegistration,
disableLocalTimeline: meta.disableLocalTimeline,
disableRecommendedTimeline: meta.disableRecommendedTimeline,
disableGlobalTimeline: meta.disableGlobalTimeline,
emailRequiredForSignup: meta.emailRequiredForSignup,
postEditing: true,
postImports: meta.experimentalFeatures?.postImports || false,
enableHcaptcha: meta.enableHcaptcha,
enableRecaptcha: meta.enableRecaptcha,
maxNoteTextLength: config.maxNoteLength,
maxCaptionTextLength: config.maxCaptionLength,
enableEmail: meta.enableEmail,
enableServiceWorker: meta.enableServiceWorker,
proxyAccountName: proxyAccount ? proxyAccount.username : null,
themeColor: meta.themeColor || "#31748f",
},
};
};
const cache = new Cache<Awaited<ReturnType<typeof nodeinfo2>>>(
"nodeinfo",
60 * 10,
);
router.get(nodeinfo2_1path, async (ctx) => {
const base = await cache.fetch(null, () => nodeinfo2());
ctx.body = { version: "2.1", ...base };
ctx.set("Cache-Control", "public, max-age=600");
ctx.body = fromRustObject(await nodeinfo_2_1());
ctx.set("Cache-Control", "public, max-age=3600");
});
router.get(nodeinfo2_0path, async (ctx) => {
const base = await cache.fetch(null, () => nodeinfo2());
// @ts-ignore
base.software.repository = undefined;
// @ts-ignore
base.software.homepage = undefined;
ctx.body = { version: "2.0", ...base };
ctx.set("Cache-Control", "public, max-age=600");
ctx.body = fromRustObject(await nodeinfo_2_0());
ctx.set("Cache-Control", "public, max-age=3600");
});
export default router;

View File

@ -10,6 +10,7 @@ import {
import { Instances } from "@/models/index.js";
import { getFetchInstanceMetadataLock } from "@/misc/app-lock.js";
import Logger from "@/services/logger.js";
import { type Nodeinfo, fetchNodeinfo } from "backend-rs";
import { inspect } from "node:util";
const logger = new Logger("metadata", "cyan");
@ -36,7 +37,7 @@ export async function fetchInstanceMetadata(
try {
const [info, dom, manifest] = await Promise.all([
fetchNodeinfo(instance).catch(() => null),
fetchNodeinfo(instance.host).catch(() => null),
fetchDom(instance).catch(() => null),
fetchManifest(instance).catch(() => null),
]);
@ -57,30 +58,26 @@ export async function fetchInstanceMetadata(
if (info) {
updates.softwareName =
info.software?.name
?.toLowerCase()
info.software.name
.toLowerCase()
.substring(0, MAX_LENGTH_INSTANCE.softwareName) || null;
updates.softwareVersion =
info.software?.version?.substring(
info.software.version.substring(
0,
MAX_LENGTH_INSTANCE.softwareVersion,
) || null;
updates.openRegistrations = info.openRegistrations;
updates.maintainerName = info.metadata
? info.metadata.maintainer
? info.metadata.maintainer.name?.substring(
0,
MAX_LENGTH_INSTANCE.maintainerName,
) || null
: null
updates.maintainerName = info.metadata.maintainer
? info.metadata.maintainer.name?.substring(
0,
MAX_LENGTH_INSTANCE.maintainerName,
) || null
: null;
updates.maintainerEmail = info.metadata
? info.metadata.maintainer
? info.metadata.maintainer.email?.substring(
0,
MAX_LENGTH_INSTANCE.maintainerEmail,
) || null
: null
updates.maintainerEmail = info.metadata.maintainer
? info.metadata.maintainer.email?.substring(
0,
MAX_LENGTH_INSTANCE.maintainerEmail,
) || null
: null;
}
@ -115,75 +112,6 @@ export async function fetchInstanceMetadata(
}
}
type NodeInfo = {
openRegistrations?: boolean;
software?: {
name?: string;
version?: string;
};
metadata?: {
name?: string;
nodeName?: string;
nodeDescription?: string;
description?: string;
maintainer?: {
name?: string;
email?: string;
};
};
};
async function fetchNodeinfo(instance: Instance): Promise<NodeInfo> {
logger.info(`Fetching nodeinfo of ${instance.host} ...`);
try {
const wellknown = (await getJson(
`https://${instance.host}/.well-known/nodeinfo`,
).catch((e) => {
if (e.statusCode === 404) {
throw new Error("No nodeinfo provided");
} else {
throw new Error(inspect(e));
}
})) as Record<string, unknown>;
if (wellknown.links == null || !Array.isArray(wellknown.links)) {
throw new Error("No wellknown links");
}
const links = wellknown.links as any[];
const lnik1_0 = links.find(
(link) => link.rel === "http://nodeinfo.diaspora.software/ns/schema/1.0",
);
const lnik2_0 = links.find(
(link) => link.rel === "http://nodeinfo.diaspora.software/ns/schema/2.0",
);
const lnik2_1 = links.find(
(link) => link.rel === "http://nodeinfo.diaspora.software/ns/schema/2.1",
);
const link = lnik2_1 || lnik2_0 || lnik1_0;
if (link == null) {
throw new Error("No nodeinfo link provided");
}
const info = await getJson(link.href).catch((e) => {
throw new Error(inspect(e));
});
logger.info(`Successfuly fetched nodeinfo of ${instance.host}`);
return info as NodeInfo;
} catch (e) {
logger.error(
`Failed to fetch nodeinfo of ${instance.host}:\n${inspect(e)}`,
);
throw e;
}
}
async function fetchDom(instance: Instance): Promise<Window["document"]> {
logger.info(`Fetching HTML of ${instance.host} ...`);
@ -272,7 +200,7 @@ async function fetchIconUrl(
}
async function getThemeColor(
info: NodeInfo | null,
info: Nodeinfo | null,
doc: Window["document"] | null,
manifest: Record<string, any> | null,
): Promise<string | null> {
@ -290,7 +218,7 @@ async function getThemeColor(
}
async function getSiteName(
info: NodeInfo | null,
info: Nodeinfo | null,
doc: Window["document"] | null,
manifest: Record<string, any> | null,
): Promise<string | undefined | null> {
@ -318,7 +246,7 @@ async function getSiteName(
}
async function getDescription(
info: NodeInfo | null,
info: Nodeinfo | null,
doc: Window["document"] | null,
manifest: Record<string, any> | null,
): Promise<string | null> {

View File

@ -66,7 +66,7 @@ import { Mutex } from "redis-semaphore";
import { langmap } from "@/misc/langmap.js";
import Logger from "@/services/logger.js";
import { inspect } from "node:util";
import { undefinedToNull } from "@/prelude/undefined-to-null.js";
import { toRustObject } from "@/prelude/undefined-to-null.js";
const logger = new Logger("create-note");
@ -404,7 +404,7 @@ export default async (
checkHitAntenna(antenna, note, user).then((hit) => {
if (hit) {
// TODO: do this more sanely
addNoteToAntenna(antenna.id, undefinedToNull(note) as Note);
addNoteToAntenna(antenna.id, toRustObject(note));
}
});
}

View File

@ -21,8 +21,6 @@
"@syuilo/aiscript": "0.17.0",
"@types/autosize": "^4.0.3",
"@types/glob": "8.1.0",
"@types/gulp": "4.0.17",
"@types/gulp-rename": "2.0.6",
"@types/insert-text-at-cursor": "^0.3.2",
"@types/katex": "0.16.7",
"@types/matter-js": "0.19.6",

File diff suppressed because it is too large

scripts/copy-assets.mjs Normal file
View File

@ -0,0 +1,41 @@
import fs from "node:fs/promises";
import path, { join } from "node:path";
import { fileURLToPath } from "node:url";
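// Resolve every path relative to the repository root so the script works from any working directory.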
const repositoryRootDir = join(path.dirname(fileURLToPath(import.meta.url)), "../");
const file = (relativePath) => join(repositoryRootDir, relativePath);
await (async () => {
await fs.rm(file("built/_client_dist_/locales"), { recursive: true, force: true });
await Promise.all([
fs.cp(file("packages/backend/src/server/web"), file("packages/backend/built/server/web"), { recursive: true }),
fs.cp(file("custom/assets"), file("packages/backend/assets"), { recursive: true }),
fs.cp(file("packages/client/node_modules/three/examples/fonts"), file("built/_client_dist_/fonts"), { recursive: true }),
fs.mkdir(file("built/_client_dist_/locales"), { recursive: true }),
]);
const locales = (await import("../locales/index.mjs")).default;
const meta = (await import("../built/meta.json", { assert: { type: "json" } })).default;
for await (const [lang, locale] of Object.entries(locales)) {
await fs.writeFile(
file(`built/_client_dist_/locales/${lang}.${meta.version}.json`),
JSON.stringify({ ...locale, _version_: meta.version }),
"utf-8",
);
}
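// Inline the list of supported languages into the prebuilt server boot scripts.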
const js_assets = [
file("packages/backend/built/server/web/boot.js"),
file("packages/backend/built/server/web/bios.js"),
file("packages/backend/built/server/web/cli.js"),
];
for await (const js_file of js_assets) {
const content = (await fs.readFile(js_file, "utf-8"))
.replace("SUPPORTED_LANGS", JSON.stringify(Object.keys(locales)));
await fs.writeFile(js_file, content, "utf-8");
}
// TODO?: minify packages/backend/built/server/web/*.css
})();

View File

@ -1,6 +1,7 @@
import path, { join } from "node:path";
import { fileURLToPath } from "node:url";
import { execa } from "execa";
import fs from "node:fs";
(async () => {
const __dirname = path.dirname(fileURLToPath(import.meta.url));
@ -32,4 +33,6 @@ import { execa } from "execa";
stdio: "inherit",
}
);
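// Copy the backend-rs napi loader into built/ (as the Dockerfile does) so dev builds can resolve the native binding.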
fs.copyFileSync("packages/backend-rs/index.js", "packages/backend-rs/built/index.js");
})();

View File

@ -11,12 +11,6 @@ import { execa } from "execa";
stderr: process.stderr,
});
execa("pnpm", ["dlx", "gulp", "watch"], {
cwd: join(__dirname, "/../"),
stdout: process.stdout,
stderr: process.stderr,
});
execa("pnpm", ["--filter", "backend", "watch"], {
cwd: join(__dirname, "/../"),
stdout: process.stdout,