Compare commits

...

266 Commits

Author SHA1 Message Date
naskya 3b8a7ff31e Merge branch 'develop' into 'main'
release: v20240516

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Gary O'Regan Kelly <gmoregan@icloud.com>

See merge request firefish/firefish!10854
2024-05-16 07:18:17 +00:00
naskya 088dfd21e7
v20240516 2024-05-16 16:16:29 +09:00
naskya 03323e40fa
docs: update notice-for-admins.md 2024-05-16 15:09:40 +09:00
naskya c6e3506bd5
fix: remove unnecessary copy operation (close #10926) 2024-05-16 15:03:53 +09:00
naskya 128fc72778 Merge branch 'renovate/lock-file-maintenance' into 'develop'
chore(deps): lock file maintenance

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10853
2024-05-16 05:17:04 +00:00
CI 310059f6a0 chore(deps): lock file maintenance 2024-05-16 04:05:56 +00:00
naskya 7d4d1c1fbd
fix merge mistake 2024-05-16 08:45:50 +09:00
naskya dbd205972f Merge branch 'refactor/push-notification' into 'develop'
refactor: port push notification sender to backend-rs


See merge request firefish/firefish!10760
2024-05-15 22:19:58 +00:00
naskya 41b32c5535 refactor (backend): port push notification sender to backend-rs 2024-05-15 22:19:58 +00:00
naskya 56be2f034e Merge branch 'renovate/syn-2.x' into 'develop'
chore(deps): update rust crate syn to 2.0.63

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10851
2024-05-15 21:22:39 +00:00
naskya e15bcee86c Merge branch 'renovate/aws-sdk-2.x' into 'develop'
fix(deps): update dependency aws-sdk to v2.1621.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10852
2024-05-15 21:19:48 +00:00
naskya 43326cdf8d Merge branch 'renovate/serde-monorepo' into 'develop'
chore(deps): update rust crate serde to 1.0.202

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10850
2024-05-15 21:16:55 +00:00
CI 7d1947792d fix(deps): update dependency aws-sdk to v2.1621.0 2024-05-15 21:05:33 +00:00
CI d28fe77d9f chore(deps): update rust crate syn to 2.0.63 2024-05-15 21:05:04 +00:00
CI acc13e9b10 chore(deps): update rust crate serde to 1.0.202 2024-05-15 21:04:59 +00:00
naskya 4e31e11f81
docs: use permalink 2024-05-16 05:04:47 +09:00
naskya dddd2779c0
chore: update auto-generated files 2024-05-16 04:57:48 +09:00
naskya 832fc7cd1d
docs: update changelog 2024-05-16 04:56:26 +09:00
naskya a18ad132be
fix: remove $[center] MFM function 2024-05-16 04:51:51 +09:00
naskya 4b96063c23
chore: format 2024-05-16 04:22:41 +09:00
naskya 0de54e02f8
chore (backend): use literals and consts 2024-05-16 04:22:23 +09:00
naskya 101e50926b
chore: remove import assertion 2024-05-16 04:12:10 +09:00
naskya 9cf88f0df6
chore: remove import assertion 2024-05-16 03:49:31 +09:00
naskya efb6cc9132 Merge branch 'renovate/execa-9.x' into 'develop'
chore(deps): update dependency execa to v9.1.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10848
2024-05-15 18:36:18 +00:00
Hosted Weblate 58f3eb4924
Merge branch 'origin/develop' into Weblate 2024-05-15 18:29:30 +00:00
Gary O'Regan Kelly 5adc0e581d
locale: update translations (French)
Currently translated at 100.0% (1932 of 1932 strings)

Translation: Firefish/locales
Translate-URL: https://hosted.weblate.org/projects/firefish/locales/fr/
2024-05-15 20:29:25 +02:00
naskya c0b760cda5 Merge branch 'develop' into 'renovate/execa-9.x'
# Conflicts:
#   package.json
2024-05-15 18:17:35 +00:00
naskya eb967564f9 Merge branch 'renovate/aws-sdk-2.x' into 'develop'
fix(deps): update dependency aws-sdk to v2.1620.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10849
2024-05-15 18:13:47 +00:00
naskya 0085105e72 Merge branch 'renovate/websocket-1.x' into 'develop'
fix(deps): update dependency websocket to v1.0.35

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10847
2024-05-15 18:09:16 +00:00
naskya 217b3ecf80 Merge branch 'renovate/is-svg-5.x' into 'develop'
fix(deps): update dependency is-svg to v5.0.1

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10846
2024-05-15 18:07:52 +00:00
naskya ffeeb3b444 Merge branch 'renovate/bull-4.x' into 'develop'
fix(deps): update dependency bull to v4.12.4

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10845
2024-05-15 18:06:29 +00:00
naskya 2f00947a24 Merge branch 'renovate/swc-monorepo' into 'develop'
chore(deps): update swc monorepo

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10844
2024-05-15 18:05:01 +00:00
naskya 5608129913 Merge branch 'renovate/syn-2.x-lockfile' into 'develop'
chore(deps): update rust crate syn to v2.0.63

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10843
2024-05-15 18:02:51 +00:00
naskya 8923e1f2a7 Merge branch 'renovate/serde-monorepo' into 'develop'
chore(deps): update rust crate serde to v1.0.202

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10842
2024-05-15 17:54:30 +00:00
naskya 8765e6ba54
ci: update renovate config 2024-05-16 02:42:59 +09:00
naskya 7c72738983 Merge branch 'renovate/pnpm-9.x' into 'develop'
chore(deps): update pnpm to v9.1.1

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10841
2024-05-15 17:40:54 +00:00
naskya ff446de7e8 Merge branch 'renovate/vue-tsc-2.x' into 'develop'
chore(deps): update dependency vue-tsc to v2.0.18

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10840
2024-05-15 17:36:25 +00:00
naskya 411d00a7af Merge branch 'renovate/vue-draggable-plus-0.x' into 'develop'
chore(deps): update dependency vue-draggable-plus to v0.4.1

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10839
2024-05-15 17:35:22 +00:00
CI 65a1fa870b fix(deps): update dependency aws-sdk to v2.1620.0 2024-05-15 17:29:54 +00:00
CI 1d25c78866 chore(deps): update dependency execa to v9.1.0 2024-05-15 17:29:35 +00:00
CI 6067eaef04 fix(deps): update dependency websocket to v1.0.35 2024-05-15 17:29:14 +00:00
CI 92299423a3 fix(deps): update dependency is-svg to v5.0.1 2024-05-15 17:28:54 +00:00
CI 65a8984c09 fix(deps): update dependency bull to v4.12.4 2024-05-15 17:28:34 +00:00
CI 99eb364778 chore(deps): update swc monorepo 2024-05-15 17:28:11 +00:00
CI 266c81df1e chore(deps): update rust crate syn to v2.0.63 2024-05-15 17:27:48 +00:00
CI 6a2e91efa1 chore(deps): update rust crate serde to v1.0.202 2024-05-15 17:27:42 +00:00
CI 17cbb9cd1e chore(deps): update pnpm to v9.1.1 2024-05-15 17:26:51 +00:00
CI d6ebb55556 chore(deps): update dependency vue-tsc to v2.0.18 2024-05-15 17:26:26 +00:00
CI 4dd1cff80b chore(deps): update dependency vue-draggable-plus to v0.4.1 2024-05-15 17:26:06 +00:00
naskya 752c6dc75b
ci: update renovate config 2024-05-16 02:24:06 +09:00
naskya cede0fdae2 Merge branch 'renovate/node-20.x' into 'develop'
chore(deps): update dependency @types/node to v20.12.12

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10837
2024-05-15 15:01:54 +00:00
naskya 35d706e45d Merge branch 'renovate/swiper-11.x' into 'develop'
chore(deps): update dependency swiper to v11.1.3

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10838
2024-05-15 15:01:26 +00:00
CI 9075050a67 chore(deps): update dependency swiper to v11.1.3 2024-05-15 10:05:12 +00:00
CI edc2a7d890 chore(deps): update dependency @types/node to v20.12.12 2024-05-15 10:04:53 +00:00
naskya 28e2a24585
chore (backend-rs): cleanup 2024-05-15 16:45:35 +09:00
naskya 2884b2fb42
chore (backend-rs): apply clippy fix 2024-05-15 16:36:26 +09:00
naskya d8e1ab63c0
refactor: port system information checker to backend-rs
The network stat has been removed because it might be inaccurate and/or
should be monitored by other system tools; it may be added back
later if it is wanted
2024-05-15 16:26:46 +09:00
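The Cargo.toml diff further down adds `sysinfo = "0.30.12"`, so the ported checker presumably builds on that crate. A minimal sketch of collecting comparable host information with sysinfo 0.30 — illustrative only, since the actual backend-rs module is not shown in this compare view:

```rust
use sysinfo::System;

fn main() {
    // Take a full snapshot of the host, then refresh the dynamic values.
    let mut sys = System::new_all();
    sys.refresh_all();

    println!("total memory: {} bytes", sys.total_memory());
    println!("used memory:  {} bytes", sys.used_memory());

    // Per-core CPU usage, roughly what a server-info endpoint would report.
    for cpu in sys.cpus() {
        println!("{}: {:.1}%", cpu.name(), cpu.cpu_usage());
    }
}
```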
Gary O'Regan Kelly c2d5859755
locale: update translations (French)
Currently translated at 100.0% (1932 of 1932 strings)

Translation: Firefish/locales
Translate-URL: https://hosted.weblate.org/projects/firefish/locales/fr/
2024-05-14 20:01:54 +02:00
naskya 457bd22b7b
chore (deps): pin versions 2024-05-12 01:29:19 +09:00
naskya 6176c09509
ci: update renovate config 2024-05-12 01:20:17 +09:00
naskya e9068acddd Merge branch 'renovate/lock-file-maintenance' into 'develop'
chore(deps): lock file maintenance

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10834
2024-05-11 16:11:11 +00:00
CI 3ccacb7fce chore(deps): lock file maintenance 2024-05-11 16:07:50 +00:00
naskya 654ab006a6
ci: update config 2024-05-12 01:03:52 +09:00
naskya 1942d772db Merge branch 'renovate/node-20.x' into 'develop'
chore(deps): update node.js to v20

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10833
2024-05-11 15:59:23 +00:00
naskya bc08f0faa9 Merge branch 'renovate/semver-7.x' into 'develop'
fix(deps): update dependency semver to v7.6.2

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10829
2024-05-11 15:56:59 +00:00
naskya fa35d1f4dd
meta: remove node version info from backend-rs/package.json 2024-05-12 00:52:27 +09:00
naskya 4db42272e7 Merge branch 'renovate/eslint-plugin-file-progress-1.x' into 'develop'
chore(deps): update dependency eslint-plugin-file-progress to ^1.4.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10836
2024-05-11 15:51:49 +00:00
naskya cfa3263c46
Merge branch 'develop' into renovate/semver-7.x 2024-05-12 00:49:18 +09:00
naskya b09e418cf6 Merge branch 'renovate/msgpackr-1.x' into 'develop'
fix(deps): update dependency msgpackr to ^1.10.2

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10835
2024-05-11 15:47:12 +00:00
CI e5a5d715b6 chore(deps): update node.js to v20 2024-05-11 15:41:52 +00:00
CI 9ad61fe607 chore(deps): update dependency eslint-plugin-file-progress to ^1.4.0 2024-05-11 15:41:30 +00:00
CI d6983e92aa fix(deps): update dependency semver to v7.6.2 2024-05-11 15:40:50 +00:00
CI 07e2571c79 fix(deps): update dependency msgpackr to ^1.10.2 2024-05-11 15:40:29 +00:00
naskya 0a9289abe3 Merge branch 'renovate/sass-1.x' into 'develop'
chore(deps): update dependency sass to v1.77.1

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10814
2024-05-11 15:25:52 +00:00
naskya 9dda7f955a Merge branch 'renovate/type-fest-4.x' into 'develop'
chore(deps): update dependency type-fest to v4.18.2

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10831
2024-05-11 14:38:51 +00:00
naskya a14a4a5f9c Merge branch 'renovate/redocly-openapi-core-1.x' into 'develop'
fix(deps): update dependency @redocly/openapi-core to v1.12.2

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10826
2024-05-11 14:34:39 +00:00
naskya b3eb12ccff Merge branch 'renovate/eslint-sets-eslint-config-vue3-5.x' into 'develop'
chore(deps): update dependency @eslint-sets/eslint-config-vue3 to ^5.13.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10811
2024-05-11 14:34:16 +00:00
naskya 6cd511d473 Merge branch 'renovate/bull-4.x' into 'develop'
fix(deps): update dependency bull to v4.12.3

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10828
2024-05-11 14:30:51 +00:00
naskya a82ed86539 Merge branch 'renovate/prismjs-1.x' into 'develop'
chore(deps): update dependency @types/prismjs to ^1.26.4

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10825
2024-05-11 14:29:38 +00:00
naskya 30ce11f9fe Merge branch 'renovate/vue-tsc-2.x' into 'develop'
chore(deps): update dependency vue-tsc to v2.0.17

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10807
2024-05-11 14:29:22 +00:00
naskya afb2d30e65 Merge branch 'renovate/pg-8.x' into 'develop'
chore(deps): update dependency @types/pg to ^8.11.6

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10824
2024-05-11 14:27:40 +00:00
naskya fb3cd43102 Merge branch 'renovate/napi-rs-cli-2.x' into 'develop'
chore(deps): update dependency @napi-rs/cli to v2.18.3

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10823
2024-05-11 14:26:54 +00:00
CI 1cd58ce05d chore(deps): update dependency type-fest to v4.18.2 2024-05-11 14:08:58 +00:00
CI 55fdd8b5a2 chore(deps): update dependency sass to v1.77.1 2024-05-11 14:08:37 +00:00
CI 704d39f202 chore(deps): update dependency @eslint-sets/eslint-config-vue3 to ^5.13.0 2024-05-11 14:07:55 +00:00
CI ef95673d50 fix(deps): update dependency bull to v4.12.3 2024-05-11 14:07:10 +00:00
CI eb443c8494 fix(deps): update dependency @redocly/openapi-core to v1.12.2 2024-05-11 14:06:30 +00:00
CI 52e3b49533 chore(deps): update dependency vue-tsc to v2.0.17 2024-05-11 14:06:07 +00:00
CI 11ded9491e chore(deps): update dependency @types/prismjs to ^1.26.4 2024-05-11 14:05:47 +00:00
CI d3b899ccc3 chore(deps): update dependency @types/pg to ^8.11.6 2024-05-11 14:05:27 +00:00
CI 6d6c0fbca0 chore(deps): update dependency @napi-rs/cli to v2.18.3 2024-05-11 14:05:03 +00:00
naskya 2257721fe3 Merge branch 'renovate/execa-9.x' into 'develop'
chore(deps): update dependency execa to v9

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10822
2024-05-11 13:10:38 +00:00
naskya cd836daa9b Merge branch 'renovate/otpauth-9.x' into 'develop'
fix(deps): update dependency otpauth to ^9.2.4

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10809
2024-05-11 13:09:51 +00:00
naskya bf5a2c6ebb Merge branch 'renovate/eslint-monorepo' into 'develop'
chore(deps): update dependency eslint to ^9.2.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10812
2024-05-11 13:09:13 +00:00
naskya f0632d2a6b Merge branch 'renovate/cropperjs-2.x' into 'develop'
chore(deps): update dependency cropperjs to v2.0.0-beta.5

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10805
2024-05-11 13:06:28 +00:00
naskya d192a7c81a
ci: fix config 2024-05-11 21:35:56 +09:00
naskya 847cc47fc4
chore (deps): update cargo dependencies 2024-05-11 21:27:24 +09:00
naskya 59862f16b0
chore (deps): update bull-board 2024-05-11 21:21:50 +09:00
naskya 0e5e96c99a Merge branch 'renovate/vite-5.x' into 'develop'
chore(deps): update dependency vite to v5.2.11

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10806
2024-05-11 12:18:46 +00:00
naskya dbc24a0b8f
Merge branch 'develop' into renovate/vite-5.x 2024-05-11 21:16:34 +09:00
naskya 1eb26263c1 Merge branch 'renovate/ajv-8.x' into 'develop'
fix(deps): update dependency ajv to v8.13.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10819
2024-05-11 12:14:56 +00:00
naskya 639a838736 Merge branch 'renovate/systeminformation-5.x' into 'develop'
fix(deps): update dependency systeminformation to v5.22.8

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10810
2024-05-11 12:14:00 +00:00
naskya 811be1022a Merge branch 'renovate/tesseract.js-5.x' into 'develop'
fix(deps): update dependency tesseract.js to ^5.1.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10821
2024-05-11 12:12:27 +00:00
naskya 7f277878a6 Merge branch 'renovate/ws-8.x' into 'develop'
chore(deps): update dependency ws to v8.17.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10815
2024-05-11 12:11:49 +00:00
naskya 119cbe3e4f Merge branch 'renovate/bull-board-ui-5.x' into 'develop'
fix(deps): update dependency @bull-board/ui to v5.17.1

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10818
2024-05-11 12:10:23 +00:00
naskya 2e15165117 Merge branch 'renovate/rollup-4.x' into 'develop'
chore(deps): update dependency rollup to v4.17.2

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10813
2024-05-11 12:09:49 +00:00
naskya 1b526c651e Merge branch 'renovate/aws-sdk-2.x' into 'develop'
fix(deps): update dependency aws-sdk to v2.1618.0

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10820
2024-05-11 11:57:14 +00:00
naskya afe06edd16
ci: disable scans for now 2024-05-11 20:49:18 +09:00
naskya 3bdf4f9f9c
ci: GitLab CI's cache is slow 2024-05-11 20:35:01 +09:00
naskya 567ba873e3 Merge branch 'renovate/vue-monorepo' into 'develop'
chore(deps): update vue monorepo to v3.4.27

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10808
2024-05-11 11:19:43 +00:00
naskya 6eef39158e
ci: ignore unrelated files in sast 2024-05-11 19:55:12 +09:00
naskya f81739b8d1
container: cargo fetch using root Cargo.toml 2024-05-11 19:30:00 +09:00
naskya ff7fffc711
container: update Dockerfile to use cargo cache on deps updates 2024-05-11 19:19:28 +09:00
naskya daade3865b Merge branch 'renovate/nodemailer-6.x' into 'develop'
chore(deps): update dependency @types/nodemailer to v6.4.15

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10804
2024-05-11 09:48:00 +00:00
naskya 7b24210bd8
ci: add variables 2024-05-11 18:47:01 +09:00
naskya d77da088f8
ci: tweak config 2024-05-11 18:38:02 +09:00
naskya d6541a3ebb Merge branch 'renovate/jsrsasign-10.x' into 'develop'
chore(deps): update dependency @types/jsrsasign to v10.5.14

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10803
2024-05-11 08:48:59 +00:00
naskya 4fbf211e96
ci: add missing file copy 2024-05-11 10:35:00 +09:00
naskya 132615958b
ci: revise conditions 2024-05-11 10:32:10 +09:00
naskya 27c4b4c812
ci: fix typo 2024-05-11 10:28:39 +09:00
naskya 99f5063f4c
ci: reduce cargo builds 2024-05-11 10:27:09 +09:00
CI ef706fff9d chore(deps): update dependency execa to v9 2024-05-11 00:12:40 +00:00
CI 84528680df fix(deps): update dependency tesseract.js to ^5.1.0 2024-05-11 00:12:21 +00:00
CI 11b5f5cc17 fix(deps): update dependency aws-sdk to v2.1618.0 2024-05-11 00:11:48 +00:00
CI da752a158c fix(deps): update dependency ajv to v8.13.0 2024-05-11 00:11:28 +00:00
CI 693c9edb10 fix(deps): update dependency @bull-board/ui to v5.17.1 2024-05-11 00:11:09 +00:00
CI 4110842357 chore(deps): update dependency ws to v8.17.0 2024-05-11 00:10:10 +00:00
CI 8656bdb185 chore(deps): update dependency rollup to v4.17.2 2024-05-11 00:09:25 +00:00
CI af4426653e chore(deps): update dependency eslint to ^9.2.0 2024-05-11 00:09:02 +00:00
CI 3750ca426b fix(deps): update dependency systeminformation to v5.22.8 2024-05-11 00:08:21 +00:00
CI 3a8ca2a2d7 fix(deps): update dependency otpauth to ^9.2.4 2024-05-11 00:08:02 +00:00
CI ace081f163 chore(deps): update vue monorepo to v3.4.27 2024-05-11 00:07:41 +00:00
CI 184b0e4019 chore(deps): update dependency vite to v5.2.11 2024-05-11 00:06:56 +00:00
CI bec62cffc6 chore(deps): update dependency cropperjs to v2.0.0-beta.5 2024-05-11 00:06:36 +00:00
CI d5493f8e5d chore(deps): update dependency @types/nodemailer to v6.4.15 2024-05-11 00:06:17 +00:00
CI 1cb64b7fa8 chore(deps): update dependency @types/jsrsasign to v10.5.14 2024-05-11 00:05:58 +00:00
naskya 8f59f26aa0
ci: disable nodejs-scan
it doesn't work very well with this repository :(
2024-05-11 09:03:31 +09:00
naskya 6f6333f094
ci: edit sast config 2024-05-11 08:50:13 +09:00
naskya 96cbc6799c
ci: add container scanning 2024-05-11 08:41:33 +09:00
naskya d4f1e06535
ci: add sast-ruleset.toml 2024-05-11 07:59:37 +09:00
naskya f9e2bd2448
ci: enable Static Application Security Testing 2024-05-11 07:26:53 +09:00
naskya b07dc87af6
container: reorder build operations to use cargo build cache on deps updates 2024-05-11 05:39:09 +09:00
naskya aa266d91e0
chore (backend-rs): impl From<Acct> for String 2024-05-11 04:54:30 +09:00
naskya 8f8d62aa58
chore (backend): organize imports 2024-05-11 04:52:59 +09:00
naskya d1b33ad76f
chore (backend-rs): move acct to another directory 2024-05-11 04:31:59 +09:00
naskya eeb09028bd
docs: fix indent 2024-05-11 04:23:20 +09:00
naskya ded0de27c5 Merge branch 'renovate/swc-monorepo' into 'develop'
chore(deps): update dependency @swc/core to v1.5.5

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10802
2024-05-10 12:40:32 +00:00
naskya ac57e4c019
ci: update config 2024-05-10 21:39:13 +09:00
CI 291990d320 chore(deps): update dependency @swc/core to v1.5.5 2024-05-10 12:09:21 +00:00
naskya 2ca7bd65aa
ci: update config 2024-05-10 21:05:34 +09:00
naskya fb4e449139
ci: update renovate config 2024-05-10 21:00:29 +09:00
naskya 084c7f1c84 Merge branch 'renovate/biomejs-biome-1.x' into 'develop'
chore(deps): update dependency @biomejs/biome to v1.7.3

Co-authored-by: CI <project_7_bot_1bfaee5701aed20091a86249a967a6c1@noreply.firefish.dev>

See merge request firefish/firefish!10800
2024-05-10 07:52:04 +00:00
naskya 421030c38f
ci: fix rules 2024-05-10 16:34:58 +09:00
naskya f933525856
chore: fix dependencies 2024-05-10 16:25:22 +09:00
CI 9c8e5eabb4 chore(deps): update dependency @biomejs/biome to v1.7.3 2024-05-10 00:32:26 +00:00
naskya 4d3072929e
chore (backend-rs): update doctest comment 2024-05-10 06:59:05 +09:00
naskya 612ce48f44
chore (backend-rs): impl FromStr and Display for Acct 2024-05-10 06:55:51 +09:00
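The `Acct` type itself isn't shown in this compare view; below is a sketch of what such trait implementations usually look like, with assumed field names (`username`, optional `host`) and an assumed `user@host` textual form:

```rust
use std::{convert::Infallible, fmt, str::FromStr};

// Hypothetical shape of the account handle; the real backend-rs type may differ.
pub struct Acct {
    pub username: String,
    pub host: Option<String>,
}

impl FromStr for Acct {
    type Err = Infallible;

    // Parses "user" or "user@host"; a leading '@' is tolerated.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.strip_prefix('@').unwrap_or(s);
        let mut parts = s.splitn(2, '@');
        let username = parts.next().unwrap_or_default().to_owned();
        let host = parts.next().map(str::to_owned);
        Ok(Self { username, host })
    }
}

impl fmt::Display for Acct {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.host {
            Some(host) => write!(f, "{}@{}", self.username, host),
            None => write!(f, "{}", self.username),
        }
    }
}
```

With `Display` in place, the separate `impl From<Acct> for String` commit can be as small as calling `.to_string()` on the account.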
naskya 95fd20a46f
feat (macro-rs): add ts_only_warn macro 2024-05-10 06:54:26 +09:00
naskya 3886c5624b
ci: don't run cargo test with napi feature flag 2024-05-10 06:52:55 +09:00
naskya fc7de024c6
chore: let pnpm detect dependencies 2024-05-10 02:24:25 +09:00
naskya bd88c3399f
chore: update pnpm major version 2024-05-10 01:55:27 +09:00
Hosted Weblate 5b8a164b8d
Merge branch 'origin/develop' into Weblate 2024-05-09 18:21:47 +02:00
jolupa b3cc8cdb3c
locale: update translations (Catalan)
Currently translated at 100.0% (1932 of 1932 strings)

Translation: Firefish/locales
Translate-URL: https://hosted.weblate.org/projects/firefish/locales/ca/
2024-05-09 18:21:46 +02:00
naskya 8373623136
locale: add the Esperanto language 2024-05-10 01:19:13 +09:00
Hosted Weblate d04f85d4bd
Merge branch 'origin/develop' into Weblate 2024-05-09 18:07:17 +02:00
naskya 33853a3a9b
locale: update translations (Japanese)
Currently translated at 100.0% (1932 of 1932 strings)

Translation: Firefish/locales
Translate-URL: https://hosted.weblate.org/projects/firefish/locales/ja/
2024-05-09 18:07:12 +02:00
naskya 26a58c92df
ci: use cargo nextest 2024-05-09 23:19:35 +09:00
naskya 4a81106cf5
chore (backend): remove generate-native-user-token 2024-05-09 21:49:56 +09:00
naskya bdc5d02d27
fix (client): missing import 2024-05-09 19:13:43 +09:00
naskya 075d326d7b Merge branch 'fix/renote-time' into 'develop'
fix: incorrect renote time

Co-authored-by: Lhcfl <Lhcfl@outlook.com>

See merge request firefish/firefish!10798
2024-05-09 09:52:07 +00:00
Lhcfl 0a7f16c11f fix: renote time 2024-05-09 15:49:00 +08:00
naskya 3af8f86924
chore: lint 2024-05-09 02:06:10 +09:00
naskya 276cabbbe3
ci: fix clippy task 2024-05-09 01:15:09 +09:00
naskya af14bee31f
docs: update changelog 2024-05-09 00:41:49 +09:00
naskya b3d1be457b Merge branch 'fix/MkTime' into 'develop'
refactor MkTime: replace the awful ?: chain with if-else; fix: force update ticker when props.time changed

Co-authored-by: Lhcfl <Lhcfl@outlook.com>

See merge request firefish/firefish!10797
2024-05-08 12:11:31 +00:00
Hosted Weblate 347851d6bb
Merge branch 'origin/develop' into Weblate 2024-05-08 10:21:46 +02:00
jolupa abec71074b
locale: update translations (Catalan)
Currently translated at 100.0% (1930 of 1930 strings)

Translation: Firefish/locales
Translate-URL: https://hosted.weblate.org/projects/firefish/locales/ca/
2024-05-08 10:21:45 +02:00
Lhcfl 272e30be0c refactor: replace the awful ?: chain with if-else; fix: force update ticker when props.time changed
related: ed6f866a4f

Co-authored-by: kakkokari-gtyih <kakkokari-gtyih@users.noreply.github.com>
2024-05-08 10:52:32 +08:00
naskya 971f196627
ci: yet another fix 2024-05-08 08:27:54 +09:00
naskya 8cc0e40d35
ci: remove more unneeded paths 2024-05-08 07:16:32 +09:00
naskya beeea86253
ci: remove unneeded steps from clippy check 2024-05-08 06:54:43 +09:00
naskya 084a4bc63a
ci: add pull_policy 2024-05-08 06:46:41 +09:00
naskya cda31d3dc7
Revert "refactor (backend): port publishNotesStream to backend-rs"
This reverts commit 5382dc5da8.

It turns out this sends incorrect time info to the stream
since JavaScript's Date object doesn't have timezone info

I'll revisit this in the future
2024-05-08 06:08:26 +09:00
naskya 907578e8f8
ci: fix config error 2024-05-08 05:28:41 +09:00
naskya 2923ea86de
ci: update workflow rules 2024-05-08 05:26:59 +09:00
naskya 226c990385
ci: use buildah caches 2024-05-08 05:26:36 +09:00
naskya 769f52c8ee Merge branch 'fix/reactive' into 'develop'
fix: use reactive MkTime

Co-authored-by: Lhcfl <Lhcfl@outlook.com>

See merge request firefish/firefish!10796
2024-05-07 19:59:12 +00:00
naskya 8a00d82f36
ci: add firefish-js 2024-05-08 04:49:13 +09:00
naskya 34ed877f57
ci: don't build the backend on client-only changes 2024-05-08 04:41:20 +09:00
Lhcfl f5074f35cc fix: use reactive MkTime 2024-05-08 03:00:07 +08:00
naskya a847dd55ad
ci: fix cargo clippy task 2024-05-08 03:58:21 +09:00
naskya 5382dc5da8
refactor (backend): port publishNotesStream to backend-rs 2024-05-08 02:15:07 +09:00
naskya 989e93f2a0
fix: migrate back from happy-dom to JSDOM (closes #10924 #10914 #10842)
this reverts commit 4565867b8b.
2024-05-08 01:52:15 +09:00
naskya df81cb6a85 Merge branch 'feat/collepse-reply-timeline' into 'develop'
feat: collapse renotes and replies in timeline

Co-authored-by: Lhcfl <Lhcfl@outlook.com>

Closes #10908

See merge request firefish/firefish!10788
2024-05-07 16:20:45 +00:00
Lhcfl 31168cc7b2 fix: use reactive MkSubNoteContent 2024-05-07 23:42:40 +08:00
Lhcfl 42886f054d fix: use reactive previewableCount 2024-05-07 23:31:45 +08:00
Lhcfl 1d0ea11eea fix: use note capture in MkNoteSimple 2024-05-07 23:23:19 +08:00
Lhcfl 24602c4745 update locales 2024-05-07 22:49:09 +08:00
Lhcfl 33923a59fa fix: use reactive MkNoteHeader 2024-05-07 22:37:09 +08:00
Lhcfl 8067ed4084 Merge branch 'develop' of https://firefish.dev/firefish/firefish into feat/collepse-reply-timeline 2024-05-07 22:34:45 +08:00
naskya 4277ad0b59
meta: update COPYING & include LICENSE in pre-built images 2024-05-07 20:54:47 +09:00
naskya fc65d8c1c3
docs: update api-change.md 2024-05-07 20:52:11 +09:00
naskya 3b3d457c3e
ci: restrict paths 2024-05-07 18:34:18 +09:00
naskya 1128e243d3
container: fix dockerignore 2024-05-07 18:01:05 +09:00
naskya 39e08f57e8
ci: remove unneeded argument 2024-05-07 18:01:05 +09:00
naskya 09ef642905
ci: skip builds if unneeded 2024-05-07 17:36:23 +09:00
naskya 1b8748bc8c
another attempt to build an image inside container inside container 2024-05-07 17:30:57 +09:00
naskya 82c98ae72f
ci: modify buildah args 2024-05-07 07:26:33 +09:00
naskya 5b3f93457b
dev: add renovate 2024-05-07 06:58:00 +09:00
naskya 4d9c0f8e7b
ci: fix syntax 2024-05-07 06:11:31 +09:00
naskya bf2b624bc9
ci: build OCI container image on develop 2024-05-07 05:52:43 +09:00
naskya 5261eb24b6
ci: restrict project path 2024-05-07 05:26:05 +09:00
naskya d440e9b388
ci: revise tasks 2024-05-07 04:58:59 +09:00
naskya 14b285f882 Merge branch 'refactor/is-safe-url' into 'develop'
refactor (backend): port isValidUrl to backend-rs


See merge request firefish/firefish!10795
2024-05-06 17:11:51 +00:00
naskya baa5c402db
ci: apt-get update first & fix paths 2024-05-07 01:54:29 +09:00
naskya 5b01d3574f
refactor (backend): port isValidUrl to backend-rs 2024-05-07 00:56:37 +09:00
naskya e3a98ebc72 Merge branch 'userLang' into 'develop'
Add server-side per-user UI language

Co-authored-by: eana <coder@apps.1a23.com>

See merge request firefish/firefish!10793
2024-05-06 15:31:18 +00:00
naskya 7fe7f90350
ci: revise build config 2024-05-07 00:22:51 +09:00
naskya 8ed942e00f
chore: update auto-generated files 2024-05-06 23:13:31 +09:00
naskya ddfdd038ad
chore: update downgrade.sql 2024-05-06 23:10:39 +09:00
naskya 7fdd44cf8d
locale: update translations 2024-05-06 23:07:57 +09:00
naskya 0c4826becf
dev: copy backend-rs/index.{js,d.ts} to built/index.{js,d.ts} if not exist
https://firefish.dev/firefish/firefish/-/merge_requests/10780#note_5685
2024-05-06 22:54:10 +09:00
naskya ecd8e3d109
ci: remove git clean flags 2024-05-06 22:51:42 +09:00
naskya a3b156441a
ci: temporary fix for cargo test failure due to missing meta.json 2024-05-06 19:38:35 +09:00
naskya ecbd8a8724
ci: save node_modules and target 2024-05-06 19:23:43 +09:00
naskya 442dc33a34
ci: exec build & cargo test only for now 2024-05-06 19:08:28 +09:00
naskya c8372767fa
ci: attempt to fix permission 2024-05-06 19:00:34 +09:00
naskya 8e497b41cf
messed up 2024-05-06 18:44:28 +09:00
naskya bfdf73caeb
ci: fix permission 2024-05-06 18:38:54 +09:00
naskya 5b18f9761c
ci: fix .git 2024-05-06 18:29:31 +09:00
naskya 641ff742bb
ci: add dependencies of sea-orm-cli 2024-05-06 18:26:50 +09:00
naskya e6121946aa
ci: another fix 2024-05-06 18:11:30 +09:00
naskya c6212ff8f4
ci: use CI_JOB_TOKEN 2024-05-06 18:06:56 +09:00
naskya d582a84c57
ci: install postgresql client 2024-05-06 17:58:26 +09:00
naskya a7978e2b08
ci: non-interactive shell option 2024-05-06 17:46:45 +09:00
naskya 766bac3dee
ci: give alias for services 2024-05-06 17:14:47 +09:00
naskya 7360736966
ci: fix typo 2024-05-06 17:07:42 +09:00
naskya e797849e9b
ci: attempt to add a CI task for merge requests 2024-05-06 17:00:36 +09:00
eana ef57735e6a fix typo 2024-05-06 05:26:38 +00:00
eana e7c33835b2 Add server-side per-user UI language 2024-05-06 05:14:44 +00:00
naskya 4e83dbd01f Merge branch 'refactor/remove-gulp' into 'develop'
refactor: replace gulp with a simple script


See merge request firefish/firefish!10791
2024-05-06 04:45:17 +00:00
naskya dd74eabae1
refactor (backend): port nodeinfo fetcher to backend-rs 2024-05-06 08:12:21 +09:00
naskya 711618b42c
test (backend-rs): add tests for nodeinfo (de)serialization 2024-05-06 05:20:13 +09:00
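serde and serde_json are already workspace dependencies (see the Cargo.toml diff further down), so these tests are presumably round-trip checks along the following lines; the struct here is a simplified stand-in, not the real NodeInfo schema:

```rust
use serde::{Deserialize, Serialize};

// Simplified stand-in for one piece of the NodeInfo document.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct NodeInfoSoftware {
    name: String,
    version: String,
}

#[test]
fn nodeinfo_software_roundtrip() {
    let original = NodeInfoSoftware {
        name: "firefish".to_owned(),
        version: "20240516".to_owned(),
    };

    // Serialize to JSON and parse it back; both directions must agree.
    let json = serde_json::to_string(&original).unwrap();
    let parsed: NodeInfoSoftware = serde_json::from_str(&json).unwrap();
    assert_eq!(original, parsed);
}
```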
naskya 510207b101
refactor (backend-rs): separate nodeinfo generator and schema 2024-05-06 04:23:38 +09:00
naskya 49825853c1
refactor (backend): port nodeinfo generator to backend-rs 2024-05-06 03:01:55 +09:00
naskya 359fef0a42
chore: replace old comments 2024-05-05 21:22:57 +09:00
naskya fda81a9f91
chore: use absolute path for file operations 2024-05-05 21:04:20 +09:00
naskya c505c6df36
fix: remove old locale files 2024-05-05 20:59:26 +09:00
naskya 9a4a75bf92 Merge branch 'fix/click' into 'develop'
fix: Click event of MenuParent unexpectedly goes to underlying element

Co-authored-by: Lhcfl <Lhcfl@outlook.com>

See merge request firefish/firefish!10792
2024-05-05 11:42:25 +00:00
Linca 5e5d01d407 fix: Click event of MenuParent unexpectedly goes to underlying element
Co-authored-by: Lhcfl <Lhcfl@outlook.com>
2024-05-05 11:42:25 +00:00
naskya d114b8ec1d
chore: format 2024-05-05 14:58:56 +09:00
naskya d2471b6db7
refactor (backend-rs): replace reqwest with isahc
reqwest is feature-rich, but we will need isahc http client for push notifications (!10760)
isahc http client is also good btw :)
2024-05-05 14:53:45 +09:00
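For reference, a minimal blocking request with isahc 1.7 looks like the following; this is a generic sketch, not the push-notification code from !10760, and the URL is a placeholder:

```rust
use isahc::prelude::*; // brings ReadResponseExt::text() into scope

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder endpoint; any HTTPS URL works the same way.
    let mut response = isahc::get("https://example.com")?;

    println!("status: {}", response.status());
    println!("{}", response.text()?);
    Ok(())
}
```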
naskya 341b43ed71
refactor: replace gulp with a simple script 2024-05-05 02:19:58 +09:00
naskya 6d64358674
fix (client): missing MFM function props not falling back correctly 2024-05-05 01:15:04 +09:00
naskya 4992999bb7
test (backend-rs): add tests 2024-05-04 22:59:49 +09:00
naskya 38c0de39b9
chore (backend-rs): add docs for functions in database/cache 2024-05-04 22:50:46 +09:00
naskya 722d090f8d
chore (backend-rs): remove unneeded 'static 2024-05-04 22:49:11 +09:00
naskya b185c0c87e
feat (backend-rs): add cache::delete_all 2024-05-04 21:24:20 +09:00
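The signature of `cache::delete_all` isn't visible in this compare view. As an illustration only, clearing every key under a prefix with the `redis` crate (pinned at 0.25 in Cargo.toml) could look roughly like this; the real implementation may use SCAN or a different key layout:

```rust
use redis::Commands;

// Illustrative sketch: removes every cached key under `prefix`.
fn delete_all(conn: &mut redis::Connection, prefix: &str) -> redis::RedisResult<()> {
    // KEYS is fine for a sketch; production code would likely iterate with SCAN.
    let keys: Vec<String> = conn.keys(format!("{prefix}:*"))?;
    if !keys.is_empty() {
        let _: () = conn.del(keys)?;
    }
    Ok(())
}
```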
naskya 8c22b0d07f
test (backend-rs): fix version format 2024-05-04 16:17:33 +09:00
naskya 0f4c05a64f
ci: add 'ci' feature flag to backend-rs 2024-05-04 16:14:23 +09:00
naskya bc39badf51
chore (client): remove unused code 2024-05-04 16:08:41 +09:00
Lhcfl 46d0679845 little patch 2024-05-03 00:56:10 +08:00
Lhcfl 160e7f26a6 feat: collapse renotes and replies 2024-05-03 00:22:25 +08:00
Lhcfl 9138c3726a dev: use reactiveState in foldNotification 2024-05-02 01:07:57 +08:00
Lhcfl 425b333474 set collapseReplyInTimeline default to false 2024-05-02 00:57:00 +08:00
Lhcfl d1c76b3882 feat: allow collapsing replied posts in timeline 2024-05-02 00:53:52 +08:00
154 changed files with 14601 additions and 14024 deletions

View File

@ -1,195 +1,11 @@
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Firefish configuration
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# ┌─────┐
#───┘ URL └─────────────────────────────────────────────────────
# Final accessible URL seen by a user.
url: https://example.tld/
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# URL SETTINGS AFTER THAT!
# ┌───────────────────────┐
#───┘ Port and TLS settings └───────────────────────────────────
#
# Misskey requires a reverse proxy to support HTTPS connections.
#
# +----- https://example.tld/ ------------+
# +------+ |+-------------+ +----------------+|
# | User | ---> || Proxy (443) | ---> | Misskey (3000) ||
# +------+ |+-------------+ +----------------+|
# +---------------------------------------+
#
# You need to set up a reverse proxy. (e.g. nginx)
# An encrypted connection with HTTPS is highly recommended
# because tokens may be transferred in GET requests.
# The port that your Misskey server should listen on.
url: http://localhost:3000
port: 3000
# ┌──────────────────────────┐
#───┘ PostgreSQL configuration └────────────────────────────────
db:
host: postgres
port: 5432
# Database name
db: postgres
# Auth
user: postgres
pass: test
# Whether disable Caching queries
#disableCache: true
# Extra Connection options
#extra:
# ssl: true
# ┌─────────────────────┐
#───┘ Redis configuration └─────────────────────────────────────
db: firefish_db
user: firefish
pass: password
redis:
host: redis
port: 6379
#family: 0 # 0=Both, 4=IPv4, 6=IPv6
#pass: example-pass
#prefix: example-prefix
#db: 1
# ┌─────────────────────────────┐
#───┘ Elasticsearch configuration └─────────────────────────────
#elasticsearch:
# host: localhost
# port: 9200
# ssl: false
# user:
# pass:
# ┌───────────────┐
#───┘ ID generation └───────────────────────────────────────────
# You can select the ID generation method.
# You don't usually need to change this setting, but you can
# change it according to your preferences.
# Available methods:
# aid ... Short, Millisecond accuracy
# meid ... Similar to ObjectID, Millisecond accuracy
# ulid ... Millisecond accuracy
# objectid ... This is left for backward compatibility
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# ID SETTINGS AFTER THAT!
id: 'aid'
# ┌─────────────────────┐
#───┘ Other configuration └─────────────────────────────────────
# Max note length, should be < 8000.
#maxNoteLength: 3000
# Whether disable HSTS
#disableHsts: true
# Number of worker processes
#clusterLimit: 1
# Job concurrency per worker
# deliverJobConcurrency: 128
# inboxJobConcurrency: 16
# Job rate limiter
# deliverJobPerSec: 128
# inboxJobPerSec: 16
# Job attempts
# deliverJobMaxAttempts: 12
# inboxJobMaxAttempts: 8
# IP address family used for outgoing request (ipv4, ipv6 or dual)
#outgoingAddressFamily: ipv4
# Syslog option
#syslog:
# host: localhost
# port: 514
# Proxy for HTTP/HTTPS
#proxy: http://127.0.0.1:3128
#proxyBypassHosts: [
# 'example.com',
# '192.0.2.8'
#]
# Proxy for SMTP/SMTPS
#proxySmtp: http://127.0.0.1:3128 # use HTTP/1.1 CONNECT
#proxySmtp: socks4://127.0.0.1:1080 # use SOCKS4
#proxySmtp: socks5://127.0.0.1:1080 # use SOCKS5
# Media Proxy
#mediaProxy: https://example.com/proxy
# Proxy remote files (default: false)
#proxyRemoteFiles: true
#allowedPrivateNetworks: [
# '127.0.0.1/32'
#]
# Upload or download file size limits (bytes)
#maxFileSize: 262144000
# Managed hosting settings
# !!!!!!!!!!
# >>>>>> NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS! <<<<<<
# !!!!!!!!!!
# Each category is optional, but if each item in each category is mandatory!
# If you mess this up, that's on you, you've been warned...
#maxUserSignups: 100
#isManagedHosting: true
#deepl:
# managed: true
# authKey: ''
# isPro: false
#
#email:
# managed: true
# address: 'example@email.com'
# host: 'email.com'
# port: 587
# user: 'example@email.com'
# pass: ''
# useImplicitSslTls: false
#
#objectStorage:
# managed: true
# baseUrl: ''
# bucket: ''
# prefix: ''
# endpoint: ''
# region: ''
# accessKey: ''
# secretKey: ''
# useSsl: true
# connnectOverProxy: false
# setPublicReadOnUpload: true
# s3ForcePathStyle: true
# !!!!!!!!!!
# >>>>>> AGAIN, NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS, ABOVE SETTINGS ARE FOR MANAGED HOSTING ONLY! <<<<<<
# !!!!!!!!!!
# Seriously. Do NOT fill out the above settings if you're self-hosting.
# They're much better off being set from the control panel.

View File

@ -51,12 +51,11 @@ title.svg
/dev
/docs
/scripts
!/scripts/copy-assets.mjs
biome.json
COPYING
CODE_OF_CONDUCT.md
CONTRIBUTING.md
Dockerfile
LICENSE
Procfile
README.md
SECURITY.md

.gitlab-ci.yml (new file, 265 lines)
View File

@ -0,0 +1,265 @@
image: docker.io/rust:slim-bookworm

services:
  - name: docker.io/groonga/pgroonga:latest-alpine-12-slim
    alias: postgres
    pull_policy: if-not-present
  - name: docker.io/redis:7-alpine
    alias: redis
    pull_policy: if-not-present

workflow:
  rules:
    - if: $CI_PROJECT_PATH == 'firefish/firefish' || $CI_MERGE_REQUEST_PROJECT_PATH == 'firefish/firefish'
      changes:
        paths:
          - packages/**/*
          - locales/**/*
          - scripts/**/*
          - package.json
          - Cargo.toml
          - Cargo.lock
          - Dockerfile
          - .dockerignore
      when: always
    - when: never

stages:
  - dependency
  - test
  - build

variables:
  POSTGRES_DB: 'firefish_db'
  POSTGRES_USER: 'firefish'
  POSTGRES_PASSWORD: 'password'
  POSTGRES_HOST_AUTH_METHOD: 'trust'
  DEBIAN_FRONTEND: 'noninteractive'
  CARGO_PROFILE_DEV_OPT_LEVEL: '0'
  CARGO_PROFILE_DEV_LTO: 'off'
  CARGO_PROFILE_DEV_DEBUG: 'none'
  CARGO_TERM_COLOR: 'always'
  GIT_CLEAN_FLAGS: -ffdx -e node_modules/ -e built/ -e target/ -e packages/backend-rs/built/

default:
  before_script:
    - apt-get update && apt-get -y upgrade
    - apt-get -y --no-install-recommends install curl
    - curl -fsSL 'https://deb.nodesource.com/setup_18.x' | bash -
    - apt-get install -y --no-install-recommends build-essential clang mold python3 perl nodejs postgresql-client
    - corepack enable
    - corepack prepare pnpm@latest --activate
    - cp .config/ci.yml .config/default.yml
    - cp ci/cargo/config.toml /usr/local/cargo/config.toml
    - export PGPASSWORD="${POSTGRES_PASSWORD}"
    - psql --host postgres --user "${POSTGRES_USER}" --dbname "${POSTGRES_DB}" --command 'CREATE EXTENSION pgroonga'

test:build:
  stage: test
  rules:
    - if: $TEST == 'false'
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
      changes:
        paths:
          - packages/backend-rs/**/*
          - packages/macro-rs/**/*
          - scripts/**/*
          - package.json
          - Cargo.toml
          - Cargo.lock
      when: always
  needs:
    - job: cargo:clippy
      optional: true
    - job: cargo:test
      optional: true
  script:
    - pnpm install --frozen-lockfile
    - pnpm run build:debug
    - pnpm run migrate

test:build:backend_ts_only:
  stage: test
  rules:
    - if: $TEST == 'false'
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
      changes:
        paths:
          - packages/backend-rs/**/*
          - packages/macro-rs/**/*
          - scripts/**/*
          - package.json
          - Cargo.toml
          - Cargo.lock
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
      changes:
        paths:
          - packages/backend/**/*
          - packages/megalodon/**/*
      when: always
  before_script:
    - apt-get update && apt-get -y upgrade
    - apt-get -y --no-install-recommends install curl
    - curl -fsSL 'https://deb.nodesource.com/setup_18.x' | bash -
    - apt-get install -y --no-install-recommends build-essential python3 nodejs postgresql-client
    - corepack enable
    - corepack prepare pnpm@latest --activate
    - mkdir -p packages/backend-rs/built
    - cp packages/backend-rs/index.js packages/backend-rs/built/index.js
    - cp packages/backend-rs/index.d.ts packages/backend-rs/built/index.d.ts
    - cp .config/ci.yml .config/default.yml
    - export PGPASSWORD="${POSTGRES_PASSWORD}"
    - psql --host postgres --user "${POSTGRES_USER}" --dbname "${POSTGRES_DB}" --command 'CREATE EXTENSION pgroonga'
  script:
    - pnpm install --frozen-lockfile
    - pnpm --filter 'backend' --filter 'megalodon' run build:debug
    - pnpm run migrate

test:build:client_only:
  stage: test
  rules:
    - if: $TEST == 'false'
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
      changes:
        paths:
          - packages/backend-rs/**/*
          - packages/macro-rs/**/*
          - scripts/**/*
          - package.json
          - Cargo.toml
          - Cargo.lock
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
      changes:
        paths:
          - packages/client/**/*
          - packages/firefish-js/**/*
          - packages/sw/**/*
          - locales/**/*
      when: always
  services: []
  before_script:
    - apt-get update && apt-get -y upgrade
    - apt-get -y --no-install-recommends install curl
    - curl -fsSL 'https://deb.nodesource.com/setup_18.x' | bash -
    - apt-get install -y --no-install-recommends build-essential python3 perl nodejs
    - corepack enable
    - corepack prepare pnpm@latest --activate
    - cp .config/ci.yml .config/default.yml
  script:
    - pnpm install --frozen-lockfile
    - pnpm --filter 'firefish-js' --filter 'client' --filter 'sw' run build:debug

build:container:
  stage: build
  image: docker.io/debian:bookworm-slim
  services: []
  rules:
    - if: $BUILD == 'false'
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop'
      changes:
        paths:
          - packages/**/*
          - locales/**/*
          - scripts/copy-assets.mjs
          - package.json
          - Cargo.toml
          - Cargo.lock
          - Dockerfile
          - .dockerignore
      when: always
  needs:
    - job: test:build
      optional: true
    - job: test:build:backend_ts_only
      optional: true
    - job: test:build:client_only
      optional: true
  before_script:
    - apt-get update && apt-get -y upgrade
    - apt-get install -y --no-install-recommends buildah ca-certificates fuse-overlayfs
    - buildah login --username "${CI_REGISTRY_USER}" --password "${CI_REGISTRY_PASSWORD}" "${CI_REGISTRY}"
    - export IMAGE_TAG="${CI_REGISTRY}/${CI_PROJECT_PATH}/develop:not-for-production"
    - export IMAGE_CACHE="${CI_REGISTRY}/${CI_PROJECT_PATH}/develop/cache"
  script:
    - |-
      buildah build \
        --isolation chroot \
        --device /dev/fuse:rw \
        --security-opt seccomp=unconfined \
        --security-opt apparmor=unconfined \
        --cap-add all \
        --platform linux/amd64 \
        --layers \
        --cache-to "${IMAGE_CACHE}" \
        --cache-from "${IMAGE_CACHE}" \
        --tag "${IMAGE_TAG}" \
        .
    - buildah inspect "${IMAGE_TAG}"
    - buildah push "${IMAGE_TAG}"

cargo:test:
  stage: test
  rules:
    - if: $TEST == 'false'
      when: never
    - if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main'
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
      changes:
        paths:
          - packages/backend-rs/**/*
          - packages/macro-rs/**/*
          - Cargo.toml
          - Cargo.lock
      when: always
  script:
    - curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin
    - pnpm install --frozen-lockfile
    - mkdir -p packages/backend-rs/built
    - cp packages/backend-rs/index.js packages/backend-rs/built/index.js
    - cp packages/backend-rs/index.d.ts packages/backend-rs/built/index.d.ts
    - pnpm --filter='!backend-rs' run build:debug
    - cargo test --doc
    - cargo nextest run

cargo:clippy:
  stage: test
  rules:
    - if: $TEST == 'false'
      when: never
    - if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main'
      when: never
    - if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
      changes:
        paths:
          - packages/backend-rs/**/*
          - packages/macro-rs/**/*
          - Cargo.toml
          - Cargo.lock
      when: always
  services: []
  before_script:
    - apt-get update && apt-get -y upgrade
    - apt-get install -y --no-install-recommends build-essential clang mold perl
    - cp ci/cargo/config.toml /usr/local/cargo/config.toml
    - rustup component add clippy
  script:
    - cargo clippy -- -D warnings

renovate:
  stage: dependency
  image:
    name: docker.io/renovate/renovate:37-slim
    entrypoint: [""]
  rules:
    - if: $RENOVATE && $CI_PIPELINE_SOURCE == 'schedule'
  services: []
  before_script: []
  script:
    - renovate --platform gitlab --token "${API_TOKEN}" --endpoint "${CI_SERVER_URL}/api/v4" "${CI_PROJECT_PATH}"

View File

@ -26,10 +26,6 @@ RsaSignature2017 implementation by Transmute Industries Inc
License: MIT
https://github.com/transmute-industries/RsaSignature2017/blob/master/LICENSE
Machine learning model for sensitive images by Infinite Red, Inc.
License: MIT
https://github.com/infinitered/nsfwjs/blob/master/LICENSE
Chiptune2.js by Simon Gündling
License: MIT
https://github.com/deskjet/chiptune2.js#license

Cargo.lock (generated, 1225 lines)

File diff suppressed because it is too large.

View File

@ -5,8 +5,8 @@ resolver = "2"
[workspace.dependencies]
macro_rs = { path = "packages/macro-rs" }
napi = { version = "2.16.4", default-features = false }
napi-derive = "2.16.3"
napi = { version = "2.16.6", default-features = false }
napi-derive = "2.16.4"
napi-build = "2.1.3"
argon2 = "0.5.3"
@ -18,29 +18,31 @@ cuid2 = "0.1.2"
emojis = "0.6.2"
idna = "0.5.0"
image = "0.25.1"
isahc = "1.7.2"
nom-exif = "1.2.0"
once_cell = "1.19.0"
openssl = "0.10.64"
pretty_assertions = "1.4.0"
proc-macro2 = "1.0.81"
proc-macro2 = "1.0.82"
quote = "1.0.36"
rand = "0.8.5"
redis = "0.25.3"
regex = "1.10.4"
reqwest = "0.12.4"
rmp-serde = "1.2.0"
rmp-serde = "1.3.0"
sea-orm = "0.12.15"
serde = "1.0.198"
serde_json = "1.0.116"
serde = "1.0.202"
serde_json = "1.0.117"
serde_yaml = "0.9.34"
strum = "0.26.2"
syn = "2.0.60"
thiserror = "1.0.59"
syn = "2.0.63"
sysinfo = "0.30.12"
thiserror = "1.0.60"
tokio = "1.37.0"
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
url = "2.5.0"
urlencoding = "2.1.3"
web-push = { git = "https://github.com/pimeys/rust-web-push", rev = "40febe4085e3cef9cdfd539c315e3e945aba0656" }
[profile.release]
lto = true

View File

@ -7,7 +7,12 @@ RUN apk update && apk add --no-cache build-base linux-headers curl ca-certificat
RUN curl --proto '=https' --tlsv1.2 --silent --show-error --fail https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
# Copy only the cargo dependency-related files first, to cache efficiently
# Copy only backend-rs dependency-related files first, to cache efficiently
COPY package.json pnpm-workspace.yaml ./
COPY packages/backend-rs/package.json packages/backend-rs/package.json
COPY packages/backend-rs/npm/linux-x64-musl/package.json packages/backend-rs/npm/linux-x64-musl/package.json
COPY packages/backend-rs/npm/linux-arm64-musl/package.json packages/backend-rs/npm/linux-arm64-musl/package.json
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
COPY packages/backend-rs/Cargo.toml packages/backend-rs/Cargo.toml
@ -15,22 +20,9 @@ COPY packages/backend-rs/src/lib.rs packages/backend-rs/src/
COPY packages/macro-rs/Cargo.toml packages/macro-rs/Cargo.toml
COPY packages/macro-rs/src/lib.rs packages/macro-rs/src/
# Install cargo dependencies
RUN cargo fetch --locked --manifest-path /firefish/packages/backend-rs/Cargo.toml
# Copy only the dependency-related files first, to cache efficiently
COPY package.json pnpm*.yaml ./
COPY packages/backend/package.json packages/backend/package.json
COPY packages/client/package.json packages/client/package.json
COPY packages/sw/package.json packages/sw/package.json
COPY packages/firefish-js/package.json packages/firefish-js/package.json
COPY packages/megalodon/package.json packages/megalodon/package.json
COPY packages/backend-rs/package.json packages/backend-rs/package.json
COPY packages/backend-rs/npm/linux-x64-musl/package.json packages/backend-rs/npm/linux-x64-musl/package.json
COPY packages/backend-rs/npm/linux-arm64-musl/package.json packages/backend-rs/npm/linux-arm64-musl/package.json
# Configure pnpm, and install dev mode dependencies for compilation
RUN corepack enable && corepack prepare pnpm@latest --activate && pnpm install --frozen-lockfile
# Configure pnpm, and install backend-rs dependencies
RUN corepack enable && corepack prepare pnpm@latest --activate && pnpm --filter backend-rs install
RUN cargo fetch --locked --manifest-path Cargo.toml
# Copy in the rest of the rust files
COPY packages/backend-rs packages/backend-rs/
@ -42,10 +34,22 @@ RUN NODE_ENV='production' pnpm run --filter backend-rs build
# Copy/Overwrite index.js to mitigate the bug in napi-rs codegen
COPY packages/backend-rs/index.js packages/backend-rs/built/index.js
# Copy in the rest of the files to compile
# Copy only the dependency-related files first, to cache efficiently
COPY packages/backend/package.json packages/backend/package.json
COPY packages/client/package.json packages/client/package.json
COPY packages/sw/package.json packages/sw/package.json
COPY packages/firefish-js/package.json packages/firefish-js/package.json
COPY packages/megalodon/package.json packages/megalodon/package.json
COPY pnpm-lock.yaml ./
# Install dev mode dependencies for compilation
RUN pnpm install --frozen-lockfile
# Copy in the rest of the files to build
COPY . ./
RUN NODE_ENV='production' pnpm run --filter firefish-js build
RUN NODE_ENV='production' pnpm run --recursive --parallel --filter '!backend-rs' --filter '!firefish-js' build && pnpm run gulp
# Build other workspaces
RUN NODE_ENV='production' pnpm run --recursive --filter '!backend-rs' build && pnpm run build:assets
# Trim down the dependencies to only those for production
RUN find . -path '*/node_modules/*' -delete && pnpm install --prod --frozen-lockfile

ci/cargo/config.toml (new file, 3 lines)
View File

@ -0,0 +1,3 @@
[target.x86_64-unknown-linux-gnu]
linker = "/usr/bin/clang"
rustflags = ["-C", "link-arg=--ld-path=/usr/bin/mold"]

View File

@ -2,6 +2,12 @@
Breaking changes are indicated by the :warning: icon.
## v20240516
- :warning: `server-info` (an endpoint to get server hardware information) now requires credentials.
- :warning: `net` (server's default network interface) has been removed from `admin/server-info`.
- Adding `lang` to the response of `i` and the request parameter of `i/update`.
## v20240504
- :warning: Removed `release` endpoint.

View File

@ -5,6 +5,13 @@ Critical security updates are indicated by the :warning: icon.
- Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
- Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
## [v20240516](https://firefish.dev/firefish/firefish/-/merge_requests/10854/commits)
- Improve timeline UX (you can restore the original appearance by settings)
- Remove `$[center]` MFM function
- This function was suddenly added last year (https://firefish.dev/firefish/firefish/-/commit/1a971efa689323d54eebb4d3646e102fb4d1d95a), but according to the [MFM spec](https://github.com/misskey-dev/mfm.js/blob/6aaf68089023c6adebe44123eebbc4dcd75955e0/docs/syntax.md#fn), `$[something]` must be an inline element (while `center` is a block element), so such a syntax is not expected by MFM renderers. Please use `<center></center>` instead.
- Fix bugs
## [v20240504](https://firefish.dev/firefish/firefish/-/merge_requests/10790/commits)
- Fix bugs

View File

@ -1,6 +1,7 @@
BEGIN;
DELETE FROM "migrations" WHERE name IN (
'AddUserProfileLanguage1714888400293',
'DropUnusedIndexes1714643926317',
'AlterAkaType1714099399879',
'AddDriveFileUsage1713451569342',
@ -764,9 +765,6 @@ CREATE SEQUENCE public.__chart_day__users_id_seq
CACHE 1;
ALTER SEQUENCE public.__chart_day__users_id_seq OWNED BY public.__chart_day__users.id;
-- drop-user-profile-language
ALTER TABLE "user_profile" ADD COLUMN "lang" character varying(32);
-- emoji-moderator
ALTER TABLE "user" DROP COLUMN "emojiModPerm";
DROP TYPE "public"."user_emojimodperm_enum";

View File

@ -326,7 +326,7 @@ cd ~/firefish
- To add custom locales, place them in the `./custom/locales/` directory. If you name your custom locale the same as an existing locale, it will overwrite it. If you give it a unique name, it will be added to the list. Also make sure that the first part of the filename matches the locale you're basing it on. (Example: `en-FOO.yml`)
- To add custom error images, place them in the `./custom/assets/badges` directory, replacing the files already there.
- To add custom sounds, place only mp3 files in the `./custom/assets/sounds` directory.
- To update custom assets without rebuilding, just run `pnpm run gulp`.
- To update custom assets without rebuilding, just run `pnpm run build:assets`.
- To block ChatGPT, CommonCrawl, or other crawlers from indexing your instance, uncomment the respective rules in `./custom/robots.txt`.
## Tips & Tricks

View File

@ -2,6 +2,12 @@
You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md).
## v20240516
### For all users
Firefish is now compatible with [Node v22](https://nodejs.org/en/blog/announcements/v22-release-announce). The pre-built OCI container image will still be using the latest LTS version (v20.13.1 as of now).
## v20240430
### For all users
@ -17,11 +23,13 @@ You can control the verbosity of the server log by adding `maxLogLevel` in `.con
- Not only Firefish but also Node.js has recently fixed a few security issues:
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
So, it is highly recommended that you upgrade your Node.js version as well. The new versions are
- Node v18.20.2 (v18.x LTS)
- Node v20.12.2 (v20.x LTS)
- Node v21.7.3 (v21.x)
- You can check your Node.js version by this command:
You can check your Node.js version by this command:
```sh
node --version
```

View File

@ -1,101 +0,0 @@
/**
 * Gulp tasks
 */

const fs = require("fs");
const gulp = require("gulp");
const replace = require("gulp-replace");
const terser = require("gulp-terser");
const cssnano = require("gulp-cssnano");

const meta = require("./package.json");

gulp.task("copy:backend:views", () =>
	gulp
		.src("./packages/backend/src/server/web/views/**/*")
		.pipe(gulp.dest("./packages/backend/built/server/web/views")),
);

gulp.task("copy:backend:custom", () =>
	gulp
		.src("./custom/assets/**/*")
		.pipe(gulp.dest("./packages/backend/assets/")),
);

gulp.task("copy:client:fonts", () =>
	gulp
		.src("./packages/client/node_modules/three/examples/fonts/**/*")
		.pipe(gulp.dest("./built/_client_dist_/fonts/")),
);

gulp.task("copy:client:locales", async (cb) => {
	fs.mkdirSync("./built/_client_dist_/locales", { recursive: true });

	const { default: locales } = await import("./locales/index.mjs");
	const v = { _version_: meta.version };

	for (const [lang, locale] of Object.entries(locales)) {
		fs.writeFileSync(
			`./built/_client_dist_/locales/${lang}.${meta.version}.json`,
			JSON.stringify({ ...locale, ...v }),
			"utf-8",
		);
	}

	cb();
});

gulp.task("build:backend:script", async () => {
	const { default: locales } = await import("./locales/index.mjs");

	return gulp
		.src([
			"./packages/backend/src/server/web/boot.js",
			"./packages/backend/src/server/web/bios.js",
			"./packages/backend/src/server/web/cli.js",
		])
		.pipe(replace("SUPPORTED_LANGS", JSON.stringify(Object.keys(locales))))
		.pipe(
			terser({
				toplevel: true,
			}),
		)
		.pipe(gulp.dest("./packages/backend/built/server/web/"));
});

gulp.task("build:backend:style", () => {
	return gulp
		.src([
			"./packages/backend/src/server/web/style.css",
			"./packages/backend/src/server/web/bios.css",
			"./packages/backend/src/server/web/cli.css",
		])
		.pipe(
			cssnano({
				zindex: false,
			}),
		)
		.pipe(gulp.dest("./packages/backend/built/server/web/"));
});

gulp.task(
	"build",
	gulp.parallel(
		"copy:client:locales",
		"copy:backend:views",
		"copy:backend:custom",
		"build:backend:script",
		"build:backend:style",
		"copy:client:fonts",
	),
);

gulp.task("default", gulp.task("build"));

gulp.task("watch", () => {
	gulp.watch(
		["./packages/*/src/**/*"],
		{ ignoreInitial: false },
		gulp.task("build"),
	);
});

View File

@ -2301,3 +2301,9 @@ getQrCode: Mostrar el codi QR
copyRemoteFollowUrl: Còpia la adreça URL del seguidor remot
foldNotification: Agrupar les notificacions similars
slashQuote: Cita encadenada
i18nServerInfo: Els nous clients els trobares en {language} per defecte.
i18nServerChange: Fes servir {language} en comptes.
i18nServerSet: Fes servir {language} per els nous clients.
mergeThreadInTimeline: Fusiona diferents publicacions en un mateix fil a les línies
de temps
mergeRenotesInTimeline: Agrupa diferents impulsos d'una mateixa publicació

View File

@ -766,6 +766,9 @@ confirmToUnclipAlreadyClippedNote: "This post is already part of the \"{name}\"
public: "Public"
i18nInfo: "Firefish is being translated into various languages by volunteers. You
can help at {link}."
i18nServerInfo: "New clients will be in {language} by default."
i18nServerChange: "Use {language} instead."
i18nServerSet: "Use {language} for new clients."
manageAccessTokens: "Manage access tokens"
accountInfo: "Account Info"
notesCount: "Number of posts"
@ -2241,3 +2244,5 @@ incorrectLanguageWarning: "It looks like your post is in {detected}, but you sel
noteEditHistory: "Post edit history"
slashQuote: "Chain quote"
foldNotification: "Group similar notifications"
mergeThreadInTimeline: "Merge multiple posts in the same thread in timelines"
mergeRenotesInTimeline: "Group multiple boosts of the same post"

locales/eo.yml Normal file
View File

@ -0,0 +1 @@
_lang_: "Esperanto"

View File

@ -1142,8 +1142,8 @@ _wordMute:
mutedNotes: "Publications masquées"
muteLangsDescription2: Utiliser les codes de langue (i.e en, fr, ja, zh).
lang: Langue
langDescription: Cacher du fil de publication les publications qui correspondent
à ces langues.
langDescription: Cachez les publications qui correspondent à la langue définie dans
le fil d'actualité.
muteLangs: Langues filtrées
muteLangsDescription: Séparer avec des espaces ou des retours à la ligne pour une
condition OU (OR).
@ -1260,7 +1260,7 @@ _tutorial:
step2_2: "En fournissant quelques informations sur qui vous êtes, il sera plus facile
pour les autres de savoir s'ils veulent voir vos publications ou s'abonner à vous.\n
step3_1: "Maintenant il est temps de vous abonner à des gens!"
step3_2: "Vos fils d'actualités Principal et Social sont basés sur les personnes
step3_2: "Vos fils d'actualité Principal et Social sont basés sur les personnes
que vous êtes abonné, alors essayez de vous abonner à quelques comptes pour commencer.\n
Cliquez sur le cercle « plus » en haut à droite d'un profil pour vous abonner."
step4_1: "On y va."
@ -2332,3 +2332,9 @@ inputAccountId: Veuillez saisir votre compte (par exemple, @firefish@info.firefi
remoteFollow: Abonnement à distance
copyRemoteFollowUrl: Copier l'URL d'abonnement à distance
slashQuote: Citation enchaînée
i18nServerInfo: Les nouveaux clients seront en {language} par défaut.
i18nServerChange: Utilisez {language} à la place.
i18nServerSet: Utilisez {language} pour les nouveaux clients.
mergeThreadInTimeline: Fusionner plusieurs publications dans le même fil dans les
fils d'actualité
mergeRenotesInTimeline: Regrouper plusieurs boosts du même publication

View File

@ -685,6 +685,9 @@ unclip: "クリップ解除"
confirmToUnclipAlreadyClippedNote: "この投稿はすでにクリップ「{name}」に含まれています。投稿をこのクリップから除外しますか?"
public: "公開"
i18nInfo: "Firefishは有志によって様々な言語に翻訳されています。{link}で翻訳に協力できます。"
i18nServerInfo: "新しい端末では{language}が既定の言語になります。"
i18nServerChange: "{language}に変更する。"
i18nServerSet: "新しい端末での表示言語を{language}にします。"
manageAccessTokens: "アクセストークンの管理"
accountInfo: "アカウント情報"
notesCount: "投稿の数"
@ -2068,3 +2071,5 @@ getQrCode: QRコードを表示
copyRemoteFollowUrl: リモートからフォローするURLをコピー
foldNotification: 同じ種類の通知をまとめて表示する
slashQuote: 繋げて引用
mergeRenotesInTimeline: タイムラインで同じ投稿のブーストをまとめる
mergeThreadInTimeline: タイムラインで同じスレッドの投稿をまとめる

View File

@ -667,6 +667,9 @@ unclip: "移除便签"
confirmToUnclipAlreadyClippedNote: "本帖已包含在便签 \"{name}\" 里。您想要将本帖从该便签中移除吗?"
public: "公开"
i18nInfo: "Firefish 已经被志愿者们翻译成了各种语言。如果您也有兴趣,可以通过 {link} 帮助翻译。"
i18nServerInfo: "新客户端将默认使用 {language}。"
i18nServerChange: "改为 {language}。"
i18nServerSet: "设定新客户端使用 {language}。"
manageAccessTokens: "管理访问令牌"
accountInfo: "账号信息"
notesCount: "帖子数量"
@ -2068,3 +2071,5 @@ noteEditHistory: "帖子编辑历史"
media: 媒体
slashQuote: "斜杠引用"
foldNotification: "将通知按同类型分组"
mergeThreadInTimeline: "将时间线内的连续回复合并成一串"
mergeRenotesInTimeline: "合并同一个帖子的转发"

View File

@ -661,6 +661,9 @@ unclip: "解除摘錄"
confirmToUnclipAlreadyClippedNote: "此貼文已包含在摘錄「{name}」中。 你想將貼文從這個摘錄中排除嗎?"
public: "公開"
i18nInfo: "Firefish已經被志願者們翻譯成各種語言版本如果想要幫忙的話可以進入{link}幫助翻譯。"
i18nServerInfo: "新客戶端將默認使用 {language}。"
i18nServerChange: "改為 {language}。"
i18nServerSet: "設定新客戶端使用 {language}。"
manageAccessTokens: "管理存取權杖"
accountInfo: "帳戶資訊"
notesCount: "貼文數量"

View File

@ -1,22 +1,23 @@
{
"name": "firefish",
"version": "20240504",
"version": "20240516",
"repository": {
"type": "git",
"url": "https://firefish.dev/firefish/firefish.git"
},
"packageManager": "pnpm@8.15.7",
"packageManager": "pnpm@9.1.1",
"private": true,
"scripts": {
"rebuild": "pnpm run clean && pnpm run build",
"build": "pnpm node ./scripts/build.mjs && pnpm run gulp",
"build": "pnpm --recursive --color run build && pnpm node ./scripts/copy-index.mjs && pnpm run build:assets",
"build:assets": "pnpm node ./scripts/copy-assets.mjs",
"build:debug": "pnpm run clean && pnpm --recursive --color run build:debug && pnpm node ./scripts/copy-index-dev.mjs && pnpm run build:assets",
"start": "pnpm --filter backend run start",
"start:container": "pnpm run gulp && pnpm run migrate && pnpm run start",
"start:container": "pnpm run build:assets && pnpm run migrate && pnpm run start",
"start:test": "pnpm --filter backend run start:test",
"init": "pnpm run migrate",
"migrate": "pnpm --filter backend run migration:run",
"revertmigration": "pnpm --filter backend run migration:revert",
"gulp": "gulp build",
"watch": "pnpm run dev",
"dev": "pnpm node ./scripts/dev.mjs",
"dev:staging": "NODE_OPTIONS=--max_old_space_size=3072 NODE_ENV=development pnpm run build && pnpm run start",
@ -24,7 +25,6 @@
"lint:ts": "pnpm --filter !firefish-js -r --parallel run lint",
"lint:rs": "cargo clippy --fix --allow-dirty --allow-staged && cargo fmt --all --",
"debug": "pnpm run build:debug && pnpm run start",
"build:debug": "pnpm run clean && pnpm node ./scripts/dev-build.mjs && pnpm run gulp",
"mocha": "pnpm --filter backend run mocha",
"test": "pnpm run test:ts && pnpm run test:rs",
"test:ts": "pnpm run mocha",
@ -38,21 +38,17 @@
"clean-all": "pnpm run clean && pnpm run clean-cargo && pnpm run clean-npm"
},
"dependencies": {
"gulp": "4.0.2",
"gulp-cssnano": "2.1.3",
"gulp-replace": "1.1.4",
"gulp-terser": "2.1.0",
"js-yaml": "4.1.0"
},
"devDependencies": {
"@biomejs/biome": "1.7.1",
"@biomejs/cli-darwin-arm64": "^1.7.1",
"@biomejs/cli-darwin-x64": "^1.7.1",
"@biomejs/cli-linux-arm64": "^1.7.1",
"@biomejs/cli-linux-x64": "^1.7.1",
"@types/node": "20.12.7",
"execa": "8.0.1",
"pnpm": "8.15.7",
"@biomejs/biome": "1.7.3",
"@biomejs/cli-darwin-arm64": "1.7.3",
"@biomejs/cli-darwin-x64": "1.7.3",
"@biomejs/cli-linux-arm64": "1.7.3",
"@biomejs/cli-linux-x64": "1.7.3",
"@types/node": "20.12.12",
"execa": "9.1.0",
"pnpm": "9.1.1",
"typescript": "5.4.5"
}
}

View File

@ -7,6 +7,7 @@ rust-version = "1.74"
[features]
default = []
napi = ["dep:napi", "dep:napi-derive"]
ci = []
[lib]
crate-type = ["cdylib", "lib"]
@ -25,25 +26,27 @@ cuid2 = { workspace = true }
emojis = { workspace = true }
idna = { workspace = true }
image = { workspace = true }
isahc = { workspace = true }
nom-exif = { workspace = true }
once_cell = { workspace = true }
openssl = { workspace = true, features = ["vendored"] }
rand = { workspace = true }
redis = { workspace = true }
regex = { workspace = true }
reqwest = { workspace = true, features = ["blocking"] }
rmp-serde = { workspace = true }
sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_yaml = { workspace = true }
strum = { workspace = true, features = ["derive"] }
sysinfo = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
url = { workspace = true }
urlencoding = { workspace = true }
web-push = { workspace = true }
[dev-dependencies]
pretty_assertions = { workspace = true }

View File

@ -41,7 +41,6 @@ export interface ServerConfig {
proxySmtp?: string
proxyBypassHosts?: Array<string>
allowedPrivateNetworks?: Array<string>
/** `NapiValue` is not implemented for `u64` */
maxFileSize?: number
accessLog?: string
clusterLimits?: WorkerConfigInternal
@ -212,23 +211,31 @@ export interface Acct {
}
export function stringToAcct(acct: string): Acct
export function acctToString(acct: Acct): string
export function showServerInfo(): void
export function initializeRustLogger(): void
export function addNoteToAntenna(antennaId: string, note: Note): void
/**
* @param host punycoded instance host
* @returns whether the given host should be blocked
*/
* Checks if a server is blocked.
*
* ## Argument
* `host` - punycoded instance host
*/
export function isBlockedServer(host: string): Promise<boolean>
/**
* @param host punycoded instance host
* @returns whether the given host should be limited
*/
* Checks if a server is silenced.
*
* ## Argument
* `host` - punycoded instance host
*/
export function isSilencedServer(host: string): Promise<boolean>
/**
* @param host punycoded instance host
* @returns whether the given host is allowlisted (this is always true if private mode is disabled)
*/
* Checks if a server is allowlisted.
* Returns `Ok(true)` if private mode is disabled.
*
* ## Argument
* `host` - punycoded instance host
*/
export function isAllowedServer(host: string): Promise<boolean>
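For context, a minimal sketch of how these three checks could be combined when deciding how to treat traffic from a remote host; the `federationPolicy` helper and the `backend-rs` import path are illustrative assumptions, not part of this changeset:

```ts
import { isBlockedServer, isSilencedServer, isAllowedServer } from "backend-rs";

// Hypothetical helper: `host` must already be punycoded, as the bindings expect.
async function federationPolicy(host: string): Promise<"reject" | "limit" | "accept"> {
  if (await isBlockedServer(host)) return "reject";
  if (!(await isAllowedServer(host))) return "reject"; // only restrictive when private mode is on
  if (await isSilencedServer(host)) return "limit";
  return "accept";
}
```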
/** TODO: handle name collisions better */
export interface NoteLikeForCheckWordMute {
fileIds: Array<string>
userId: string | null
@ -253,7 +260,6 @@ export interface ImageSize {
height: number
}
export function getImageSizeFromUrl(url: string): Promise<ImageSize>
/** TODO: handle name collisions better */
export interface NoteLikeForGetNoteSummary {
fileIds: Array<string>
text: string | null
@ -261,6 +267,29 @@ export interface NoteLikeForGetNoteSummary {
hasPoll: boolean
}
export function getNoteSummary(note: NoteLikeForGetNoteSummary): string
export interface Cpu {
model: string
cores: number
}
export interface Memory {
/** Total memory amount in bytes */
total: number
/** Used memory amount in bytes */
used: number
/** Available (for (re)use) memory amount in bytes */
available: number
}
export interface Storage {
/** Total storage space in bytes */
total: number
/** Used storage space in bytes */
used: number
}
export function cpuInfo(): Cpu
export function cpuUsage(): number
export function memoryUsage(): Memory
export function storageUsage(): Storage | null
export function isSafeUrl(url: string): boolean
export function latestVersion(): Promise<string>
export function toMastodonId(firefishId: string): string | null
export function fromMastodonId(mastodonId: string): string | null
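A small usage sketch of the new system stats bindings, assuming they are imported from `backend-rs`; the snapshot shape below is made up for illustration:

```ts
import { cpuInfo, cpuUsage, memoryUsage, storageUsage } from "backend-rs";

// Hypothetical one-shot snapshot of server resources (byte values per the docs above).
function serverStatsSnapshot() {
  const cpu = cpuInfo();
  const mem = memoryUsage();
  const disk = storageUsage(); // null if no suitable disk is found

  return {
    cpu: { model: cpu.model, cores: cpu.cores, usagePercent: cpuUsage() },
    memory: { usedMiB: mem.used / 1048576, totalMiB: mem.total / 1048576 },
    storage: disk ? { usedBytes: disk.used, totalBytes: disk.total } : null,
  };
}
```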
@ -1122,6 +1151,7 @@ export interface UserProfile {
preventAiLearning: boolean
isIndexable: boolean
mutedPatterns: Array<string>
lang: string | null
}
export interface UserPublickey {
userId: string
@ -1147,9 +1177,117 @@ export interface Webhook {
latestSentAt: Date | null
latestStatus: number | null
}
export function initializeRustLogger(): void
export function fetchNodeinfo(host: string): Promise<Nodeinfo>
export function nodeinfo_2_1(): Promise<any>
export function nodeinfo_2_0(): Promise<any>
/** NodeInfo schema version 2.0. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.0 */
export interface Nodeinfo {
/** The schema version, must be 2.0. */
version: string
/** Metadata about server software in use. */
software: Software20
/** The protocols supported on this server. */
protocols: Array<Protocol>
/** The third party sites this server can connect to via their application API. */
services: Services
/** Whether this server allows open self-registration. */
openRegistrations: boolean
/** Usage statistics for this server. */
usage: Usage
/** Free form key value pairs for software specific values. Clients should not rely on any specific key present. */
metadata: Record<string, any>
}
/** Metadata about server software in use (version 2.0). */
export interface Software20 {
/** The canonical name of this server software. */
name: string
/** The version of this server software. */
version: string
}
export enum Protocol {
Activitypub = 'activitypub',
Buddycloud = 'buddycloud',
Dfrn = 'dfrn',
Diaspora = 'diaspora',
Libertree = 'libertree',
Ostatus = 'ostatus',
Pumpio = 'pumpio',
Tent = 'tent',
Xmpp = 'xmpp',
Zot = 'zot'
}
/** The third party sites this server can connect to via their application API. */
export interface Services {
/** The third party sites this server can retrieve messages from for combined display with regular traffic. */
inbound: Array<Inbound>
/** The third party sites this server can publish messages to on the behalf of a user. */
outbound: Array<Outbound>
}
/** The third party sites this server can retrieve messages from for combined display with regular traffic. */
export enum Inbound {
Atom1 = 'atom1',
Gnusocial = 'gnusocial',
Imap = 'imap',
Pnut = 'pnut',
Pop3 = 'pop3',
Pumpio = 'pumpio',
Rss2 = 'rss2',
Twitter = 'twitter'
}
/** The third party sites this server can publish messages to on the behalf of a user. */
export enum Outbound {
Atom1 = 'atom1',
Blogger = 'blogger',
Buddycloud = 'buddycloud',
Diaspora = 'diaspora',
Dreamwidth = 'dreamwidth',
Drupal = 'drupal',
Facebook = 'facebook',
Friendica = 'friendica',
Gnusocial = 'gnusocial',
Google = 'google',
Insanejournal = 'insanejournal',
Libertree = 'libertree',
Linkedin = 'linkedin',
Livejournal = 'livejournal',
Mediagoblin = 'mediagoblin',
Myspace = 'myspace',
Pinterest = 'pinterest',
Pnut = 'pnut',
Posterous = 'posterous',
Pumpio = 'pumpio',
Redmatrix = 'redmatrix',
Rss2 = 'rss2',
Smtp = 'smtp',
Tent = 'tent',
Tumblr = 'tumblr',
Twitter = 'twitter',
Wordpress = 'wordpress',
Xmpp = 'xmpp'
}
/** Usage statistics for this server. */
export interface Usage {
users: Users
localPosts: number | null
localComments: number | null
}
/** statistics about the users of this server. */
export interface Users {
total: number | null
activeHalfyear: number | null
activeMonth: number | null
}
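As an illustration of the new fetch API (everything other than `fetchNodeinfo` and the declared fields is an assumption):

```ts
import { fetchNodeinfo } from "backend-rs";

// Hypothetical: log which software a remote instance runs.
async function describeInstance(host: string) {
  const info = await fetchNodeinfo(host); // rejects if the host advertises no nodeinfo
  console.log(`${host} runs ${info.software.name} ${info.software.version}`);
  console.log(`open registrations: ${info.openRegistrations}`);
  return info;
}
```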
export function watchNote(watcherId: string, noteAuthorId: string, noteId: string): Promise<void>
export function unwatchNote(watcherId: string, noteId: string): Promise<void>
export enum PushNotificationKind {
Generic = 'generic',
Chat = 'chat',
ReadAllChats = 'readAllChats',
ReadAllChatsInTheRoom = 'readAllChatsInTheRoom',
ReadNotifications = 'readNotifications',
ReadAllNotifications = 'readAllNotifications'
}
export function sendPushNotification(receiverUserId: string, kind: PushNotificationKind, content: any): Promise<void>
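A hedged sketch of how the ported sender might be called; the payload fields are illustrative only, since the expected content shape is defined by the client-side service worker, not by this declaration:

```ts
import { PushNotificationKind, sendPushNotification } from "backend-rs";

// Hypothetical: push a new chat message to the recipient's subscribed devices.
async function notifyNewChatMessage(receiverUserId: string, messageId: string, body: string) {
  await sendPushNotification(receiverUserId, PushNotificationKind.Chat, {
    id: messageId,
    text: body,
  });
}
```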
export function publishToChannelStream(channelId: string, userId: string): void
export enum ChatEvent {
Message = 'message',
@ -1195,4 +1333,6 @@ export function getTimestamp(id: string): number
export function genId(): string
/** Generate an ID using a specific datetime */
export function genIdAt(date: Date): string
export function secureRndstr(length?: number | undefined | null): string
/** Generate random string based on [thread_rng] and [Alphanumeric]. */
export function generateSecureRandomString(length: number): string
export function generateUserToken(): string
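For reference, a minimal sketch of the renamed random-string helpers (`secureRndstr` is now `generateSecureRandomString`, and the length argument is required); the 32-character length is an arbitrary example:

```ts
import { generateSecureRandomString, generateUserToken } from "backend-rs";

// Hypothetical: mint credentials for a newly created session and app.
const userToken = generateUserToken();
const appSecret = generateSecureRandomString(32);
```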

View File

@ -310,7 +310,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, watchNote, unwatchNote, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, showServerInfo, initializeRustLogger, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, cpuInfo, cpuUsage, memoryUsage, storageUsage, isSafeUrl, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, fetchNodeinfo, nodeinfo_2_1, nodeinfo_2_0, Protocol, Inbound, Outbound, watchNote, unwatchNote, PushNotificationKind, sendPushNotification, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, generateSecureRandomString, generateUserToken } = nativeBinding
module.exports.SECOND = SECOND
module.exports.MINUTE = MINUTE
@ -323,6 +323,8 @@ module.exports.loadEnv = loadEnv
module.exports.loadConfig = loadConfig
module.exports.stringToAcct = stringToAcct
module.exports.acctToString = acctToString
module.exports.showServerInfo = showServerInfo
module.exports.initializeRustLogger = initializeRustLogger
module.exports.addNoteToAntenna = addNoteToAntenna
module.exports.isBlockedServer = isBlockedServer
module.exports.isSilencedServer = isSilencedServer
@ -339,6 +341,11 @@ module.exports.safeForSql = safeForSql
module.exports.formatMilliseconds = formatMilliseconds
module.exports.getImageSizeFromUrl = getImageSizeFromUrl
module.exports.getNoteSummary = getNoteSummary
module.exports.cpuInfo = cpuInfo
module.exports.cpuUsage = cpuUsage
module.exports.memoryUsage = memoryUsage
module.exports.storageUsage = storageUsage
module.exports.isSafeUrl = isSafeUrl
module.exports.latestVersion = latestVersion
module.exports.toMastodonId = toMastodonId
module.exports.fromMastodonId = fromMastodonId
@ -363,9 +370,16 @@ module.exports.RelayStatusEnum = RelayStatusEnum
module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum
module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum
module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum
module.exports.initializeRustLogger = initializeRustLogger
module.exports.fetchNodeinfo = fetchNodeinfo
module.exports.nodeinfo_2_1 = nodeinfo_2_1
module.exports.nodeinfo_2_0 = nodeinfo_2_0
module.exports.Protocol = Protocol
module.exports.Inbound = Inbound
module.exports.Outbound = Outbound
module.exports.watchNote = watchNote
module.exports.unwatchNote = unwatchNote
module.exports.PushNotificationKind = PushNotificationKind
module.exports.sendPushNotification = sendPushNotification
module.exports.publishToChannelStream = publishToChannelStream
module.exports.ChatEvent = ChatEvent
module.exports.publishToChatStream = publishToChatStream
@ -377,4 +391,5 @@ module.exports.publishToModerationStream = publishToModerationStream
module.exports.getTimestamp = getTimestamp
module.exports.genId = genId
module.exports.genIdAt = genIdAt
module.exports.secureRndstr = secureRndstr
module.exports.generateSecureRandomString = generateSecureRandomString
module.exports.generateUserToken = generateUserToken

View File

@ -22,10 +22,7 @@
}
},
"devDependencies": {
"@napi-rs/cli": "2.18.1"
},
"engines": {
"node": ">= 10"
"@napi-rs/cli": "2.18.3"
},
"scripts": {
"artifacts": "napi artifacts",

View File

@ -22,7 +22,7 @@ struct ServerConfig {
pub proxy_bypass_hosts: Option<Vec<String>>,
pub allowed_private_networks: Option<Vec<String>>,
/// `NapiValue` is not implemented for `u64`
// TODO: i64 -> u64 (NapiValue is not implemented for u64)
pub max_file_size: Option<i64>,
pub access_log: Option<String>,
pub cluster_limits: Option<WorkerConfigInternal>,
@ -298,7 +298,7 @@ fn read_manifest() -> Manifest {
}
#[crate::export]
fn load_config() -> Config {
pub fn load_config() -> Config {
let server_config = read_config_file();
let version = read_meta().version;
let manifest = read_manifest();

View File

@ -2,10 +2,13 @@ use crate::database::{redis_conn, redis_key};
use redis::{Commands, RedisError};
use serde::{Deserialize, Serialize};
#[derive(strum::Display)]
#[derive(strum::Display, Debug)]
pub enum Category {
#[strum(serialize = "fetchUrl")]
FetchUrl,
#[cfg(test)]
#[strum(serialize = "usedOnlyForTesting")]
Test,
}
#[derive(thiserror::Error, Debug)]
@ -18,14 +21,45 @@ pub enum Error {
DeserializeError(#[from] rmp_serde::decode::Error),
}
fn categorize(category: Category, key: &str) -> String {
format!("{}:{}", category, key)
}
#[inline]
fn prefix_key(key: &str) -> String {
redis_key(format!("cache:{}", key))
}
#[inline]
fn categorize(category: Category, key: &str) -> String {
format!("{}:{}", category, key)
}
#[inline]
fn wildcard(category: Category) -> String {
prefix_key(&categorize(category, "*"))
}
/// Sets a Redis cache.
///
/// This overwrites the existing cache entry with the same key.
///
/// ## Arguments
///
/// * `key` - key (will be prefixed automatically)
/// * `value` - (de)serializable value
/// * `expire_seconds` - TTL
///
/// ## Example
///
/// ```
/// # use backend_rs::database::cache;
/// let key = "apple";
/// let data = "I want to cache this string".to_string();
///
/// // caches the data for 10 seconds
/// cache::set(key, &data, 10).unwrap();
///
/// // get the cache
/// let cached_data = cache::get::<String>(key).unwrap();
/// assert_eq!(data, cached_data.unwrap());
/// ```
pub fn set<V: for<'a> Deserialize<'a> + Serialize>(
key: &str,
value: &V,
@ -39,6 +73,33 @@ pub fn set<V: for<'a> Deserialize<'a> + Serialize>(
Ok(())
}
/// Gets a Redis cache.
///
/// If the Redis connection is fine, this returns `Ok(data)` where `data`
/// is the cached value. Returns `Ok(None)` if there is no value corresponding to `key`.
///
/// ## Arguments
///
/// * `key` - key (will be prefixed automatically)
///
/// ## Example
///
/// ```
/// # use backend_rs::database::cache;
/// let key = "banana";
/// let data = "I want to cache this string".to_string();
///
/// // set cache
/// cache::set(key, &data, 10).unwrap();
///
/// // get cache
/// let cached_data = cache::get::<String>(key).unwrap();
/// assert_eq!(data, cached_data.unwrap());
///
/// // get nonexistent (or expired) cache
/// let no_cache = cache::get::<String>("nonexistent").unwrap();
/// assert!(no_cache.is_none());
/// ```
pub fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V>, Error> {
let serialized_value: Option<Vec<u8>> = redis_conn()?.get(prefix_key(key))?;
Ok(match serialized_value {
@ -47,10 +108,48 @@ pub fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V
})
}
/// Deletes a Redis cache.
///
/// If the Redis connection is fine, this returns `Ok(())`
/// regardless of whether the cache exists.
///
/// ## Arguments
///
/// * `key` - key (will be prefixed automatically)
///
/// ## Example
///
/// ```
/// # use backend_rs::database::cache;
/// let key = "chocolate";
/// let value = "I want to cache this string".to_string();
///
/// // set cache
/// cache::set(key, &value, 10).unwrap();
///
/// // delete the cache
/// cache::delete(key).unwrap();
/// cache::delete("nonexistent").unwrap(); // this is okay
///
/// // the cache is gone
/// let cached_value = cache::get::<String>(key).unwrap();
/// assert!(cached_value.is_none());
/// ```
pub fn delete(key: &str) -> Result<(), Error> {
Ok(redis_conn()?.del(prefix_key(key))?)
}
/// Sets a Redis cache under a `category`.
///
/// The usage is the same as [set], except that you need to
/// use [get_one] and [delete_one] to get/delete the cache.
///
/// ## Arguments
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
/// * `value` - (de)serializable value
/// * `expire_seconds` - TTL
pub fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
@ -60,6 +159,14 @@ pub fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
set(&categorize(category, key), value, expire_seconds)
}
/// Gets a Redis cache under a `category`.
///
/// The usage is basically the same as [get].
///
/// ## Arguments
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
pub fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
@ -67,15 +174,41 @@ pub fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
get(&categorize(category, key))
}
/// Deletes a Redis cache under a `category`.
///
/// The usage is basically the same as [delete].
///
/// ## Arguments
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
pub fn delete_one(category: Category, key: &str) -> Result<(), Error> {
delete(&categorize(category, key))
}
// TODO: set_all(), get_all(), delete_all()
/// Deletes all Redis caches under a `category`.
///
/// ## Arguments
///
/// * `category` - one of [Category]
pub fn delete_all(category: Category) -> Result<(), Error> {
let mut redis = redis_conn()?;
let keys: Vec<Vec<u8>> = redis.keys(wildcard(category))?;
if !keys.is_empty() {
redis.del(keys)?
}
Ok(())
}
// TODO: set_all(), get_all()
#[cfg(test)]
mod unit_test {
use super::{get, set};
use crate::database::cache::delete_one;
use super::{delete_all, get, get_one, set, set_one, Category::Test};
use pretty_assertions::assert_eq;
#[test]
@ -121,4 +254,35 @@ mod unit_test {
assert!(expired_value_2.is_none());
assert!(expired_value_3.is_none());
}
#[test]
fn use_category() {
let key_1 = "fire";
let key_2 = "fish";
let key_3 = "awawa";
let value_1 = "hello".to_string();
let value_2 = 998244353u32;
let value_3 = 'あ';
set_one(Test, key_1, &value_1, 5 * 60).unwrap();
set_one(Test, key_2, &value_2, 5 * 60).unwrap();
set_one(Test, key_3, &value_3, 5 * 60).unwrap();
assert_eq!(get_one::<String>(Test, key_1).unwrap().unwrap(), value_1);
assert_eq!(get_one::<u32>(Test, key_2).unwrap().unwrap(), value_2);
assert_eq!(get_one::<char>(Test, key_3).unwrap().unwrap(), value_3);
delete_one(Test, key_1).unwrap();
assert!(get_one::<String>(Test, key_1).unwrap().is_none());
assert!(get_one::<u32>(Test, key_2).unwrap().is_some());
assert!(get_one::<char>(Test, key_3).unwrap().is_some());
delete_all(Test).unwrap();
assert!(get_one::<String>(Test, key_1).unwrap().is_none());
assert!(get_one::<u32>(Test, key_2).unwrap().is_none());
assert!(get_one::<char>(Test, key_3).unwrap().is_none());
}
}

View File

@ -1,8 +1,9 @@
use crate::config::CONFIG;
use once_cell::sync::OnceCell;
use sea_orm::{ConnectOptions, Database, DbConn, DbErr};
use tracing::log::LevelFilter;
static DB_CONN: once_cell::sync::OnceCell<DbConn> = once_cell::sync::OnceCell::new();
static DB_CONN: OnceCell<DbConn> = OnceCell::new();
async fn init_database() -> Result<&'static DbConn, DbErr> {
let database_uri = format!(

View File

@ -1,7 +1,8 @@
use crate::config::CONFIG;
use once_cell::sync::OnceCell;
use redis::{Client, Connection, RedisError};
static REDIS_CLIENT: once_cell::sync::OnceCell<Client> = once_cell::sync::OnceCell::new();
static REDIS_CLIENT: OnceCell<Client> = OnceCell::new();
fn init_redis() -> Result<Client, RedisError> {
let redis_url = {
@ -26,7 +27,7 @@ fn init_redis() -> Result<Client, RedisError> {
params.concat()
};
tracing::info!("Initializing Redis connection");
tracing::info!("Initializing Redis client");
Client::open(redis_url)
}
@ -38,8 +39,8 @@ pub fn redis_conn() -> Result<Connection, RedisError> {
}
}
#[inline]
/// prefix redis key
#[inline]
pub fn key(key: impl ToString) -> String {
format!("{}:{}", CONFIG.redis_key_prefix, key.to_string())
}

View File

@ -0,0 +1,105 @@
use std::fmt;
use std::str::FromStr;
#[derive(Debug, PartialEq)]
#[crate::export(object)]
pub struct Acct {
pub username: String,
pub host: Option<String>,
}
impl FromStr for Acct {
type Err = ();
/// This never throws errors. Feel free to `.unwrap()` the result.
fn from_str(value: &str) -> Result<Self, Self::Err> {
let split: Vec<&str> = if let Some(stripped) = value.strip_prefix('@') {
stripped
} else {
value
}
.split('@')
.collect();
Ok(Self {
username: split[0].to_string(),
host: if split.len() == 1 {
None
} else {
Some(split[1].to_string())
},
})
}
}
impl fmt::Display for Acct {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let result = match &self.host {
Some(host) => format!("{}@{}", self.username, host),
None => self.username.clone(),
};
write!(f, "{result}")
}
}
impl From<Acct> for String {
fn from(value: Acct) -> Self {
value.to_string()
}
}
#[crate::ts_only_warn("Use `acct.parse().unwrap()` or `Acct::from_str(acct).unwrap()` instead.")]
#[crate::export]
pub fn string_to_acct(acct: &str) -> Acct {
Acct::from_str(acct).unwrap()
}
#[crate::ts_only_warn("Use `acct.to_string()` instead.")]
#[crate::export]
pub fn acct_to_string(acct: &Acct) -> String {
acct.to_string()
}
#[cfg(test)]
mod unit_test {
use super::Acct;
use pretty_assertions::assert_eq;
use std::str::FromStr;
#[test]
fn test_acct_to_string() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),
};
let local_acct = Acct {
username: "MisakaMikoto".to_string(),
host: None,
};
assert_eq!(remote_acct.to_string(), "firefish@example.com");
assert_ne!(remote_acct.to_string(), "mastodon@example.com");
assert_eq!(local_acct.to_string(), "MisakaMikoto");
assert_ne!(local_acct.to_string(), "ShiraiKuroko");
}
#[test]
fn test_string_to_acct() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),
};
let local_acct = Acct {
username: "MisakaMikoto".to_string(),
host: None,
};
assert_eq!(
Acct::from_str("@firefish@example.com").unwrap(),
remote_acct
);
assert_eq!(Acct::from_str("firefish@example.com").unwrap(), remote_acct);
assert_eq!(Acct::from_str("@MisakaMikoto").unwrap(), local_acct);
assert_eq!(Acct::from_str("MisakaMikoto").unwrap(), local_acct);
}
}

View File

@ -0,0 +1 @@
pub mod acct;

View File

@ -0,0 +1,39 @@
use std::sync::{Mutex, MutexGuard, OnceLock, PoisonError};
use sysinfo::System;
pub type SystemMutexError = PoisonError<MutexGuard<'static, System>>;
// TODO: handle this in a more proper way when we move the entry point to backend-rs
pub fn system() -> Result<MutexGuard<'static, System>, SystemMutexError> {
pub static SYSTEM: OnceLock<Mutex<System>> = OnceLock::new();
SYSTEM.get_or_init(|| Mutex::new(System::new_all())).lock()
}
#[crate::export]
pub fn show_server_info() -> Result<(), SystemMutexError> {
let system_info = system()?;
tracing::info!(
"Hostname: {}",
System::host_name().unwrap_or("unknown".to_string())
);
tracing::info!(
"OS: {}",
System::long_os_version().unwrap_or("unknown".to_string())
);
tracing::info!(
"Kernel: {}",
System::kernel_version().unwrap_or("unknown".to_string())
);
tracing::info!(
"CPU architecture: {}",
System::cpu_arch().unwrap_or("unknown".to_string())
);
tracing::info!("CPU threads: {}", system_info.cpus().len());
tracing::info!("Total memory: {} MiB", system_info.total_memory() / 1048576);
tracing::info!("Free memory: {} MiB", system_info.free_memory() / 1048576);
tracing::info!("Total swap: {} MiB", system_info.total_swap() / 1048576);
tracing::info!("Free swap: {} MiB", system_info.free_swap() / 1048576);
Ok(())
}

View File

@ -0,0 +1,2 @@
pub mod hardware_stats;
pub mod log;

View File

@ -1,7 +1,9 @@
pub use macro_rs::export;
pub use macro_rs::{export, ts_only_warn};
pub mod config;
pub mod database;
pub mod federation;
pub mod init;
pub mod misc;
pub mod model;
pub mod service;

View File

@ -1,74 +0,0 @@
#[derive(Debug, PartialEq)]
#[crate::export(object)]
pub struct Acct {
pub username: String,
pub host: Option<String>,
}
#[crate::export]
pub fn string_to_acct(acct: &str) -> Acct {
let split: Vec<&str> = if let Some(stripped) = acct.strip_prefix('@') {
stripped
} else {
acct
}
.split('@')
.collect();
Acct {
username: split[0].to_string(),
host: if split.len() == 1 {
None
} else {
Some(split[1].to_string())
},
}
}
#[crate::export]
pub fn acct_to_string(acct: &Acct) -> String {
match &acct.host {
Some(host) => format!("{}@{}", acct.username, host),
None => acct.username.clone(),
}
}
#[cfg(test)]
mod unit_test {
use super::{acct_to_string, string_to_acct, Acct};
use pretty_assertions::assert_eq;
#[test]
fn test_acct_to_string() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),
};
let local_acct = Acct {
username: "MisakaMikoto".to_string(),
host: None,
};
assert_eq!(acct_to_string(&remote_acct), "firefish@example.com");
assert_ne!(acct_to_string(&remote_acct), "mastodon@example.com");
assert_eq!(acct_to_string(&local_acct), "MisakaMikoto");
assert_ne!(acct_to_string(&local_acct), "ShiraiKuroko");
}
#[test]
fn test_string_to_acct() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),
};
let local_acct = Acct {
username: "MisakaMikoto".to_string(),
host: None,
};
assert_eq!(string_to_acct("@firefish@example.com"), remote_acct);
assert_eq!(string_to_acct("firefish@example.com"), remote_acct);
assert_eq!(string_to_acct("@MisakaMikoto"), local_acct);
assert_eq!(string_to_acct("MisakaMikoto"), local_acct);
}
}

View File

@ -1,10 +1,10 @@
use crate::misc::meta::fetch_meta;
use sea_orm::DbErr;
/**
* @param host punycoded instance host
* @returns whether the given host should be blocked
*/
/// Checks if a server is blocked.
///
/// ## Argument
/// `host` - punycoded instance host
#[crate::export]
pub async fn is_blocked_server(host: &str) -> Result<bool, DbErr> {
Ok(fetch_meta(true)
@ -16,10 +16,10 @@ pub async fn is_blocked_server(host: &str) -> Result<bool, DbErr> {
}))
}
/**
* @param host punycoded instance host
* @returns whether the given host should be limited
*/
/// Checks if a server is silenced.
///
/// ## Argument
/// `host` - punycoded instance host
#[crate::export]
pub async fn is_silenced_server(host: &str) -> Result<bool, DbErr> {
Ok(fetch_meta(true)
@ -31,10 +31,11 @@ pub async fn is_silenced_server(host: &str) -> Result<bool, DbErr> {
}))
}
/**
* @param host punycoded instance host
* @returns whether the given host is allowlisted (this is always true if private mode is disabled)
*/
/// Checks if a server is allowlisted.
/// Returns `Ok(true)` if private mode is disabled.
///
/// ## Argument
/// `host` - punycoded instance host
#[crate::export]
pub async fn is_allowed_server(host: &str) -> Result<bool, DbErr> {
let meta = fetch_meta(true).await?;

View File

@ -4,7 +4,7 @@ use once_cell::sync::Lazy;
use regex::Regex;
use sea_orm::{prelude::*, QuerySelect};
/// TODO: handle name collisions better
// TODO: handle name collisions in a better way
#[crate::export(object, js_name = "NoteLikeForCheckWordMute")]
pub struct NoteLike {
pub file_ids: Vec<String>,

View File

@ -1,6 +1,7 @@
use crate::database::cache;
use crate::util::http_client;
use image::{io::Reader, ImageError, ImageFormat};
use isahc::ReadResponseExt;
use nom_exif::{parse_jpeg_exif, EntryValue, ExifTag};
use std::io::Cursor;
use tokio::sync::Mutex;
@ -9,8 +10,12 @@ use tokio::sync::Mutex;
pub enum Error {
#[error("Redis cache error: {0}")]
CacheErr(#[from] cache::Error),
#[error("Reqwest error: {0}")]
ReqwestErr(#[from] reqwest::Error),
#[error("HTTP client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
#[error("Isahc error: {0}")]
IsahcErr(#[from] isahc::Error),
#[error("HTTP error: {0}")]
HttpErr(String),
#[error("Image decoding error: {0}")]
ImageErr(#[from] ImageError),
#[error("Image decoding error: {0}")]
@ -64,7 +69,16 @@ pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
tracing::info!("retrieving image size from {}", url);
let image_bytes = http_client()?.get(url).send().await?.bytes().await?;
let mut response = http_client::client()?.get(url)?;
if !response.status().is_success() {
tracing::info!("status: {}", response.status());
tracing::debug!("response body: {:#?}", response.body());
return Err(Error::HttpErr(format!("Failed to get image from {}", url)));
}
let image_bytes = response.bytes()?;
let reader = Reader::new(Cursor::new(&image_bytes)).with_guessed_format()?;
let format = reader.format();
@ -123,17 +137,8 @@ mod unit_test {
let gif_url = "https://firefish.dev/firefish/firefish/-/raw/b9c3dfbd3d473cb2cee20c467eeae780bc401271/packages/backend/test/resources/anime.gif";
let mp3_url = "https://firefish.dev/firefish/firefish/-/blob/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/sounds/aisha/1.mp3";
// Delete caches in case you run this test multiple times
// (should be disabled in CI tasks)
cache::delete_one(cache::Category::FetchUrl, png_url_1).unwrap();
cache::delete_one(cache::Category::FetchUrl, png_url_2).unwrap();
cache::delete_one(cache::Category::FetchUrl, png_url_3).unwrap();
cache::delete_one(cache::Category::FetchUrl, rotated_jpeg_url).unwrap();
cache::delete_one(cache::Category::FetchUrl, webp_url_1).unwrap();
cache::delete_one(cache::Category::FetchUrl, webp_url_2).unwrap();
cache::delete_one(cache::Category::FetchUrl, ico_url).unwrap();
cache::delete_one(cache::Category::FetchUrl, gif_url).unwrap();
cache::delete_one(cache::Category::FetchUrl, mp3_url).unwrap();
// delete caches in case you run this test multiple times
cache::delete_all(cache::Category::FetchUrl).unwrap();
let png_size_1 = ImageSize {
width: 1024,
@ -196,4 +201,15 @@ mod unit_test {
assert_eq!(gif_size, get_image_size_from_url(gif_url).await.unwrap());
assert!(get_image_size_from_url(mp3_url).await.is_err());
}
#[tokio::test]
async fn too_many_attempts() {
let url = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/splash.png";
// delete caches in case you run this test multiple times
cache::delete_one(cache::Category::FetchUrl, url).unwrap();
assert!(get_image_size_from_url(url).await.is_ok());
assert!(get_image_size_from_url(url).await.is_err());
}
}

View File

@ -1,4 +1,8 @@
/// TODO: handle name collisions better
use serde::{Deserialize, Serialize};
// TODO: handle name collisions in a better way
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, js_name = "NoteLikeForGetNoteSummary")]
pub struct NoteLike {
pub file_ids: Vec<String>,

View File

@ -0,0 +1,90 @@
use crate::init::hardware_stats::{system, SystemMutexError};
use sysinfo::{Disks, MemoryRefreshKind};
// TODO: i64 -> u64 (we can't export u64 to Node.js)
#[crate::export(object)]
pub struct Cpu {
pub model: String,
// TODO: u16 -> usize (we can't export usize to Node.js)
pub cores: u16,
}
#[crate::export(object)]
pub struct Memory {
/// Total memory amount in bytes
pub total: i64,
/// Used memory amount in bytes
pub used: i64,
/// Available (for (re)use) memory amount in bytes
pub available: i64,
}
#[crate::export(object)]
pub struct Storage {
/// Total storage space in bytes
pub total: i64,
/// Used storage space in bytes
pub used: i64,
}
#[crate::export]
pub fn cpu_info() -> Result<Cpu, SystemMutexError> {
let system_info = system()?;
Ok(Cpu {
model: match system_info.cpus() {
[] => {
tracing::debug!("failed to get CPU info");
"unknown".to_string()
}
cpus => cpus[0].brand().to_string(),
},
cores: system_info.cpus().len() as u16,
})
}
#[crate::export]
pub fn cpu_usage() -> Result<f32, SystemMutexError> {
let mut system_info = system()?;
system_info.refresh_cpu_usage();
let total_cpu_usage: f32 = system_info.cpus().iter().map(|cpu| cpu.cpu_usage()).sum();
let cpu_threads = system_info.cpus().len();
Ok(total_cpu_usage / (cpu_threads as f32))
}
#[crate::export]
pub fn memory_usage() -> Result<Memory, SystemMutexError> {
let mut system_info = system()?;
system_info.refresh_memory_specifics(MemoryRefreshKind::new().with_ram());
Ok(Memory {
total: system_info.total_memory() as i64,
used: system_info.used_memory() as i64,
available: system_info.available_memory() as i64,
})
}
#[crate::export]
pub fn storage_usage() -> Option<Storage> {
// Get the first disk that is actually used.
let disks = Disks::new_with_refreshed_list();
let disk = disks
.iter()
.find(|disk| disk.available_space() > 0 && disk.total_space() > disk.available_space());
if let Some(disk) = disk {
let total = disk.total_space() as i64;
let available = disk.available_space() as i64;
return Some(Storage {
total,
used: total - available,
});
}
tracing::debug!("failed to get stats");
None
}

View File

@ -0,0 +1,34 @@
#[crate::export]
pub fn is_safe_url(url: &str) -> bool {
if let Ok(url) = url.parse::<url::Url>() {
if url.host_str().unwrap_or_default() == "unix"
|| !["http", "https"].contains(&url.scheme())
|| ![None, Some(80), Some(443)].contains(&url.port())
{
return false;
}
true
} else {
false
}
}
#[cfg(test)]
mod unit_test {
use super::is_safe_url;
#[test]
fn safe_url() {
assert!(is_safe_url("http://firefish.dev/firefish/firefish"));
assert!(is_safe_url("https://firefish.dev/firefish/firefish"));
assert!(is_safe_url("http://firefish.dev:80/firefish/firefish"));
assert!(is_safe_url("https://firefish.dev:80/firefish/firefish"));
assert!(is_safe_url("http://firefish.dev:443/firefish/firefish"));
assert!(is_safe_url("https://firefish.dev:443/firefish/firefish"));
assert!(!is_safe_url("https://unix/firefish/firefish"));
assert!(!is_safe_url("https://firefish.dev:35/firefish/firefish"));
assert!(!is_safe_url("ftp://firefish.dev/firefish/firefish"));
assert!(!is_safe_url("nyaa"));
assert!(!is_safe_url(""));
}
}

View File

@ -1,18 +1,25 @@
use crate::database::cache;
use crate::util::http_client::http_client;
use crate::util::http_client;
use isahc::ReadResponseExt;
use serde::{Deserialize, Serialize};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Cache error: {0}")]
CacheErr(#[from] cache::Error),
#[error("Reqwest error: {0}")]
ReqwestErr(#[from] reqwest::Error),
#[error("Isahc error: {0}")]
IsahcErr(#[from] isahc::Error),
#[error("HTTP client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
#[error("HTTP error: {0}")]
HttpErr(String),
#[error("Response parsing error: {0}")]
IoErr(#[from] std::io::Error),
#[error("Failed to deserialize JSON: {0}")]
JsonErr(#[from] serde_json::Error),
}
const UPSTREAM_PACKAGE_JSON_URL: &'static str =
const UPSTREAM_PACKAGE_JSON_URL: &str =
"https://firefish.dev/firefish/firefish/-/raw/main/package.json";
async fn get_latest_version() -> Result<String, Error> {
@ -21,13 +28,17 @@ async fn get_latest_version() -> Result<String, Error> {
version: String,
}
let res = http_client()?
.get(UPSTREAM_PACKAGE_JSON_URL)
.send()
.await?
.text()
.await?;
let res_parsed: Response = serde_json::from_str(&res)?;
let mut response = http_client::client()?.get(UPSTREAM_PACKAGE_JSON_URL)?;
if !response.status().is_success() {
tracing::info!("status: {}", response.status());
tracing::debug!("response body: {:#?}", response.body());
return Err(Error::HttpErr(
"Failed to fetch version from Firefish GitLab".to_string(),
));
}
let res_parsed: Response = serde_json::from_str(&response.text()?)?;
Ok(res_parsed.version)
}
@ -61,9 +72,9 @@ mod unit_test {
use crate::database::cache;
fn validate_version(version: String) {
// version: YYYYMMDD
assert!(version.len() == 8);
assert!(version.chars().all(|c| c.is_ascii_digit()));
// version: YYYYMMDD or YYYYMMDD-X
assert!(version.len() >= 8);
assert!(version[..8].chars().all(|c| c.is_ascii_digit()));
// YYYY
assert!(&version[..4] >= "2024");
@ -73,13 +84,19 @@ mod unit_test {
assert!(&version[4..6] <= "12");
// DD
assert!(&version[6..] >= "01");
assert!(&version[6..] <= "31");
assert!(&version[6..8] >= "01");
assert!(&version[6..8] <= "31");
// -X
if version.len() > 8 {
assert!(version.chars().nth(8).unwrap() == '-');
assert!(version[9..].chars().all(|c| c.is_ascii_digit()));
}
}
#[tokio::test]
async fn check_version() {
// TODO: don't need to do this in CI tasks
// delete caches in case you run this test multiple times
cache::delete_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL).unwrap();
// fetch from firefish.dev

View File

@ -1,4 +1,3 @@
pub mod acct;
pub mod add_note_to_antenna;
pub mod check_server_block;
pub mod check_word_mute;
@ -8,6 +7,8 @@ pub mod escape_sql;
pub mod format_milliseconds;
pub mod get_image_size;
pub mod get_note_summary;
pub mod hardware_stats;
pub mod is_safe_url;
pub mod latest_version;
pub mod mastodon_id;
pub mod meta;

View File

@ -78,6 +78,7 @@ pub struct Model {
pub is_indexable: bool,
#[sea_orm(column_name = "mutedPatterns")]
pub muted_patterns: Vec<String>,
pub lang: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@ -1,3 +1,4 @@
pub mod log;
pub mod nodeinfo;
pub mod note;
pub mod push_notification;
pub mod stream;

View File

@ -0,0 +1,161 @@
use crate::service::nodeinfo::schema::*;
use crate::util::http_client;
use isahc::AsyncReadResponseExt;
use serde::{Deserialize, Serialize};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Http client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
#[error("Http error: {0}")]
HttpErr(#[from] isahc::Error),
#[error("Bad status: {0}")]
BadStatus(String),
#[error("Failed to parse response body as text: {0}")]
ResponseErr(#[from] std::io::Error),
#[error("Failed to parse response body as json: {0}")]
JsonErr(#[from] serde_json::Error),
#[error("No nodeinfo provided")]
MissingNodeinfo,
}
#[derive(Deserialize, Serialize, Debug)]
pub struct NodeinfoLinks {
links: Vec<NodeinfoLink>,
}
#[derive(Deserialize, Serialize, Debug)]
pub struct NodeinfoLink {
rel: String,
href: String,
}
#[inline]
fn wellknown_nodeinfo_url(host: &str) -> String {
format!("https://{}/.well-known/nodeinfo", host)
}
async fn fetch_nodeinfo_links(host: &str) -> Result<NodeinfoLinks, Error> {
let client = http_client::client()?;
let wellknown_url = wellknown_nodeinfo_url(host);
let mut wellknown_response = client.get_async(&wellknown_url).await?;
if !wellknown_response.status().is_success() {
tracing::debug!("{:#?}", wellknown_response.body());
return Err(Error::BadStatus(format!(
"{} returned {}",
wellknown_url,
wellknown_response.status()
)));
}
Ok(serde_json::from_str(&wellknown_response.text().await?)?)
}
fn check_nodeinfo_link(links: NodeinfoLinks) -> Result<String, Error> {
for link in links.links {
if link.rel == "http://nodeinfo.diaspora.software/ns/schema/2.1"
|| link.rel == "http://nodeinfo.diaspora.software/ns/schema/2.0"
{
return Ok(link.href);
}
}
Err(Error::MissingNodeinfo)
}
async fn fetch_nodeinfo_impl(nodeinfo_link: &str) -> Result<Nodeinfo20, Error> {
let client = http_client::client()?;
let mut response = client.get_async(nodeinfo_link).await?;
if !response.status().is_success() {
tracing::debug!("{:#?}", response.body());
return Err(Error::BadStatus(format!(
"{} returned {}",
nodeinfo_link,
response.status()
)));
}
Ok(serde_json::from_str(&response.text().await?)?)
}
// for napi export
type Nodeinfo = Nodeinfo20;
#[crate::export]
pub async fn fetch_nodeinfo(host: &str) -> Result<Nodeinfo, Error> {
tracing::info!("fetching from {}", host);
let links = fetch_nodeinfo_links(host).await?;
let nodeinfo_link = check_nodeinfo_link(links)?;
fetch_nodeinfo_impl(&nodeinfo_link).await
}
#[cfg(test)]
mod unit_test {
use super::{check_nodeinfo_link, fetch_nodeinfo, NodeinfoLink, NodeinfoLinks};
use pretty_assertions::assert_eq;
#[test]
fn test_check_nodeinfo_link() {
let links_1 = NodeinfoLinks {
links: vec![
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.0".to_string(),
href: "https://example.com/dummy".to_string(),
},
NodeinfoLink {
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0".to_string(),
href: "https://example.com/real".to_string(),
},
],
};
assert_eq!(
check_nodeinfo_link(links_1).unwrap(),
"https://example.com/real"
);
let links_2 = NodeinfoLinks {
links: vec![
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.0".to_string(),
href: "https://example.com/dummy".to_string(),
},
NodeinfoLink {
rel: "http://nodeinfo.diaspora.software/ns/schema/2.1".to_string(),
href: "https://example.com/real".to_string(),
},
],
};
assert_eq!(
check_nodeinfo_link(links_2).unwrap(),
"https://example.com/real"
);
let links_3 = NodeinfoLinks {
links: vec![
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.0".to_string(),
href: "https://example.com/dummy/2.0".to_string(),
},
NodeinfoLink {
rel: "https://example.com/incorrect/schema/2.1".to_string(),
href: "https://example.com/dummy/2.1".to_string(),
},
],
};
check_nodeinfo_link(links_3).expect_err("No nodeinfo");
}
#[tokio::test]
async fn test_fetch_nodeinfo() {
assert_eq!(
fetch_nodeinfo("info.firefish.dev")
.await
.unwrap()
.software
.name,
"firefish"
);
}
}

View File

@ -0,0 +1,142 @@
use crate::config::CONFIG;
use crate::database::cache;
use crate::database::db_conn;
use crate::misc::meta::fetch_meta;
use crate::model::entity::{note, user};
use crate::service::nodeinfo::schema::*;
use sea_orm::{ColumnTrait, DbErr, EntityTrait, PaginatorTrait, QueryFilter};
use serde_json::json;
use std::collections::HashMap;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Database error: {0}")]
DbErr(#[from] DbErr),
#[error("Cache error: {0}")]
CacheErr(#[from] cache::Error),
#[error("Failed to serialize nodeinfo to JSON: {0}")]
JsonErr(#[from] serde_json::Error),
}
async fn statistics() -> Result<(u64, u64, u64, u64), DbErr> {
let db = db_conn().await?;
let now = chrono::Local::now().naive_local();
const MONTH: chrono::TimeDelta = chrono::Duration::seconds(2592000000);
const HALF_YEAR: chrono::TimeDelta = chrono::Duration::seconds(15552000000);
let local_users = user::Entity::find()
.filter(user::Column::Host.is_null())
.count(db);
let local_active_halfyear = user::Entity::find()
.filter(user::Column::Host.is_null())
.filter(user::Column::LastActiveDate.gt(now - HALF_YEAR))
.count(db);
let local_active_month = user::Entity::find()
.filter(user::Column::Host.is_null())
.filter(user::Column::LastActiveDate.gt(now - MONTH))
.count(db);
let local_posts = note::Entity::find()
.filter(note::Column::UserHost.is_null())
.count(db);
tokio::try_join!(
local_users,
local_active_halfyear,
local_active_month,
local_posts
)
}
async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
let (local_users, local_active_halfyear, local_active_month, local_posts) =
statistics().await?;
let meta = fetch_meta(true).await?;
let metadata = HashMap::from([
(
"nodeName".to_string(),
json!(meta.name.unwrap_or(CONFIG.host.clone())),
),
("nodeDescription".to_string(), json!(meta.description)),
("repositoryUrl".to_string(), json!(meta.repository_url)),
(
"enableLocalTimeline".to_string(),
json!(!meta.disable_local_timeline),
),
(
"enableRecommendedTimeline".to_string(),
json!(!meta.disable_recommended_timeline),
),
(
"enableGlobalTimeline".to_string(),
json!(!meta.disable_global_timeline),
),
(
"enableGuestTimeline".to_string(),
json!(meta.enable_guest_timeline),
),
(
"maintainer".to_string(),
json!({"name":meta.maintainer_name,"email":meta.maintainer_email}),
),
("proxyAccountName".to_string(), json!(meta.proxy_account_id)),
(
"themeColor".to_string(),
json!(meta.theme_color.unwrap_or("#31748f".to_string())),
),
]);
Ok(Nodeinfo21 {
version: "2.1".to_string(),
software: Software21 {
name: "firefish".to_string(),
version: CONFIG.version.clone(),
repository: Some(meta.repository_url),
homepage: Some("https://firefish.dev/firefish/firefish".to_string()),
},
protocols: vec![Protocol::Activitypub],
services: Services {
inbound: vec![],
outbound: vec![Outbound::Atom1, Outbound::Rss2],
},
open_registrations: !meta.disable_registration,
usage: Usage {
users: Users {
total: Some(local_users as u32),
active_halfyear: Some(local_active_halfyear as u32),
active_month: Some(local_active_month as u32),
},
local_posts: Some(local_posts as u32),
local_comments: None,
},
metadata,
})
}
pub async fn nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
const NODEINFO_2_1_CACHE_KEY: &str = "nodeinfo_2_1";
let cached = cache::get::<Nodeinfo21>(NODEINFO_2_1_CACHE_KEY)?;
if let Some(nodeinfo) = cached {
Ok(nodeinfo)
} else {
let nodeinfo = generate_nodeinfo_2_1().await?;
cache::set(NODEINFO_2_1_CACHE_KEY, &nodeinfo, 60 * 60)?;
Ok(nodeinfo)
}
}
pub async fn nodeinfo_2_0() -> Result<Nodeinfo20, Error> {
Ok(nodeinfo_2_1().await?.into())
}
#[crate::export(js_name = "nodeinfo_2_1")]
pub async fn nodeinfo_2_1_as_json() -> Result<serde_json::Value, Error> {
Ok(serde_json::to_value(nodeinfo_2_1().await?)?)
}
#[crate::export(js_name = "nodeinfo_2_0")]
pub async fn nodeinfo_2_0_as_json() -> Result<serde_json::Value, Error> {
Ok(serde_json::to_value(nodeinfo_2_0().await?)?)
}
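A sketch of how the exported generators might be wired up on the Node side; the discovery document follows the NodeInfo convention, but the handler shape and routes here are assumptions, not code from this changeset:

```ts
import { nodeinfo_2_0, nodeinfo_2_1 } from "backend-rs";

// Hypothetical /.well-known/nodeinfo discovery document for this host.
const wellKnownNodeinfo = (host: string) => ({
  links: [
    { rel: "http://nodeinfo.diaspora.software/ns/schema/2.1", href: `https://${host}/nodeinfo/2.1` },
    { rel: "http://nodeinfo.diaspora.software/ns/schema/2.0", href: `https://${host}/nodeinfo/2.0` },
  ],
});

// The generated documents are cached in Redis for an hour (see nodeinfo_2_1 above).
const serveNodeinfo21 = () => nodeinfo_2_1();
const serveNodeinfo20 = () => nodeinfo_2_0();
```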

View File

@ -0,0 +1,3 @@
pub mod fetch;
pub mod generate;
pub mod schema;

View File

@ -0,0 +1,263 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
// TODO: I want to use these macros but they don't work with rmp_serde
// - #[serde(skip_serializing_if = "Option::is_none")] (https://github.com/3Hren/msgpack-rust/issues/86)
// - #[serde(tag = "version", rename = "2.1")] (https://github.com/3Hren/msgpack-rust/issues/318)
/// NodeInfo schema version 2.1. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Nodeinfo21 {
/// The schema version, must be 2.1.
pub version: String,
/// Metadata about server software in use.
pub software: Software21,
/// The protocols supported on this server.
pub protocols: Vec<Protocol>,
/// The third party sites this server can connect to via their application API.
pub services: Services,
/// Whether this server allows open self-registration.
pub open_registrations: bool,
/// Usage statistics for this server.
pub usage: Usage,
/// Free form key value pairs for software specific values. Clients should not rely on any specific key present.
pub metadata: HashMap<String, serde_json::Value>,
}
/// NodeInfo schema version 2.0. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.0
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, js_name = "Nodeinfo")]
pub struct Nodeinfo20 {
/// The schema version, must be 2.0.
pub version: String,
/// Metadata about server software in use.
pub software: Software20,
/// The protocols supported on this server.
pub protocols: Vec<Protocol>,
/// The third party sites this server can connect to via their application API.
pub services: Services,
/// Whether this server allows open self-registration.
pub open_registrations: bool,
/// Usage statistics for this server.
pub usage: Usage,
/// Free form key value pairs for software specific values. Clients should not rely on any specific key present.
pub metadata: HashMap<String, serde_json::Value>,
}
/// Metadata about server software in use (version 2.1).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Software21 {
/// The canonical name of this server software.
pub name: String,
/// The version of this server software.
pub version: String,
/// The url of the source code repository of this server software.
pub repository: Option<String>,
/// The url of the homepage of this server software.
pub homepage: Option<String>,
}
/// Metadata about server software in use (version 2.0).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Software20 {
/// The canonical name of this server software.
pub name: String,
/// The version of this server software.
pub version: String,
}
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Protocol {
Activitypub,
Buddycloud,
Dfrn,
Diaspora,
Libertree,
Ostatus,
Pumpio,
Tent,
Xmpp,
Zot,
}
/// The third party sites this server can connect to via their application API.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Services {
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
pub inbound: Vec<Inbound>,
/// The third party sites this server can publish messages to on the behalf of a user.
pub outbound: Vec<Outbound>,
}
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Inbound {
#[serde(rename = "atom1.0")]
Atom1,
Gnusocial,
Imap,
Pnut,
#[serde(rename = "pop3")]
Pop3,
Pumpio,
#[serde(rename = "rss2.0")]
Rss2,
Twitter,
}
/// The third party sites this server can publish messages to on the behalf of a user.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Outbound {
#[serde(rename = "atom1.0")]
Atom1,
Blogger,
Buddycloud,
Diaspora,
Dreamwidth,
Drupal,
Facebook,
Friendica,
Gnusocial,
Google,
Insanejournal,
Libertree,
Linkedin,
Livejournal,
Mediagoblin,
Myspace,
Pinterest,
Pnut,
Posterous,
Pumpio,
Redmatrix,
#[serde(rename = "rss2.0")]
Rss2,
Smtp,
Tent,
Tumblr,
Twitter,
Wordpress,
Xmpp,
}
/// Usage statistics for this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Usage {
pub users: Users,
pub local_posts: Option<u32>,
pub local_comments: Option<u32>,
}
/// Statistics about the users of this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Users {
pub total: Option<u32>,
pub active_halfyear: Option<u32>,
pub active_month: Option<u32>,
}
impl From<Software21> for Software20 {
fn from(software: Software21) -> Self {
Self {
name: software.name,
version: software.version,
}
}
}
impl From<Nodeinfo21> for Nodeinfo20 {
fn from(nodeinfo: Nodeinfo21) -> Self {
Self {
version: "2.0".to_string(),
software: nodeinfo.software.into(),
protocols: nodeinfo.protocols,
services: nodeinfo.services,
open_registrations: nodeinfo.open_registrations,
usage: nodeinfo.usage,
metadata: nodeinfo.metadata,
}
}
}
#[cfg(test)]
mod unit_test {
use super::{Nodeinfo20, Nodeinfo21};
use pretty_assertions::assert_eq;
#[test]
fn parse_nodeinfo_2_0() {
let json_str_1 = r#"{"version":"2.0","software":{"name":"mastodon","version":"4.3.0-nightly.2024-04-30"},"protocols":["activitypub"],"services":{"outbound":[],"inbound":[]},"usage":{"users":{"total":1935016,"activeMonth":238223,"activeHalfyear":618795},"localPosts":90175135},"openRegistrations":true,"metadata":{"nodeName":"Mastodon","nodeDescription":"The original server operated by the Mastodon gGmbH non-profit"}}"#;
let parsed_1: Nodeinfo20 = serde_json::from_str(json_str_1).unwrap();
let serialized_1 = serde_json::to_string(&parsed_1).unwrap();
let reparsed_1: Nodeinfo20 = serde_json::from_str(&serialized_1).unwrap();
assert_eq!(parsed_1, reparsed_1);
assert_eq!(parsed_1.software.name, "mastodon");
assert_eq!(parsed_1.software.version, "4.3.0-nightly.2024-04-30");
let json_str_2 = r#"{"version":"2.0","software":{"name":"peertube","version":"5.0.0"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":false,"usage":{"users":{"total":5,"activeMonth":0,"activeHalfyear":2},"localPosts":1018,"localComments":1},"metadata":{"taxonomy":{"postsName":"Videos"},"nodeName":"Blender Video","nodeDescription":"Blender Foundation PeerTube instance.","nodeConfig":{"search":{"remoteUri":{"users":true,"anonymous":false}},"plugin":{"registered":[]},"theme":{"registered":[],"default":"default"},"email":{"enabled":false},"contactForm":{"enabled":true},"transcoding":{"hls":{"enabled":true},"webtorrent":{"enabled":true},"enabledResolutions":[1080]},"live":{"enabled":false,"transcoding":{"enabled":true,"enabledResolutions":[]}},"import":{"videos":{"http":{"enabled":true},"torrent":{"enabled":false}}},"autoBlacklist":{"videos":{"ofUsers":{"enabled":false}}},"avatar":{"file":{"size":{"max":4194304},"extensions":[".png",".jpeg",".jpg",".gif",".webp"]}},"video":{"image":{"extensions":[".png",".jpg",".jpeg",".webp"],"size":{"max":4194304}},"file":{"extensions":[".webm",".ogv",".ogg",".mp4",".mkv",".mov",".qt",".mqv",".m4v",".flv",".f4v",".wmv",".avi",".3gp",".3gpp",".3g2",".3gpp2",".nut",".mts",".m2ts",".mpv",".m2v",".m1v",".mpg",".mpe",".mpeg",".vob",".mxf",".mp3",".wma",".wav",".flac",".aac",".m4a",".ac3"]}},"videoCaption":{"file":{"size":{"max":20971520},"extensions":[".vtt",".srt"]}},"user":{"videoQuota":5368709120,"videoQuotaDaily":-1},"trending":{"videos":{"intervalDays":7}},"tracker":{"enabled":true}}}}"#;
let parsed_2: Nodeinfo20 = serde_json::from_str(json_str_2).unwrap();
let serialized_2 = serde_json::to_string(&parsed_2).unwrap();
let reparsed_2: Nodeinfo20 = serde_json::from_str(&serialized_2).unwrap();
assert_eq!(parsed_2, reparsed_2);
assert_eq!(parsed_2.software.name, "peertube");
assert_eq!(parsed_2.software.version, "5.0.0");
let json_str_3 = r#"{"metadata":{"nodeName":"pixelfed","software":{"homepage":"https://pixelfed.org","repo":"https://github.com/pixelfed/pixelfed"},"config":{"features":{"timelines":{"local":true,"network":true},"mobile_apis":true,"stories":true,"video":true,"import":{"instagram":false,"mastodon":false,"pixelfed":false},"label":{"covid":{"enabled":false,"org":"visit the WHO website","url":"https://www.who.int/emergencies/diseases/novel-coronavirus-2019/advice-for-public"}},"hls":{"enabled":false}}}},"protocols":["activitypub"],"services":{"inbound":[],"outbound":[]},"software":{"name":"pixelfed","version":"0.12.0"},"usage":{"localPosts":24059868,"localComments":0,"users":{"total":112832,"activeHalfyear":24366,"activeMonth":8921}},"version":"2.0","openRegistrations":true}"#;
let parsed_3: Nodeinfo20 = serde_json::from_str(json_str_3).unwrap();
let serialized_3 = serde_json::to_string(&parsed_3).unwrap();
let reparsed_3: Nodeinfo20 = serde_json::from_str(&serialized_3).unwrap();
assert_eq!(parsed_3, reparsed_3);
assert_eq!(parsed_3.software.name, "pixelfed");
assert_eq!(parsed_3.software.version, "0.12.0");
}
#[test]
fn parse_nodeinfo_2_1() {
let json_str_1 = r##"{"version":"2.1","software":{"name":"catodon","version":"24.04-dev.2","repository":"https://codeberg.org/catodon/catodon","homepage":"https://codeberg.org/catodon/catodon"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":true,"usage":{"users":{"total":294,"activeHalfyear":292,"activeMonth":139},"localPosts":22616,"localComments":0},"metadata":{"nodeName":"Catodon Social","nodeDescription":"🌎 Home of Catodon, a new platform for fedi communities, initially based on Iceshrimp/Firefish/Misskey. Be aware that our first release is not out yet, so things are still experimental.","maintainer":{"name":"admin","email":"redacted@example.com"},"langs":[],"tosUrl":"https://example.com/redacted","repositoryUrl":"https://codeberg.org/catodon/catodon","feedbackUrl":"https://codeberg.org/catodon/catodon/issues","disableRegistration":false,"disableLocalTimeline":false,"disableRecommendedTimeline":true,"disableGlobalTimeline":false,"emailRequiredForSignup":true,"postEditing":true,"postImports":false,"enableHcaptcha":true,"enableRecaptcha":false,"maxNoteTextLength":8000,"maxCaptionTextLength":1500,"enableGithubIntegration":false,"enableDiscordIntegration":false,"enableEmail":true,"themeColor":"#31748f"}}"##;
let parsed_1: Nodeinfo21 = serde_json::from_str(json_str_1).unwrap();
let serialized_1 = serde_json::to_string(&parsed_1).unwrap();
let reparsed_1: Nodeinfo21 = serde_json::from_str(&serialized_1).unwrap();
assert_eq!(parsed_1, reparsed_1);
assert_eq!(parsed_1.software.name, "catodon");
assert_eq!(parsed_1.software.version, "24.04-dev.2");
let json_str_2 = r#"{"version":"2.1","software":{"name":"meisskey","version":"10.102.699-m544","repository":"https://github.com/mei23/misskey"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":true,"usage":{"users":{"total":1123,"activeHalfyear":305,"activeMonth":89},"localPosts":268739,"localComments":0},"metadata":{"nodeName":"meisskey.one","nodeDescription":"ローカルタイムラインのないインスタンスなのだわ\n\n\n[通報・報告 (Report)](https://example.com/redacted)","name":"meisskey.one","description":"ローカルタイムラインのないインスタンスなのだわ\n\n\n[通報・報告 (Report)](https://example.com/redacted)","maintainer":{"name":"redacted","email":"redacted"},"langs":[],"announcements":[{"title":"問題・要望など","text":"問題・要望などは <a href=\"https://example.com/redacted\">#meisskeyone要望</a> で投稿してなのだわ"}],"relayActor":"https://example.com/redacted","relays":[],"disableRegistration":false,"disableLocalTimeline":true,"enableRecaptcha":true,"maxNoteTextLength":5000,"enableTwitterIntegration":false,"enableGithubIntegration":false,"enableDiscordIntegration":false,"enableServiceWorker":true,"proxyAccountName":"ghost"}}"#;
let parsed_2: Nodeinfo21 = serde_json::from_str(json_str_2).unwrap();
let serialized_2 = serde_json::to_string(&parsed_2).unwrap();
let reparsed_2: Nodeinfo21 = serde_json::from_str(&serialized_2).unwrap();
assert_eq!(parsed_2, reparsed_2);
assert_eq!(parsed_2.software.name, "meisskey");
assert_eq!(parsed_2.software.version, "10.102.699-m544");
let json_str_3 = r##"{"metadata":{"enableGlobalTimeline":true,"enableGuestTimeline":false,"enableLocalTimeline":true,"enableRecommendedTimeline":false,"maintainer":{"name":"Firefish dev team"},"nodeDescription":"","nodeName":"Firefish","repositoryUrl":"https://firefish.dev/firefish/firefish","themeColor":"#F25A85"},"openRegistrations":false,"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"software":{"homepage":"https://firefish.dev/firefish/firefish","name":"firefish","repository":"https://firefish.dev/firefish/firefish","version":"20240504"},"usage":{"localPosts":23857,"users":{"activeHalfyear":7,"activeMonth":7,"total":9}},"version":"2.1"}"##;
let parsed_3: Nodeinfo21 = serde_json::from_str(json_str_3).unwrap();
let serialized_3 = serde_json::to_string(&parsed_3).unwrap();
let reparsed_3: Nodeinfo21 = serde_json::from_str(&serialized_3).unwrap();
assert_eq!(parsed_3, reparsed_3);
assert_eq!(parsed_3.software.name, "firefish");
assert_eq!(parsed_3.software.version, "20240504");
}
}
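
A note on the TODO at the top of this schema file: these structs appear to be round-tripped through an rmp_serde-encoded cache (see the generate module above), and MessagePack encodes structs positionally, so `#[serde(skip_serializing_if = "Option::is_none")]` would change the field count and break deserialization. Below is a minimal sketch of that failure mode, for illustration only (not part of this diff; it assumes the serde, serde_json, and rmp_serde crates):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct Example {
    name: String,
    // Skipping the field when it is None changes the number of encoded fields.
    #[serde(skip_serializing_if = "Option::is_none")]
    homepage: Option<String>,
}

fn main() {
    let value = Example {
        name: "firefish".to_string(),
        homepage: None,
    };

    // JSON tolerates the missing key: an absent Option field deserializes to None.
    let json = serde_json::to_string(&value).unwrap();
    let _roundtrip: Example = serde_json::from_str(&json).unwrap();

    // MessagePack does not: the struct is encoded as a one-element array,
    // but the deserializer expects two fields and returns an error.
    let bytes = rmp_serde::to_vec(&value).unwrap();
    assert!(rmp_serde::from_slice::<Example>(&bytes).is_err());
}

This is presumably why every optional field stays in the struct and serializes as an explicit null, which the TypeScript side later converts back to undefined via fromRustObject.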

View File

@ -0,0 +1,232 @@
use crate::database::db_conn;
use crate::misc::get_note_summary::{get_note_summary, NoteLike};
use crate::misc::meta::fetch_meta;
use crate::model::entity::sw_subscription;
use crate::util::http_client;
use once_cell::sync::OnceCell;
use sea_orm::{prelude::*, DbErr};
use web_push::{
ContentEncoding, IsahcWebPushClient, SubscriptionInfo, SubscriptionKeys, VapidSignatureBuilder,
WebPushClient, WebPushError, WebPushMessageBuilder,
};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Database error: {0}")]
DbErr(#[from] DbErr),
#[error("Web Push error: {0}")]
WebPushErr(#[from] WebPushError),
#[error("Failed to (de)serialize an object: {0}")]
SerializeErr(#[from] serde_json::Error),
#[error("Invalid content: {0}")]
InvalidContentErr(String),
#[error("HTTP client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
}
static CLIENT: OnceCell<IsahcWebPushClient> = OnceCell::new();
fn get_client() -> Result<IsahcWebPushClient, Error> {
Ok(CLIENT
.get_or_try_init(|| http_client::client().map(IsahcWebPushClient::from))
.cloned()?)
}
#[derive(strum::Display, PartialEq)]
#[crate::export(string_enum = "camelCase")]
pub enum PushNotificationKind {
#[strum(serialize = "notification")]
Generic,
#[strum(serialize = "unreadMessagingMessage")]
Chat,
#[strum(serialize = "readAllMessagingMessages")]
ReadAllChats,
#[strum(serialize = "readAllMessagingMessagesOfARoom")]
ReadAllChatsInTheRoom,
#[strum(serialize = "readNotifications")]
ReadNotifications,
#[strum(serialize = "readAllNotifications")]
ReadAllNotifications,
}
/// Compacts a generic "notification" payload before pushing it: the embedded
/// (re)note is reduced to a short text summary and its bulky `reply`, `renote`,
/// and `user` objects are dropped. Other notification kinds pass through unchanged.
fn compact_content(
kind: &PushNotificationKind,
mut content: serde_json::Value,
) -> Result<serde_json::Value, Error> {
if kind != &PushNotificationKind::Generic {
return Ok(content);
}
if !content.is_object() {
return Err(Error::InvalidContentErr("not a JSON object".to_string()));
}
let object = content.as_object_mut().unwrap();
if !object.contains_key("note") {
return Ok(content);
}
let mut note = if object.contains_key("type") && object.get("type").unwrap() == "renote" {
object
.get("note")
.unwrap()
.get("renote")
.ok_or(Error::InvalidContentErr(
"renote object is missing".to_string(),
))?
} else {
object.get("note").unwrap()
}
.clone();
if !note.is_object() {
return Err(Error::InvalidContentErr(
"(re)note is not an object".to_string(),
));
}
let note_like: NoteLike = serde_json::from_value(note.clone())?;
let text = get_note_summary(note_like);
let note_object = note.as_object_mut().unwrap();
note_object.remove("reply");
note_object.remove("renote");
note_object.remove("user");
note_object.insert("text".to_string(), text.into());
object.insert("note".to_string(), note);
Ok(serde_json::from_value(Json::Object(object.clone()))?)
}
async fn handle_web_push_failure(
db: &DatabaseConnection,
err: WebPushError,
subscription_id: &str,
error_message: &str,
) -> Result<(), DbErr> {
match err {
WebPushError::BadRequest(_)
| WebPushError::ServerError(_)
| WebPushError::InvalidUri
| WebPushError::EndpointNotValid
| WebPushError::EndpointNotFound
| WebPushError::TlsError
| WebPushError::SslError
| WebPushError::InvalidPackageName
| WebPushError::MissingCryptoKeys
| WebPushError::InvalidCryptoKeys
| WebPushError::InvalidResponse => {
sw_subscription::Entity::delete_by_id(subscription_id)
.exec(db)
.await?;
tracing::info!("{}; {} was unsubscribed", error_message, subscription_id);
tracing::debug!("reason: {:#?}", err);
}
_ => {
tracing::warn!("{}; subscription id: {}", error_message, subscription_id);
tracing::info!("reason: {:#?}", err);
}
};
Ok(())
}
#[crate::export]
pub async fn send_push_notification(
receiver_user_id: &str,
kind: PushNotificationKind,
content: &serde_json::Value,
) -> Result<(), Error> {
let meta = fetch_meta(true).await?;
if !meta.enable_service_worker || meta.sw_public_key.is_none() || meta.sw_private_key.is_none()
{
return Ok(());
}
let db = db_conn().await?;
let signature_builder = VapidSignatureBuilder::from_base64_no_sub(
meta.sw_private_key.unwrap().as_str(),
web_push::URL_SAFE_NO_PAD,
)?;
let subscriptions = sw_subscription::Entity::find()
.filter(sw_subscription::Column::UserId.eq(receiver_user_id))
.all(db)
.await?;
let payload = format!(
"{{\"type\":\"{}\",\"userId\":\"{}\",\"dateTime\":{},\"body\":{}}}",
kind,
receiver_user_id,
chrono::Utc::now().timestamp_millis(),
serde_json::to_string(&compact_content(&kind, content.clone())?)?
);
tracing::trace!("payload: {:#?}", payload);
for subscription in subscriptions.iter() {
if !subscription.send_read_message
&& [
PushNotificationKind::ReadAllChats,
PushNotificationKind::ReadAllChatsInTheRoom,
PushNotificationKind::ReadAllNotifications,
PushNotificationKind::ReadNotifications,
]
.contains(&kind)
{
continue;
}
let subscription_info = SubscriptionInfo {
endpoint: subscription.endpoint.to_owned(),
keys: SubscriptionKeys {
// convert standard base64 into base64url
// https://en.wikipedia.org/wiki/Base64#Variants_summary_table
p256dh: subscription
.publickey
.replace('+', "-")
.replace('/', "_")
.to_owned(),
auth: subscription
.auth
.replace('+', "-")
.replace('/', "_")
.to_owned(),
},
};
let signature = signature_builder
.clone()
.add_sub_info(&subscription_info)
.build();
if let Err(err) = signature {
handle_web_push_failure(db, err, &subscription.id, "failed to build a signature")
.await?;
continue;
}
let mut message_builder = WebPushMessageBuilder::new(&subscription_info);
message_builder.set_ttl(1000);
message_builder.set_payload(ContentEncoding::Aes128Gcm, payload.as_bytes());
message_builder.set_vapid_signature(signature.unwrap());
let message = message_builder.build();
if let Err(err) = message {
handle_web_push_failure(db, err, &subscription.id, "failed to build a payload").await?;
continue;
}
if let Err(err) = get_client()?.send(message.unwrap()).await {
handle_web_push_failure(db, err, &subscription.id, "failed to send").await?;
continue;
}
tracing::debug!("success; subscription id: {}", subscription.id);
}
Ok(())
}
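
For context, a rough sketch (not part of this diff) of how another backend-rs module could fire a chat push through this exported function; the helper name, module path, and message content are made up for illustration:

// Illustration only: the module path and helper are hypothetical.
use crate::service::push_notification::{send_push_notification, Error, PushNotificationKind};

async fn notify_new_chat_message(recipient_id: &str) -> Result<(), Error> {
    // The value passed here becomes the push payload's `body`; compact_content
    // leaves it untouched because the kind is not the generic "notification".
    let content = serde_json::json!({
        "id": "9exampleid",
        "text": "hello",
    });
    send_push_notification(recipient_id, PushNotificationKind::Chat, &content).await
}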

View File

@ -1,24 +1,34 @@
use crate::config::CONFIG;
use isahc::{config::*, HttpClient};
use once_cell::sync::OnceCell;
use reqwest::{Client, Error, NoProxy, Proxy};
use std::time::Duration;
static CLIENT: OnceCell<Client> = OnceCell::new();
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Isahc error: {0}")]
IsahcErr(#[from] isahc::Error),
#[error("Url parse error: {0}")]
UrlParseErr(#[from] isahc::http::uri::InvalidUri),
}
pub fn http_client() -> Result<Client, Error> {
static CLIENT: OnceCell<HttpClient> = OnceCell::new();
pub fn client() -> Result<HttpClient, Error> {
CLIENT
.get_or_try_init(|| {
let mut builder = Client::builder().timeout(Duration::from_secs(5));
let mut builder = HttpClient::builder()
.timeout(Duration::from_secs(10))
.default_header("user-agent", &CONFIG.user_agent)
.dns_cache(DnsCache::Timeout(Duration::from_secs(60 * 60)));
if let Some(proxy_url) = &CONFIG.proxy {
let mut proxy = Proxy::all(proxy_url)?;
builder = builder.proxy(Some(proxy_url.parse()?));
if let Some(proxy_bypass_hosts) = &CONFIG.proxy_bypass_hosts {
proxy = proxy.no_proxy(NoProxy::from_string(&proxy_bypass_hosts.join(",")));
builder = builder.proxy_blacklist(proxy_bypass_hosts);
}
builder = builder.proxy(proxy);
}
builder.build()
Ok(builder.build()?)
})
.cloned()
}
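
The HTTP client is now a process-wide isahc `HttpClient` (the same one the web-push sender above wraps via `IsahcWebPushClient::from`) instead of a per-call reqwest `Client`. A hedged usage sketch, not part of this diff; the helper name is made up:

// Illustration only: `fetch_body` is a hypothetical caller.
use crate::util::http_client;
use isahc::AsyncReadResponseExt;

async fn fetch_body(url: &str) -> Result<String, Box<dyn std::error::Error>> {
    // Reuses the lazily initialized client, so the 10-second timeout, user-agent
    // header, DNS cache, and proxy settings configured above apply to every request.
    let mut response = http_client::client()?.get_async(url).await?;
    Ok(response.text().await?)
}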

View File

@ -1,5 +1,3 @@
pub use http_client::http_client;
pub mod http_client;
pub mod id;
pub mod random;

View File

@ -1,7 +1,8 @@
use rand::{distributions::Alphanumeric, thread_rng, Rng};
/// Generate random string based on [thread_rng] and [Alphanumeric].
pub fn gen_string(length: u16) -> String {
#[crate::export]
pub fn generate_secure_random_string(length: u16) -> String {
thread_rng()
.sample_iter(Alphanumeric)
.take(length.into())
@ -9,9 +10,9 @@ pub fn gen_string(length: u16) -> String {
.collect()
}
#[crate::export(js_name = "secureRndstr")]
pub fn native_random_str(length: Option<u16>) -> String {
gen_string(length.unwrap_or(32))
#[crate::export]
pub fn generate_user_token() -> String {
generate_secure_random_string(16)
}
#[cfg(test)]
@ -19,14 +20,17 @@ mod unit_test {
use pretty_assertions::{assert_eq, assert_ne};
use std::thread;
use super::gen_string;
use super::generate_secure_random_string;
#[test]
fn can_generate_unique_strings() {
assert_eq!(gen_string(16).len(), 16);
assert_ne!(gen_string(16), gen_string(16));
let s1 = thread::spawn(|| gen_string(16));
let s2 = thread::spawn(|| gen_string(16));
assert_eq!(generate_secure_random_string(16).len(), 16);
assert_ne!(
generate_secure_random_string(16),
generate_secure_random_string(16)
);
let s1 = thread::spawn(|| generate_secure_random_string(16));
let s2 = thread::spawn(|| generate_secure_random_string(16));
assert_ne!(s1.join().unwrap(), s2.join().unwrap());
}
}

View File

@ -22,54 +22,54 @@
"@swc/core-android-arm64": "1.3.11"
},
"dependencies": {
"@bull-board/api": "5.16.0",
"@bull-board/koa": "5.16.0",
"@bull-board/ui": "5.16.0",
"@discordapp/twemoji": "^15.0.3",
"@bull-board/api": "5.17.1",
"@bull-board/koa": "5.17.1",
"@bull-board/ui": "5.17.1",
"@discordapp/twemoji": "15.0.3",
"@koa/cors": "5.0.0",
"@koa/multer": "3.0.2",
"@koa/router": "12.0.1",
"@ladjs/koa-views": "9.0.0",
"@peertube/http-signature": "1.7.0",
"@redocly/openapi-core": "1.12.0",
"@redocly/openapi-core": "1.12.2",
"@sinonjs/fake-timers": "11.2.2",
"adm-zip": "0.5.10",
"ajv": "8.12.0",
"ajv": "8.13.0",
"archiver": "7.0.1",
"aws-sdk": "2.1608.0",
"axios": "^1.6.8",
"aws-sdk": "2.1621.0",
"axios": "1.6.8",
"backend-rs": "workspace:*",
"blurhash": "2.0.5",
"bull": "4.12.2",
"bull": "4.12.4",
"cacheable-lookup": "TheEssem/cacheable-lookup",
"cbor-x": "^1.5.9",
"cbor-x": "1.5.9",
"chalk": "5.3.0",
"chalk-template": "1.1.0",
"cli-highlight": "2.1.11",
"color-convert": "2.0.1",
"content-disposition": "0.5.4",
"date-fns": "3.6.0",
"decompress": "^4.2.1",
"decompress": "4.2.1",
"deep-email-validator": "0.1.21",
"deepl-node": "1.13.0",
"escape-regexp": "0.0.1",
"feed": "4.2.2",
"file-type": "19.0.0",
"fluent-ffmpeg": "2.1.2",
"form-data": "^4.0.0",
"form-data": "4.0.0",
"got": "14.2.1",
"gunzip-maybe": "^1.4.2",
"happy-dom": "^14.7.1",
"gunzip-maybe": "1.4.2",
"hpagent": "1.2.0",
"ioredis": "5.4.1",
"ip-cidr": "4.0.0",
"is-svg": "5.0.0",
"is-svg": "5.0.1",
"jsdom": "24.0.0",
"json5": "2.2.3",
"jsonld": "8.3.2",
"jsrsasign": "11.1.0",
"katex": "0.16.10",
"koa": "2.15.3",
"koa-body": "^6.0.1",
"koa-body": "6.0.1",
"koa-bodyparser": "4.4.1",
"koa-favicon": "2.1.0",
"koa-json-body": "5.3.0",
@ -81,14 +81,13 @@
"megalodon": "workspace:*",
"mfm-js": "0.24.0",
"mime-types": "2.1.35",
"msgpackr": "^1.10.1",
"msgpackr": "1.10.2",
"multer": "1.4.5-lts.1",
"nested-property": "4.0.0",
"node-fetch": "3.3.2",
"nodemailer": "6.9.13",
"opencc-js": "^1.0.5",
"os-utils": "0.0.14",
"otpauth": "^9.2.3",
"opencc-js": "1.0.5",
"otpauth": "9.2.4",
"parse5": "7.1.2",
"pg": "8.11.5",
"private-ip": "3.0.2",
@ -106,33 +105,32 @@
"rndstr": "1.0.0",
"rss-parser": "3.13.0",
"sanitize-html": "2.13.0",
"semver": "7.6.0",
"semver": "7.6.2",
"sharp": "0.33.3",
"stringz": "2.1.0",
"summaly": "2.7.0",
"syslog-pro": "1.0.0",
"systeminformation": "5.22.7",
"tar-stream": "^3.1.7",
"tesseract.js": "^5.0.5",
"tar-stream": "3.1.7",
"tesseract.js": "5.1.0",
"tinycolor2": "1.6.0",
"tmp": "0.2.3",
"typeorm": "0.3.20",
"ulid": "2.3.0",
"uuid": "9.0.1",
"web-push": "3.6.7",
"websocket": "1.0.34",
"websocket": "1.0.35",
"xev": "3.0.2"
},
"devDependencies": {
"@swc/cli": "0.3.12",
"@swc/core": "1.5.0",
"@types/adm-zip": "^0.5.5",
"@types/color-convert": "^2.0.3",
"@types/content-disposition": "^0.5.8",
"@swc/core": "1.5.7",
"@types/adm-zip": "0.5.5",
"@types/color-convert": "2.0.3",
"@types/content-disposition": "0.5.8",
"@types/escape-regexp": "0.0.3",
"@types/fluent-ffmpeg": "2.1.24",
"@types/jsdom": "21.1.6",
"@types/jsonld": "1.5.13",
"@types/jsrsasign": "10.5.13",
"@types/jsrsasign": "10.5.14",
"@types/katex": "0.16.7",
"@types/koa": "2.15.0",
"@types/koa-bodyparser": "4.3.12",
@ -145,13 +143,13 @@
"@types/koa__multer": "2.0.7",
"@types/koa__router": "12.0.4",
"@types/mocha": "10.0.6",
"@types/node": "20.12.7",
"@types/node": "20.12.12",
"@types/node-fetch": "2.6.11",
"@types/nodemailer": "6.4.14",
"@types/nodemailer": "6.4.15",
"@types/oauth": "0.9.4",
"@types/opencc-js": "^1.0.3",
"@types/pg": "^8.11.5",
"@types/probe-image-size": "^7.2.4",
"@types/opencc-js": "1.0.3",
"@types/pg": "8.11.6",
"@types/probe-image-size": "7.2.4",
"@types/pug": "2.0.10",
"@types/punycode": "2.1.4",
"@types/qrcode": "1.5.5",
@ -162,7 +160,7 @@
"@types/sanitize-html": "2.11.0",
"@types/semver": "7.5.8",
"@types/sinonjs__fake-timers": "8.1.5",
"@types/syslog-pro": "^1.0.3",
"@types/syslog-pro": "1.0.3",
"@types/tinycolor2": "1.4.6",
"@types/tmp": "0.2.6",
"@types/uuid": "9.0.8",
@ -170,17 +168,17 @@
"@types/websocket": "1.0.10",
"@types/ws": "8.5.10",
"cross-env": "7.0.3",
"eslint": "^9.1.1",
"eslint": "9.2.0",
"mocha": "10.4.0",
"pug": "3.0.2",
"strict-event-emitter-types": "2.0.0",
"swc-loader": "^0.2.6",
"swc-loader": "0.2.6",
"ts-loader": "9.5.1",
"ts-node": "10.9.2",
"tsconfig-paths": "4.2.0",
"type-fest": "4.17.0",
"type-fest": "4.18.2",
"typescript": "5.4.5",
"webpack": "^5.91.0",
"ws": "8.16.0"
"webpack": "5.91.0",
"ws": "8.17.0"
}
}

View File

@ -1,33 +0,0 @@
declare module "os-utils" {
type FreeCommandCallback = (usedmem: number) => void;
type HarddriveCallback = (total: number, free: number, used: number) => void;
type GetProcessesCallback = (result: string) => void;
type CPUCallback = (perc: number) => void;
export function platform(): NodeJS.Platform;
export function cpuCount(): number;
export function sysUptime(): number;
export function processUptime(): number;
export function freemem(): number;
export function totalmem(): number;
export function freememPercentage(): number;
export function freeCommand(callback: FreeCommandCallback): void;
export function harddrive(callback: HarddriveCallback): void;
export function getProcesses(callback: GetProcessesCallback): void;
export function getProcesses(
nProcess: number,
callback: GetProcessesCallback,
): void;
export function allLoadavg(): string;
export function loadavg(_time?: number): number;
export function cpuFree(callback: CPUCallback): void;
export function cpuUsage(callback: CPUCallback): void;
}

View File

@ -8,11 +8,14 @@ import chalkTemplate from "chalk-template";
import semver from "semver";
import Logger from "@/services/logger.js";
import type { Config } from "backend-rs";
import { initializeRustLogger } from "backend-rs";
import { fetchMeta, removeOldAttestationChallenges } from "backend-rs";
import {
fetchMeta,
initializeRustLogger,
removeOldAttestationChallenges,
showServerInfo,
type Config,
} from "backend-rs";
import { config, envOption } from "@/config.js";
import { showMachineInfo } from "@/misc/show-machine-info.js";
import { db, initDb } from "@/db/postgre.js";
import { inspect } from "node:util";
@ -90,12 +93,12 @@ function greet() {
export async function masterMain() {
// initialize app
try {
initializeRustLogger();
greet();
showEnvironment();
await showMachineInfo(bootLogger);
showServerInfo();
showNodejsVersion();
await connectDb();
initializeRustLogger();
} catch (e) {
bootLogger.error(
`Fatal error occurred during initialization:\n${inspect(e)}`,

View File

@ -1,15 +1,8 @@
import si from "systeminformation";
import Xev from "xev";
import * as osUtils from "os-utils";
import { fetchMeta } from "backend-rs";
import { fetchMeta, cpuUsage, memoryUsage } from "backend-rs";
const ev = new Xev();
const interval = 2000;
const roundCpu = (num: number) => Math.round(num * 1000) / 1000;
const round = (num: number) => Math.round(num * 10) / 10;
/**
* Report server stats regularly
*/
@ -24,26 +17,9 @@ export default async function () {
if (!meta.enableServerMachineStats) return;
async function tick() {
const cpu = await cpuUsage();
const memStats = await mem();
const netStats = await net();
const fsStats = await fs();
const stats = {
cpu: roundCpu(cpu),
mem: {
used: round(memStats.used - memStats.buffers - memStats.cached),
active: round(memStats.active),
total: round(memStats.total),
},
net: {
rx: round(Math.max(0, netStats.rx_sec)),
tx: round(Math.max(0, netStats.tx_sec)),
},
fs: {
r: round(Math.max(0, fsStats.rIO_sec ?? 0)),
w: round(Math.max(0, fsStats.wIO_sec ?? 0)),
},
cpu: cpuUsage(),
mem: memoryUsage(),
};
ev.emit("serverStats", stats);
log.unshift(stats);
@ -52,33 +28,5 @@ export default async function () {
tick();
setInterval(tick, interval);
}
// CPU STAT
function cpuUsage(): Promise<number> {
return new Promise((res, rej) => {
osUtils.cpuUsage((cpuUsage) => {
res(cpuUsage);
});
});
}
// MEMORY STAT
async function mem() {
const data = await si.mem();
return data;
}
// NETWORK STAT
async function net() {
const iface = await si.networkInterfaceDefault();
const data = await si.networkStats(iface);
return data[0];
}
// FS STAT
async function fs() {
const data = await si.disksIO().catch(() => ({ rIO_sec: 0, wIO_sec: 0 }));
return data || { rIO_sec: 0, wIO_sec: 0 };
setInterval(tick, 3000);
}

View File

@ -1,21 +1,17 @@
import { type HTMLElement, Window } from "happy-dom";
import { JSDOM } from "jsdom";
import type * as mfm from "mfm-js";
import katex from "katex";
import { config } from "@/config.js";
import { intersperse } from "@/prelude/array.js";
import type { IMentionedRemoteUsers } from "@/models/entities/note.js";
function toMathMl(code: string, displayMode: boolean): HTMLElement | null {
const { window } = new Window();
const document = window.document;
document.body.innerHTML = katex.renderToString(code, {
function toMathMl(code: string, displayMode: boolean): MathMLElement | null {
const rendered = katex.renderToString(code, {
throwOnError: false,
output: "mathml",
displayMode,
});
return document.querySelector("math");
return JSDOM.fragment(rendered).querySelector("math");
}
export function toHtml(
@ -26,7 +22,7 @@ export function toHtml(
return null;
}
const { window } = new Window();
const { window } = new JSDOM("");
const doc = window.document;

View File

@ -0,0 +1,13 @@
import type { MigrationInterface, QueryRunner } from "typeorm";
export class AddUserProfileLanguage1714888400293 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`ALTER TABLE "user_profile" ADD COLUMN "lang" character varying(32)`,
);
}
async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "user_profile" DROP COLUMN "lang"`);
}
}

View File

@ -7,10 +7,10 @@ import chalk from "chalk";
import Logger from "@/services/logger.js";
import IPCIDR from "ip-cidr";
import PrivateIp from "private-ip";
import { isValidUrl } from "./is-valid-url.js";
import { isSafeUrl } from "backend-rs";
export async function downloadUrl(url: string, path: string): Promise<void> {
if (!isValidUrl(url)) {
if (!isSafeUrl(url)) {
throw new StatusError("Invalid URL", 400);
}
@ -43,8 +43,8 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
limit: 0,
},
})
.on("redirect", (res: Got.Response, opts: Got.NormalizedOptions) => {
if (!isValidUrl(opts.url)) {
.on("redirect", (_res: Got.Response, opts: Got.NormalizedOptions) => {
if (!isSafeUrl(opts.url)) {
downloadLogger.warn(`Invalid URL: ${opts.url}`);
req.destroy();
}

View File

@ -5,7 +5,7 @@ import CacheableLookup from "cacheable-lookup";
import fetch, { type RequestRedirect } from "node-fetch";
import { HttpProxyAgent, HttpsProxyAgent } from "hpagent";
import { config } from "@/config.js";
import { isValidUrl } from "./is-valid-url.js";
import { isSafeUrl } from "backend-rs";
export async function getJson(
url: string,
@ -60,7 +60,7 @@ export async function getResponse(args: {
size?: number;
redirect?: RequestRedirect;
}) {
if (!isValidUrl(args.url)) {
if (!isSafeUrl(args.url)) {
throw new StatusError("Invalid URL", 400);
}
@ -83,7 +83,7 @@ export async function getResponse(args: {
});
if (args.redirect === "manual" && [301, 302, 307, 308].includes(res.status)) {
if (!isValidUrl(res.url)) {
if (!isSafeUrl(res.url)) {
throw new StatusError("Invalid URL", 400);
}
return res;

View File

@ -1,20 +0,0 @@
export function isValidUrl(url: string | URL | undefined): boolean {
if (process.env.NODE_ENV !== "production") return true;
try {
if (url == null) return false;
const u = typeof url === "string" ? new URL(url) : url;
if (!u.protocol.match(/^https?:$/) || u.hostname === "unix") {
return false;
}
if (u.port !== "" && !["80", "443"].includes(u.port)) {
return false;
}
return true;
} catch {
return false;
}
}

View File

@ -1,17 +0,0 @@
import * as os from "node:os";
import sysUtils from "systeminformation";
import type Logger from "@/services/logger.js";
export async function showMachineInfo(parentLogger: Logger) {
const logger = parentLogger.createSubLogger("machine");
logger.debug(`Hostname: ${os.hostname()}`);
logger.debug(`Platform: ${process.platform} Arch: ${process.arch}`);
const mem = await sysUtils.mem();
const totalmem = (mem.total / 1024 / 1024 / 1024).toFixed(1);
const availmem = (mem.available / 1024 / 1024 / 1024).toFixed(1);
logger.debug(
`CPU: ${
os.cpus().length
} core MEM: ${totalmem}GB (available: ${availmem}GB)`,
);
}

View File

@ -1,8 +1,7 @@
import { Brackets } from "typeorm";
import { isBlockedServer } from "backend-rs";
import { isBlockedServer, DAY } from "backend-rs";
import { Instances } from "@/models/index.js";
import type { Instance } from "@/models/entities/instance.js";
import { DAY } from "backend-rs";
// Threshold from last contact after which an instance will be considered
// "dead" and should no longer get activities delivered to it.

View File

@ -50,6 +50,12 @@ export class UserProfile {
verified?: boolean;
}[];
@Column("varchar", {
length: 32,
nullable: true,
})
public lang: string | null;
@Column("varchar", {
length: 512,
nullable: true,

View File

@ -512,6 +512,7 @@ export const UserRepository = db.getRepository(User).extend({
description: profile!.description,
location: profile!.location,
birthday: profile!.birthday,
lang: profile!.lang,
fields: profile!.fields,
followersCount: followersCount ?? null,
followingCount: followingCount ?? null,

View File

@ -204,6 +204,12 @@ export const packedUserDetailedNotMeOnlySchema = {
optional: false,
example: "2018-03-12",
},
lang: {
type: "string",
nullable: true,
optional: false,
example: "ja-JP",
},
fields: {
type: "array",
nullable: false,

View File

@ -1,7 +1,7 @@
// https://gist.github.com/tkrotoff/a6baf96eb6b61b445a9142e5555511a0
import type { Primitive } from "type-fest";
type NullToUndefined<T> = T extends null
export type NullToUndefined<T> = T extends null
? undefined
: T extends Primitive | Function | Date | RegExp
? T
@ -15,7 +15,7 @@ type NullToUndefined<T> = T extends null
? { [K in keyof T]: NullToUndefined<T[K]> }
: unknown;
type UndefinedToNull<T> = T extends undefined
export type UndefinedToNull<T> = T extends undefined
? null
: T extends Primitive | Function | Date | RegExp
? T
@ -47,6 +47,16 @@ function _nullToUndefined<T>(obj: T): NullToUndefined<T> {
return obj as any;
}
/**
* Recursively converts all null values to undefined.
*
* @param obj object to convert
* @returns a copy of the object with all its null values converted to undefined
*/
export function fromRustObject<T>(obj: T) {
return _nullToUndefined(structuredClone(obj));
}
function _undefinedToNull<T>(obj: T): UndefinedToNull<T> {
if (obj === undefined) {
return null as any;
@ -71,6 +81,6 @@ function _undefinedToNull<T>(obj: T): UndefinedToNull<T> {
* @param obj object to convert
* @returns a copy of the object with all its undefined values converted to null
*/
export function undefinedToNull<T>(obj: T) {
export function toRustObject<T>(obj: T) {
return _undefinedToNull(structuredClone(obj));
}

View File

@ -5,8 +5,12 @@ import perform from "@/remote/activitypub/perform.js";
import Logger from "@/services/logger.js";
import { registerOrFetchInstanceDoc } from "@/services/register-or-fetch-instance-doc.js";
import { Instances } from "@/models/index.js";
import { isAllowedServer, isBlockedServer } from "backend-rs";
import { toPuny, extractHost } from "backend-rs";
import {
extractHost,
isAllowedServer,
isBlockedServer,
toPuny,
} from "backend-rs";
import { getApId } from "@/remote/activitypub/type.js";
import { fetchInstanceMetadata } from "@/services/fetch-instance-metadata.js";
import type { InboxJobData } from "../types.js";

View File

@ -1,11 +1,15 @@
import { URL } from "url";
import { URL } from "node:url";
import httpSignature, { type IParsedSignature } from "@peertube/http-signature";
import { config } from "@/config.js";
import { fetchMeta, isAllowedServer, isBlockedServer } from "backend-rs";
import { toPuny } from "backend-rs";
import {
fetchMeta,
isAllowedServer,
isBlockedServer,
toPuny,
} from "backend-rs";
import DbResolver from "@/remote/activitypub/db-resolver.js";
import { getApId } from "@/remote/activitypub/type.js";
import type { IncomingMessage } from "http";
import type { IncomingMessage } from "node:http";
import type { CacheableRemoteUser } from "@/models/entities/user.js";
import type { UserPublickey } from "@/models/entities/user-publickey.js";
import { verify } from "node:crypto";

View File

@ -5,12 +5,11 @@ import type { IAnnounce } from "../../type.js";
import { getApId } from "../../type.js";
import { fetchNote, resolveNote } from "../../models/note.js";
import { apLogger } from "../../logger.js";
import { extractHost } from "backend-rs";
import { extractHost, isBlockedServer } from "backend-rs";
import { getApLock } from "@/misc/app-lock.js";
import { parseAudience } from "../../audience.js";
import { StatusError } from "@/misc/fetch.js";
import { Notes } from "@/models/index.js";
import { isBlockedServer } from "backend-rs";
import { inspect } from "node:util";
/**

View File

@ -16,7 +16,9 @@ import type { DriveFile } from "@/models/entities/drive-file.js";
import {
type ImageSize,
extractHost,
genId,
getImageSizeFromUrl,
isBlockedServer,
isSameOrigin,
toPuny,
} from "backend-rs";
@ -39,7 +41,6 @@ import {
getApType,
} from "../type.js";
import type { Emoji } from "@/models/entities/emoji.js";
import { genId, isBlockedServer } from "backend-rs";
import { getApLock } from "@/misc/app-lock.js";
import { createMessage } from "@/services/messages/create.js";
import { parseAudience } from "../audience.js";

View File

@ -16,10 +16,9 @@ import type { IRemoteUser, CacheableUser } from "@/models/entities/user.js";
import { User } from "@/models/entities/user.js";
import type { Emoji } from "@/models/entities/emoji.js";
import { UserNotePining } from "@/models/entities/user-note-pining.js";
import { genId } from "backend-rs";
import { genId, isSameOrigin, toPuny } from "backend-rs";
import { UserPublickey } from "@/models/entities/user-publickey.js";
import { isDuplicateKeyValueError } from "@/misc/is-duplicate-key-value-error.js";
import { isSameOrigin, toPuny } from "backend-rs";
import { UserProfile } from "@/models/entities/user-profile.js";
import { toArray } from "@/prelude/array.js";
import { fetchInstanceMetadata } from "@/services/fetch-instance-metadata.js";

View File

@ -5,8 +5,8 @@ import { StatusError, getResponse } from "@/misc/fetch.js";
import { createSignedPost, createSignedGet } from "./ap-request.js";
import type { Response } from "node-fetch";
import type { IObject } from "./type.js";
import { isValidUrl } from "@/misc/is-valid-url.js";
import { apLogger } from "@/remote/activitypub/logger.js";
import { isSafeUrl } from "backend-rs";
export default async (user: { id: User["id"] }, url: string, object: any) => {
const body = JSON.stringify(object);
@ -44,7 +44,7 @@ export async function apGet(
user?: ILocalUser,
redirects: boolean = true,
): Promise<{ finalUrl: string; content: IObject }> {
if (!isValidUrl(url)) {
if (!isSafeUrl(url)) {
throw new StatusError("Invalid URL", 400);
}

View File

@ -1,3 +0,0 @@
import { secureRndstr } from "backend-rs";
export default () => secureRndstr(16);

View File

@ -3,10 +3,11 @@ import {
publishToChatStream,
publishToGroupChatStream,
publishToChatIndexStream,
sendPushNotification,
ChatEvent,
ChatIndexEvent,
PushNotificationKind,
} from "backend-rs";
import { pushNotification } from "@/services/push-notification.js";
import type { User, IRemoteUser } from "@/models/entities/user.js";
import type { MessagingMessage } from "@/models/entities/messaging-message.js";
import { MessagingMessages, UserGroupJoinings, Users } from "@/models/index.js";
@ -62,20 +63,19 @@ export async function readUserMessagingMessage(
if (!(await Users.getHasUnreadMessagingMessage(userId))) {
// Publish an event saying that all (previously unread) messages addressed to me have now been read
publishMainStream(userId, "readAllMessagingMessages");
pushNotification(userId, "readAllMessagingMessages", undefined);
sendPushNotification(userId, PushNotificationKind.ReadAllChats, {});
} else {
// Publish an event if there are no unread messages with that user
const count = await MessagingMessages.count({
const hasUnread = await MessagingMessages.exists({
where: {
userId: otherpartyId,
recipientId: userId,
isRead: false,
},
take: 1,
});
if (!count) {
pushNotification(userId, "readAllMessagingMessagesOfARoom", {
if (!hasUnread) {
sendPushNotification(userId, PushNotificationKind.ReadAllChatsInTheRoom, {
userId: otherpartyId,
});
}
@ -137,10 +137,10 @@ export async function readGroupMessagingMessage(
if (!(await Users.getHasUnreadMessagingMessage(userId))) {
// Publish an event saying that all (previously unread) messages addressed to me have now been read
publishMainStream(userId, "readAllMessagingMessages");
pushNotification(userId, "readAllMessagingMessages", undefined);
sendPushNotification(userId, PushNotificationKind.ReadAllChats, {});
} else {
// Publish an event if there are no unread messages in that group
const unreadExist = await MessagingMessages.createQueryBuilder("message")
const hasUnread = await MessagingMessages.createQueryBuilder("message")
.where("message.groupId = :groupId", { groupId: groupId })
.andWhere("message.userId != :userId", { userId: userId })
.andWhere("NOT (:userId = ANY(message.reads))", { userId: userId })
@ -150,8 +150,10 @@ export async function readGroupMessagingMessage(
.getOne()
.then((x) => x != null);
if (!unreadExist) {
pushNotification(userId, "readAllMessagingMessagesOfARoom", { groupId });
if (!hasUnread) {
sendPushNotification(userId, PushNotificationKind.ReadAllChatsInTheRoom, {
groupId,
});
}
}
}

View File

@ -1,6 +1,6 @@
import { In } from "typeorm";
import { publishMainStream } from "@/services/stream.js";
import { pushNotification } from "@/services/push-notification.js";
import { sendPushNotification, PushNotificationKind } from "backend-rs";
import type { User } from "@/models/entities/user.js";
import type { Notification } from "@/models/entities/notification.js";
import { Notifications, Users } from "@/models/index.js";
@ -47,7 +47,11 @@ export async function readNotificationByQuery(
function postReadAllNotifications(userId: User["id"]) {
publishMainStream(userId, "readAllNotifications");
return pushNotification(userId, "readAllNotifications", undefined);
return sendPushNotification(
userId,
PushNotificationKind.ReadAllNotifications,
{},
);
}
function postReadNotifications(
@ -55,5 +59,7 @@ function postReadNotifications(
notificationIds: Notification["id"][],
) {
publishMainStream(userId, "readNotifications", notificationIds);
return pushNotification(userId, "readNotifications", { notificationIds });
return sendPushNotification(userId, PushNotificationKind.ReadNotifications, {
notificationIds,
});
}

View File

@ -1,10 +1,9 @@
import { generateKeyPair } from "node:crypto";
import generateUserToken from "./generate-native-user-token.js";
import { User } from "@/models/entities/user.js";
import { Users, UsedUsernames } from "@/models/index.js";
import { UserProfile } from "@/models/entities/user-profile.js";
import { IsNull } from "typeorm";
import { genId, hashPassword, toPuny } from "backend-rs";
import { genId, generateUserToken, hashPassword, toPuny } from "backend-rs";
import { UserKeypair } from "@/models/entities/user-keypair.js";
import { UsedUsername } from "@/models/entities/used-username.js";
import { db } from "@/db/postgre.js";

View File

@ -1,8 +1,8 @@
import * as os from "node:os";
import si from "systeminformation";
import define from "@/server/api/define.js";
import { redisClient } from "@/db/redis.js";
import { db } from "@/db/postgre.js";
import { cpuInfo, memoryUsage, storageUsage } from "backend-rs";
export const meta = {
requireCredential: true,
@ -85,19 +85,6 @@ export const meta = {
},
},
},
net: {
type: "object",
optional: false,
nullable: false,
properties: {
interface: {
type: "string",
optional: false,
nullable: false,
example: "eth0",
},
},
},
},
},
} as const;
@ -109,13 +96,10 @@ export const paramDef = {
} as const;
export default define(meta, paramDef, async () => {
const memStats = await si.mem();
const fsStats = await si.fsSize();
const netInterface = await si.networkInterfaceDefault();
const redisServerInfo = await redisClient.info("Server");
const m = redisServerInfo.match(new RegExp("^redis_version:(.*)", "m"));
const m = redisServerInfo.match(/^redis_version:(.*)/m);
const redis_version = m?.[1];
const storage = storageUsage();
return {
machine: os.hostname(),
@ -125,19 +109,13 @@ export default define(meta, paramDef, async () => {
.query("SHOW server_version")
.then((x) => x[0].server_version),
redis: redis_version,
cpu: {
model: os.cpus()[0].model,
cores: os.cpus().length,
},
cpu: cpuInfo(),
mem: {
total: memStats.total,
total: memoryUsage().total,
},
fs: {
total: fsStats[0].size,
used: fsStats[0].used,
},
net: {
interface: netInterface,
total: storage?.total ?? 0,
used: storage?.used ?? 0,
},
};
});

View File

@ -1,6 +1,6 @@
import define from "@/server/api/define.js";
import { Apps } from "@/models/index.js";
import { genId, secureRndstr } from "backend-rs";
import { genId, generateSecureRandomString } from "backend-rs";
import { unique } from "@/prelude/array.js";
export const meta = {
@ -40,7 +40,7 @@ export default define(meta, paramDef, async (ps, user) => {
includeSecret: true,
});
// Generate secret
const secret = secureRndstr(32);
const secret = generateSecureRandomString(32);
// for backward compatibility
const permission = unique(

View File

@ -2,7 +2,7 @@ import * as crypto from "node:crypto";
import define from "@/server/api/define.js";
import { ApiError } from "@/server/api/error.js";
import { AuthSessions, AccessTokens, Apps } from "@/models/index.js";
import { genId, secureRndstr } from "backend-rs";
import { genId, generateSecureRandomString } from "backend-rs";
export const meta = {
tags: ["auth"],
@ -37,10 +37,10 @@ export default define(meta, paramDef, async (ps, user) => {
}
// Generate access token
const accessToken = secureRndstr(32);
const accessToken = generateSecureRandomString(32);
// Fetch exist access token
const exist = await AccessTokens.exist({
const exist = await AccessTokens.exists({
where: {
appId: session.appId,
userId: user.id,

View File

@ -1,4 +1,4 @@
import { readdir } from "fs/promises";
import { readdir } from "node:fs/promises";
import define from "@/server/api/define.js";
export const meta = {

View File

@ -2,8 +2,7 @@ import define from "@/server/api/define.js";
import { createImportPostsJob } from "@/queue/index.js";
import { ApiError } from "@/server/api/error.js";
import { DriveFiles } from "@/models/index.js";
import { DAY } from "backend-rs";
import { fetchMeta } from "backend-rs";
import { fetchMeta, DAY } from "backend-rs";
export const meta = {
secure: true,

View File

@ -4,11 +4,10 @@ import { resolveUser } from "@/remote/resolve-user.js";
import acceptAllFollowRequests from "@/services/following/requests/accept-all.js";
import { publishToFollowers } from "@/services/i/update.js";
import { publishMainStream } from "@/services/stream.js";
import { DAY } from "backend-rs";
import { stringToAcct, DAY } from "backend-rs";
import { apiLogger } from "@/server/api/logger.js";
import define from "@/server/api/define.js";
import { ApiError } from "@/server/api/error.js";
import { stringToAcct } from "backend-rs";
import { inspect } from "node:util";
export const meta = {

View File

@ -1,6 +1,6 @@
import type { User } from "@/models/entities/user.js";
import { resolveUser } from "@/remote/resolve-user.js";
import { DAY } from "backend-rs";
import { stringToAcct, DAY } from "backend-rs";
import DeliverManager from "@/remote/activitypub/deliver-manager.js";
import { renderActivity } from "@/remote/activitypub/renderer/index.js";
import define from "@/server/api/define.js";
@ -12,7 +12,6 @@ import { getUser } from "@/server/api/common/getters.js";
import { Followings, Users } from "@/models/index.js";
import { config } from "@/config.js";
import { publishMainStream } from "@/services/stream.js";
import { stringToAcct } from "backend-rs";
import { inspect } from "node:util";
export const meta = {

View File

@ -3,10 +3,9 @@ import {
publishMainStream,
publishUserEvent,
} from "@/services/stream.js";
import generateUserToken from "@/server/api/common/generate-native-user-token.js";
import define from "@/server/api/define.js";
import { Users, UserProfiles } from "@/models/index.js";
import { verifyPassword } from "backend-rs";
import { generateUserToken, verifyPassword } from "backend-rs";
export const meta = {
requireCredential: true,

View File

@ -87,6 +87,7 @@ export const paramDef = {
description: { ...Users.descriptionSchema, nullable: true },
location: { ...Users.locationSchema, nullable: true },
birthday: { ...Users.birthdaySchema, nullable: true },
lang: { type: "string", nullable: true },
avatarId: { type: "string", format: "misskey:id", nullable: true },
bannerId: { type: "string", format: "misskey:id", nullable: true },
fields: {
@ -154,6 +155,7 @@ export default define(meta, paramDef, async (ps, _user, token) => {
if (ps.name !== undefined) updates.name = ps.name;
if (ps.description !== undefined) profileUpdates.description = ps.description;
if (typeof ps.lang === "string") profileUpdates.lang = ps.lang;
if (ps.location !== undefined) profileUpdates.location = ps.location;
if (ps.birthday !== undefined) profileUpdates.birthday = ps.birthday;
if (ps.ffVisibility !== undefined)

View File

@ -1,6 +1,6 @@
import define from "@/server/api/define.js";
import { AccessTokens } from "@/models/index.js";
import { genId, secureRndstr } from "backend-rs";
import { genId, generateSecureRandomString } from "backend-rs";
export const meta = {
tags: ["auth"],
@ -43,7 +43,7 @@ export const paramDef = {
export default define(meta, paramDef, async (ps, user) => {
// Generate access token
const accessToken = secureRndstr(32);
const accessToken = generateSecureRandomString(32);
const now = new Date();

View File

@ -18,7 +18,7 @@ import { config } from "@/config.js";
import { noteVisibilities } from "@/types.js";
import { ApiError } from "@/server/api/error.js";
import define from "@/server/api/define.js";
import { HOUR } from "backend-rs";
import { genId, HOUR } from "backend-rs";
import { getNote } from "@/server/api/common/getters.js";
import { Poll } from "@/models/entities/poll.js";
import * as mfm from "mfm-js";
@ -26,7 +26,6 @@ import { concat } from "@/prelude/array.js";
import { extractHashtags } from "@/misc/extract-hashtags.js";
import { extractCustomEmojisFromMfm } from "@/misc/extract-custom-emojis-from-mfm.js";
import { extractMentionedUsers } from "@/services/note/create.js";
import { genId } from "backend-rs";
import { publishNoteStream } from "@/services/stream.js";
import DeliverManager from "@/remote/activitypub/deliver-manager.js";
import { renderActivity } from "@/remote/activitypub/renderer/index.js";

View File

@ -1,5 +1,5 @@
import { publishMainStream } from "@/services/stream.js";
import { pushNotification } from "@/services/push-notification.js";
import { sendPushNotification, PushNotificationKind } from "backend-rs";
import { Notifications } from "@/models/index.js";
import define from "@/server/api/define.js";
@ -17,7 +17,7 @@ export const paramDef = {
required: [],
} as const;
export default define(meta, paramDef, async (ps, user) => {
export default define(meta, paramDef, async (_, user) => {
// Update documents
await Notifications.update(
{
@ -31,5 +31,5 @@ export default define(meta, paramDef, async (ps, user) => {
// Publish an event saying that all notifications have been read
publishMainStream(user.id, "readAllNotifications");
pushNotification(user.id, "readAllNotifications", undefined);
sendPushNotification(user.id, PushNotificationKind.ReadAllNotifications, {});
});

View File

@ -1,9 +1,8 @@
import { Pages, DriveFiles } from "@/models/index.js";
import { genId } from "backend-rs";
import { genId, HOUR } from "backend-rs";
import { Page } from "@/models/entities/page.js";
import define from "@/server/api/define.js";
import { ApiError } from "@/server/api/error.js";
import { HOUR } from "backend-rs";
export const meta = {
tags: ["pages"],

View File

@ -1,7 +1,6 @@
import { IsNull } from "typeorm";
import { Users } from "@/models/index.js";
import { fetchMeta } from "backend-rs";
import { stringToAcct } from "backend-rs";
import { fetchMeta, stringToAcct } from "backend-rs";
import type { User } from "@/models/entities/user.js";
import define from "@/server/api/define.js";

View File

@ -1,10 +1,9 @@
import * as os from "node:os";
import si from "systeminformation";
import define from "@/server/api/define.js";
import { fetchMeta } from "backend-rs";
import { fetchMeta, cpuInfo, memoryUsage, storageUsage } from "backend-rs";
export const meta = {
requireCredential: false,
requireCredential: true,
requireCredentialPrivateMode: true,
allowGet: true,
cacheSec: 30,
@ -18,19 +17,8 @@ export const paramDef = {
} as const;
export default define(meta, paramDef, async () => {
const memStats = await si.mem();
const fsStats = await si.fsSize();
let fsIndex = 0;
// Get the first index of fs sizes that are actually used.
for (const [i, stat] of fsStats.entries()) {
if (stat.rw === true && stat.used > 0) {
fsIndex = i;
break;
}
}
const instanceMeta = await fetchMeta(true);
if (!instanceMeta.enableServerMachineStats) {
return {
machine: "Not specified",
@ -47,18 +35,19 @@ export default define(meta, paramDef, async () => {
},
};
}
const memory = memoryUsage();
const storage = storageUsage();
return {
machine: os.hostname(),
cpu: {
model: os.cpus()[0].model,
cores: os.cpus().length,
},
cpu: cpuInfo(),
mem: {
total: memStats.total,
total: memory.total,
},
fs: {
total: fsStats[fsIndex].size,
used: fsStats[fsIndex].used,
total: storage?.total ?? 0,
used: storage?.used ?? 0,
},
};
});

View File

@ -1,5 +1,4 @@
import { fetchMeta } from "backend-rs";
import { genId } from "backend-rs";
import { fetchMeta, genId } from "backend-rs";
import { SwSubscriptions } from "@/models/index.js";
import define from "@/server/api/define.js";

View File

@ -1,6 +1,6 @@
import Router from "@koa/router";
import type Router from "@koa/router";
import { getClient } from "../ApiMastodonCompatibleService.js";
import { ParsedUrlQuery } from "querystring";
import type { ParsedUrlQuery } from "node:querystring";
import {
convertAccount,
convertConversation,

View File

@ -16,10 +16,9 @@ import { IsNull } from "typeorm";
import { config, envOption } from "@/config.js";
import Logger from "@/services/logger.js";
import { Users } from "@/models/index.js";
import { fetchMeta } from "backend-rs";
import { fetchMeta, stringToAcct } from "backend-rs";
import { genIdenticon } from "@/misc/gen-identicon.js";
import { createTemp } from "@/misc/create-temp.js";
import { stringToAcct } from "backend-rs";
import megalodon, { type MegalodonInterface } from "megalodon";
import activityPub from "./activitypub.js";
import nodeinfo from "./nodeinfo.js";

View File

@ -1,9 +1,7 @@
import Router from "@koa/router";
import { config } from "@/config.js";
import { fetchMeta } from "backend-rs";
import { Users, Notes } from "@/models/index.js";
import { IsNull, MoreThan } from "typeorm";
import { Cache } from "@/misc/cache.js";
import { nodeinfo_2_0, nodeinfo_2_1 } from "backend-rs";
import { fromRustObject } from "@/prelude/undefined-to-null.js";
const router = new Router();
@ -22,101 +20,14 @@ export const links = [
},
];
const nodeinfo2 = async () => {
const now = Date.now();
const [meta, total, activeHalfyear, activeMonth, localPosts] =
await Promise.all([
fetchMeta(false),
Users.count({ where: { host: IsNull() } }),
Users.count({
where: {
host: IsNull(),
lastActiveDate: MoreThan(new Date(now - 15552000000)),
},
}),
Users.count({
where: {
host: IsNull(),
lastActiveDate: MoreThan(new Date(now - 2592000000)),
},
}),
Notes.count({ where: { userHost: IsNull() } }),
]);
const proxyAccount = meta.proxyAccountId
? await Users.pack(meta.proxyAccountId).catch(() => null)
: null;
return {
software: {
name: "firefish",
version: config.version,
repository: meta.repositoryUrl,
homepage: "https://firefish.dev/firefish/firefish",
},
protocols: ["activitypub"],
services: {
inbound: [] as string[],
outbound: ["atom1.0", "rss2.0"],
},
openRegistrations: !meta.disableRegistration,
usage: {
users: { total, activeHalfyear, activeMonth },
localPosts,
localComments: 0,
},
metadata: {
nodeName: meta.name,
nodeDescription: meta.description,
maintainer: {
name: meta.maintainerName,
email: meta.maintainerEmail,
},
langs: meta.langs,
tosUrl: meta.tosUrl,
repositoryUrl: meta.repositoryUrl,
feedbackUrl: meta.feedbackUrl,
disableRegistration: meta.disableRegistration,
disableLocalTimeline: meta.disableLocalTimeline,
disableRecommendedTimeline: meta.disableRecommendedTimeline,
disableGlobalTimeline: meta.disableGlobalTimeline,
emailRequiredForSignup: meta.emailRequiredForSignup,
postEditing: true,
postImports: meta.experimentalFeatures?.postImports || false,
enableHcaptcha: meta.enableHcaptcha,
enableRecaptcha: meta.enableRecaptcha,
maxNoteTextLength: config.maxNoteLength,
maxCaptionTextLength: config.maxCaptionLength,
enableEmail: meta.enableEmail,
enableServiceWorker: meta.enableServiceWorker,
proxyAccountName: proxyAccount ? proxyAccount.username : null,
themeColor: meta.themeColor || "#31748f",
},
};
};
const cache = new Cache<Awaited<ReturnType<typeof nodeinfo2>>>(
"nodeinfo",
60 * 10,
);
router.get(nodeinfo2_1path, async (ctx) => {
const base = await cache.fetch(null, () => nodeinfo2());
ctx.body = { version: "2.1", ...base };
ctx.set("Cache-Control", "public, max-age=600");
ctx.body = fromRustObject(await nodeinfo_2_1());
ctx.set("Cache-Control", "public, max-age=3600");
});
router.get(nodeinfo2_0path, async (ctx) => {
const base = await cache.fetch(null, () => nodeinfo2());
// @ts-ignore
base.software.repository = undefined;
// @ts-ignore
base.software.homepage = undefined;
ctx.body = { version: "2.0", ...base };
ctx.set("Cache-Control", "public, max-age=600");
ctx.body = fromRustObject(await nodeinfo_2_0());
ctx.set("Cache-Control", "public, max-age=3600");
});
export default router;

View File

@ -149,7 +149,9 @@ router.get<{ Params: { path: string } }>("/emoji/:path(.*)", async (ctx) => {
return;
}
let url = new URL(`${config.mediaProxy || config.url + "/proxy"}/emoji.webp`);
const url = new URL(
`${config.mediaProxy || `${config.url}/proxy`}/emoji.webp`,
);
// Falling back to emoji.originalUrl is for backward compatibility
url.searchParams.append("url", emoji.publicUrl || emoji.originalUrl);
url.searchParams.append("emoji", "1");
@ -370,9 +372,8 @@ const getFeed = async (
};
// As the /@user[.json|.rss|.atom]/sub endpoint is complicated, we will use a regex to switch between them.
const reUser = new RegExp(
"^/@(?<user>[^/]+?)(?:.(?<feed>json|rss|atom)(?:\\?[^/]*)?)?(?:/(?<sub>[^/]+))?$",
);
const reUser =
/^\/@(?<user>[^\/]+?)(?:.(?<feed>json|rss|atom)(?:\?[^\/]*)?)?(?:\/(?<sub>[^\/]+))?$/;
router.get(reUser, async (ctx, next) => {
const groups = reUser.exec(ctx.originalUrl)?.groups;
if (!groups) {

Some files were not shown because too many files have changed in this diff.