Compare commits

...

36 commits

Author SHA1 Message Date
flamingos-cant a0ad7806cb
Increase alt_text size to 1500 (#4724) 2024-05-17 13:03:19 -04:00
Nutomic 99aac07714
Mark database fields as sensitive so they dont show up in logs (#4720)
* Mark database fields as sensitive so they dont show up in logs

* add file

* fix test

* Update crates/apub/src/objects/person.rs

Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>

* Update crates/apub/src/objects/community.rs

Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>

* Update crates/apub/src/objects/instance.rs

Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>

---------

Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>
Co-authored-by: Dessalines <dessalines@users.noreply.github.com>
2024-05-16 20:41:57 -04:00
Dessalines 1a4aa3eaba
Stop using a diesel_cli docker image, use cargo-install in woodpecker. (#4723)
* Stop using a diesel_cli docker image, use cargo-binstall in woodpecker.

- The diesel_cli image is 500MB, and rebuilt daily. Much easier to use
  binstall to install it.

* Trying out a multiline var.

* Try sequence merges 1

* Try removing features.

* Try path fix.

* Abstracting diesel cli install.

* Try installing mysql and postgres.

* Try installing mysql and postgres 2.

* Try installing mysql and postgres 3.

* Try installing mysql and postgres 4.

* Try installing mysql and postgres 5.

* Try installing mysql and postgres 6.

* Try installing mysql and postgres 7.

* Try installing mysql and postgres 8.

* Try installing mysql and postgres 9.

* Try installing mysql and postgres 10.

* Try installing mysql and postgres 11.

* Try installing mysql and postgres 12.

* Try installing mysql and postgres 13.

* Add logging line.

* Add logging line 2.

* Add logging line 3.

* Add logging line 4.

* Removing binstall.

* Extract taplo into its own image, ignore binstall.

* Use a smaller taplo.

* taplo is the same image.
2024-05-16 16:41:36 -04:00
Nutomic 93c9a5f2b1
Dont federate post locking via Update activity (#4717)
* Dont federate post locking via Update activity

* cleanup

* add missing mod log entries

* update assets
2024-05-15 07:36:00 -04:00
Nutomic 9a9d518153
Fix import blocked objects (#4712)
* Allow importing partial backup (fixes #4672)

* Fetch blocked objects if not known locally (fixes #4669)

* extract helper fn

* add comment

* cleanup

* remove test

* fmt

* remove .ok()
2024-05-14 23:03:43 -04:00
Nutomic 7fb03c502e
Add test to ensure reports are sent to user's home instance (ref #4701) (#4711)
* Add test to ensure reports are sent to user's home instance (ref #4701)

* enable all tests

* set package-manager-strict=false
2024-05-14 22:48:24 -04:00
Nutomic 49bb17b583
Stricter rate limit for login (#4718) 2024-05-14 22:43:43 -04:00
Nutomic 723cb549d4
Allow importing partial backup (fixes #4672) (#4705)
* Allow importing partial backup (fixes #4672)

* Dont throw error on empty LocalUser::update

* fix tests
2024-05-14 22:37:30 -04:00
Nutomic 8b6a4c060e
Make nodeinfo standard compliant, upgrade to nodeinfo 2.1 (fixes #4702) (#4706)
* Always set activitypub protocol in nodeinfo response (fixes #4702)

* Add mandatory fields
2024-05-13 22:53:20 -04:00
Dessalines cb80980027 Version 0.19.4-beta.7 2024-05-11 13:51:09 -04:00
dullbananas c4fc3a8ede
Optimize stuff in attempt to fix high amount of locks, and fix comment_aggregates.child_count (#4696)
* separate triggers

* auto_explain.log_triggers=on

* Revert "auto_explain.log_triggers=on"

This reverts commit 078b2dbb9b.

* Revert "separate triggers"

This reverts commit 95600da4af.

* bring back migration

* re-order statements

* add comment about statement ordering

* no redundant updates

* optimize post_aggregates update in comment trigger

* set comment path in trigger

* update comment_aggregates.child_count using trigger

* move `LEFT JOIN post` to inner query

* clean up newest_comment_time_necro

* add down.sql
2024-05-09 08:18:55 -04:00
Nutomic b4f9ef24a5
Dont exit early when running only scheduled tasks (#4707)
* Dont exit early when running only scheduled tasks (fixes #4709)

* fix
2024-05-08 14:56:44 +02:00
Nutomic 866d752a3c
Instance.preferred_username should be optional (fixes #4701) (#4713) 2024-05-08 08:01:04 -04:00
Nutomic e0b1d0553d
Add timeout for processing incoming activities (#4708)
* Add timeout for processing incoming activities

* move to const
2024-05-08 08:00:55 -04:00
Nutomic 7c146272c3
Federate with wordpress, improvements for NodeBB, Discourse federation (#4692)
* Federate with wordpress

* upgrade apub lib with fix

* Also read post's community from `audience`

* cleanup

* cargo update

* upgrade apub lib

* add wordpress test activity
2024-05-07 16:20:43 -04:00
Nutomic cfdc732d3a
On registration set show_nsfw based on site.content_warning (#4616)
Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>
2024-05-07 16:18:58 -04:00
Tim Coombs 522f974e30
fix: use docker compose v2 (#4622)
* fix: use docker compose v2

* Using sudo tee.

* fix: correct postgres sed command

---------

Co-authored-by: Dessalines <tyhou13@gmx.com>
Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>
2024-05-07 11:41:40 +02:00
SleeplessOne1917 b152be7951
Update rustls (#4690)
* Update rustls

* Format code
2024-05-03 16:06:14 -04:00
SleeplessOne1917 485b0f1a54
Replace unmaintained encoding dep with maintained encoding_rs dep (#4694)
* Replace dependency on unmaintained encoding crate with dep on maintained encoding_rs crate

* Update lockfile

* Taplo format Cargo.toml

* Use better variable name

* Replace into_owned with into
2024-05-03 10:42:48 +00:00
Nutomic 7540b02723
Update activitypub library (#4691)
https://github.com/LemmyNet/activitypub-federation-rust/releases/tag/0.5.5
2024-05-02 12:46:34 -04:00
Nutomic 7746db4169
Testing and minor fix for federation with Discourse (#4628)
* Testing and minor fix for federation with Discourse

* prettier
2024-05-02 07:49:19 -04:00
Dessalines db2ce81fc4
Show trigger logging. #4681 (#4688) 2024-05-01 18:46:14 -04:00
renovate[bot] 4175a1af80
chore(deps): update rust crate serde_with to 3.8.1 (#4687)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-30 23:47:02 -04:00
renovate[bot] 563280456e
chore(deps): update pnpm to v9.0.6 (#4682)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-30 23:43:14 -04:00
Dessalines 2fecb7ecdf
Dont show own comments for liked and disliked_only. Fixes #4675 (#4679)
Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>
2024-04-30 23:26:55 -04:00
renovate[bot] 2c6f9c7fd5
chore(deps): update rust crate serde to 1.0.199 (#4684)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-30 23:17:23 -04:00
renovate[bot] e338e59868
fix(deps): update rust crate lettre to 0.11.7 (#4685)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-30 22:54:16 -04:00
renovate[bot] b0caa85ed4
chore(deps): update rust crate base64 to 0.22.1 (#4683)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-30 22:30:19 -04:00
Dessalines ad60d91f5c
Dont publish lemmy_db_perf to fix crates.io publish. Fixes #4678 (#4680) 2024-04-30 17:51:12 +00:00
Dessalines 6423d2dde5 Version 0.19.4-beta.6 2024-04-30 06:38:44 -04:00
Nutomic 12163701e7
Avoid crash when handling urls without domain (#4676)
* Avoid crash when handling urls without domain

* Add some extra checks
2024-04-30 06:33:37 -04:00
Dessalines 5c35e97a75
Dont show deleted / removed posts when searching. Fixes #4576 (#4671)
* Dont show deleted / removed posts when searching. Fixes #4576

* Address PR comments.

* Clean up comment removed also.

---------

Co-authored-by: SleeplessOne1917 <28871516+SleeplessOne1917@users.noreply.github.com>
2024-04-30 12:24:18 +02:00
SleeplessOne1917 b05f221565
Remove login step from publish to crates.io (#4674) 2024-04-30 00:07:03 +00:00
Nutomic beec080274
Testing for federation with NodeBB, make community.followers_url optional (#4629)
* Testing for federation with NodeBB, make community.followers_url optional

* clippy
2024-04-29 12:34:11 +02:00
Dessalines 492d8f1b01
Fix communities with broken outboxes, and use PostView. Fixes #4658 (#4668)
* Fix communities with broken outboxes, and use PostView. Fixes #4658

* Fixing tests.

* Dont pass ref and clone.
2024-04-29 12:22:00 +02:00
dullbananas d3737d4453
Optimize actor_language.rs (#4612)
* Remove useless transaction in actor_language.rs

* Update actor_language.rs

* site

* community

* Update actor_language.rs

* Update actor_language.rs

* Update actor_language.rs

* Update actor_language.rs

* Update actor_language.rs
2024-04-27 10:59:58 -04:00
90 changed files with 1931 additions and 1160 deletions


@@ -3,6 +1,7 @@
variables:
- &rust_image "rust:1.77"
- &rust_nightly_image "rustlang/rust:nightly"
- &install_pnpm "corepack enable pnpm"
- &slow_check_paths
- event: pull_request
@@ -24,15 +25,17 @@ variables:
"diesel.toml",
".gitmodules",
]
# Broken for cron jobs currently, see
# https://github.com/woodpecker-ci/woodpecker/issues/1716
# clone:
# git:
# image: woodpeckerci/plugin-git
# settings:
# recursive: true
# submodule_update_remote: true
- install_binstall: &install_binstall
- wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
- tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
- cp cargo-binstall /usr/local/cargo/bin
- install_diesel_cli: &install_diesel_cli
- apt update && apt install -y lsb-release build-essential
- sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
- wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
- apt update && apt install -y postgresql-client-16
- cargo install diesel_cli --no-default-features --features postgres
- export PATH="$CARGO_HOME/bin:$PATH"
steps:
prepare_repo:
@@ -66,7 +69,7 @@ steps:
- event: pull_request
cargo_fmt:
image: rustlang/rust:nightly
image: *rust_nightly_image
environment:
# store cargo data in repo folder so that it gets cached between steps
CARGO_HOME: .cargo_home
@@ -77,11 +80,9 @@ steps:
- event: pull_request
cargo_machete:
image: rustlang/rust:nightly
image: *rust_nightly_image
commands:
- wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz
- tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz
- cp cargo-binstall /usr/local/cargo/bin
- <<: *install_binstall
- cargo binstall -y cargo-machete
- cargo machete
when:
@@ -133,11 +134,12 @@ steps:
when: *slow_check_paths
check_diesel_schema:
image: willsquire/diesel-cli
image: *rust_image
environment:
CARGO_HOME: .cargo_home
DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
commands:
- <<: *install_diesel_cli
- diesel migration run
- diesel print-schema --config-file=diesel.toml > tmp.schema
- diff tmp.schema crates/db_schema/src/schema.rs
@@ -197,8 +199,8 @@ steps:
PGHOST: database
PGDATABASE: lemmy
commands:
- cargo install diesel_cli
- export PATH="$CARGO_HOME/bin:$PATH"
# Install diesel_cli
- <<: *install_diesel_cli
# Run all migrations
- diesel migration run
# Dump schema to before.sqldump (PostgreSQL apt repo is used to prevent pg_dump version mismatch error)
@@ -276,10 +278,11 @@ steps:
publish_to_crates_io:
image: *rust_image
commands:
- cargo install cargo-workspaces
- <<: *install_binstall
# Install cargo-workspaces
- cargo binstall -y cargo-workspaces
- cp -r migrations crates/db_schema/
- cargo login "$CARGO_API_TOKEN"
- cargo workspaces publish --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
- cargo workspaces publish --token "$CARGO_API_TOKEN" --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}"
secrets: [cargo_api_token]
when:
- event: tag

Cargo.lock (generated, 656 changes): file diff suppressed because it is too large


@@ -1,5 +1,5 @@
[workspace.package]
version = "0.19.4-beta.5"
version = "0.19.4-beta.7"
edition = "2021"
description = "A link aggregator for the fediverse"
license = "AGPL-3.0"
@@ -88,25 +88,25 @@ unused_self = "deny"
unwrap_used = "deny"
[workspace.dependencies]
lemmy_api = { version = "=0.19.4-beta.5", path = "./crates/api" }
lemmy_api_crud = { version = "=0.19.4-beta.5", path = "./crates/api_crud" }
lemmy_apub = { version = "=0.19.4-beta.5", path = "./crates/apub" }
lemmy_utils = { version = "=0.19.4-beta.5", path = "./crates/utils", default-features = false }
lemmy_db_schema = { version = "=0.19.4-beta.5", path = "./crates/db_schema" }
lemmy_api_common = { version = "=0.19.4-beta.5", path = "./crates/api_common" }
lemmy_routes = { version = "=0.19.4-beta.5", path = "./crates/routes" }
lemmy_db_views = { version = "=0.19.4-beta.5", path = "./crates/db_views" }
lemmy_db_views_actor = { version = "=0.19.4-beta.5", path = "./crates/db_views_actor" }
lemmy_db_views_moderator = { version = "=0.19.4-beta.5", path = "./crates/db_views_moderator" }
lemmy_federate = { version = "=0.19.4-beta.5", path = "./crates/federate" }
activitypub_federation = { version = "0.5.4", default-features = false, features = [
lemmy_api = { version = "=0.19.4-beta.7", path = "./crates/api" }
lemmy_api_crud = { version = "=0.19.4-beta.7", path = "./crates/api_crud" }
lemmy_apub = { version = "=0.19.4-beta.7", path = "./crates/apub" }
lemmy_utils = { version = "=0.19.4-beta.7", path = "./crates/utils", default-features = false }
lemmy_db_schema = { version = "=0.19.4-beta.7", path = "./crates/db_schema" }
lemmy_api_common = { version = "=0.19.4-beta.7", path = "./crates/api_common" }
lemmy_routes = { version = "=0.19.4-beta.7", path = "./crates/routes" }
lemmy_db_views = { version = "=0.19.4-beta.7", path = "./crates/db_views" }
lemmy_db_views_actor = { version = "=0.19.4-beta.7", path = "./crates/db_views_actor" }
lemmy_db_views_moderator = { version = "=0.19.4-beta.7", path = "./crates/db_views_moderator" }
lemmy_federate = { version = "=0.19.4-beta.7", path = "./crates/federate" }
activitypub_federation = { version = "0.5.6", default-features = false, features = [
"actix-web",
] }
diesel = "2.1.6"
diesel_migrations = "2.1.0"
diesel-async = "0.4.1"
serde = { version = "1.0.198", features = ["derive"] }
serde_with = "3.7.0"
serde = { version = "1.0.199", features = ["derive"] }
serde_with = "3.8.1"
actix-web = { version = "4.5.1", default-features = false, features = [
"macros",
"rustls",
@@ -129,7 +129,7 @@ doku = { version = "0.21.1", features = ["url-2"] }
bcrypt = "0.15.1"
chrono = { version = "0.4.38", features = ["serde"], default-features = false }
serde_json = { version = "1.0.116", features = ["preserve_order"] }
base64 = "0.22.0"
base64 = "0.22.1"
uuid = { version = "1.8.0", features = ["serde", "v4"] }
async-trait = "0.1.80"
captcha = "0.0.9"
@@ -157,10 +157,10 @@ ts-rs = { version = "7.1.1", features = [
"chrono-impl",
"no-serde-warnings",
] }
rustls = { version = "0.21.11", features = ["dangerous_configuration"] }
rustls = { version = "0.23.5", features = ["ring"] }
futures-util = "0.3.30"
tokio-postgres = "0.7.10"
tokio-postgres-rustls = "0.10.0"
tokio-postgres-rustls = "0.12.0"
urlencoding = "2.1.3"
enum-map = "2.7"
moka = { version = "0.12.7", features = ["future"] }

api_tests/.npmrc (new file, 1 addition)

@@ -0,0 +1 @@
package-manager-strict=false


@@ -6,7 +6,7 @@
"repository": "https://github.com/LemmyNet/lemmy",
"author": "Dessalines",
"license": "AGPL-3.0",
"packageManager": "pnpm@9.0.4",
"packageManager": "pnpm@9.0.6",
"scripts": {
"lint": "tsc --noEmit && eslint --report-unused-disable-directives --ext .js,.ts,.tsx src && prettier --check 'src/**/*.ts'",
"fix": "prettier --write src && eslint --fix src",


@@ -13,13 +13,13 @@ importers:
version: 29.5.12
'@types/node':
specifier: ^20.12.4
version: 20.12.7
version: 20.12.4
'@typescript-eslint/eslint-plugin':
specifier: ^7.5.0
version: 7.5.0(@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5)
version: 7.5.0(@typescript-eslint/parser@7.5.0)(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/parser':
specifier: ^7.5.0
version: 7.5.0(eslint@8.57.0)(typescript@5.4.5)
version: 7.5.0(eslint@8.57.0)(typescript@5.4.4)
download-file-sync:
specifier: ^1.0.4
version: 1.0.4
@@ -31,7 +31,7 @@ importers:
version: 5.1.3(eslint@8.57.0)(prettier@3.2.5)
jest:
specifier: ^29.5.0
version: 29.7.0(@types/node@20.12.7)
version: 29.7.0(@types/node@20.12.4)
lemmy-js-client:
specifier: 0.19.4-alpha.18
version: 0.19.4-alpha.18
@@ -40,10 +40,10 @@ importers:
version: 3.2.5
ts-jest:
specifier: ^29.1.0
version: 29.1.2(@babel/core@7.23.9)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.23.9))(jest@29.7.0(@types/node@20.12.7))(typescript@5.4.5)
version: 29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.4.4)
typescript:
specifier: ^5.4.4
version: 5.4.5
version: 5.4.4
packages:
@@ -398,8 +398,8 @@ packages:
'@types/json-schema@7.0.15':
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
'@types/node@20.12.7':
resolution: {integrity: sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==}
'@types/node@20.12.4':
resolution: {integrity: sha512-E+Fa9z3wSQpzgYQdYmme5X3OTuejnnTx88A6p6vkkJosR3KBz+HpE3kqNm98VE6cfLFcISx7zW7MsJkH6KwbTw==}
'@types/semver@7.5.8':
resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==}
@@ -1539,8 +1539,8 @@ packages:
resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==}
engines: {node: '>=10'}
typescript@5.4.5:
resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==}
typescript@5.4.4:
resolution: {integrity: sha512-dGE2Vv8cpVvw28v8HCPqyb08EzbBURxDpuhJvTrusShUfGnhHBafDsLdS1EhhxyL6BJQE+2cT3dDPAv+MQ6oLw==}
engines: {node: '>=14.17'}
hasBin: true
@@ -1857,7 +1857,7 @@ snapshots:
'@jest/console@29.7.0':
dependencies:
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
chalk: 4.1.2
jest-message-util: 29.7.0
jest-util: 29.7.0
@@ -1870,14 +1870,14 @@ snapshots:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
ansi-escapes: 4.3.2
chalk: 4.1.2
ci-info: 3.9.0
exit: 0.1.2
graceful-fs: 4.2.11
jest-changed-files: 29.7.0
jest-config: 29.7.0(@types/node@20.12.7)
jest-config: 29.7.0(@types/node@20.12.4)
jest-haste-map: 29.7.0
jest-message-util: 29.7.0
jest-regex-util: 29.6.3
@@ -1902,7 +1902,7 @@ snapshots:
dependencies:
'@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
jest-mock: 29.7.0
'@jest/expect-utils@29.7.0':
@@ -1920,7 +1920,7 @@ snapshots:
dependencies:
'@jest/types': 29.6.3
'@sinonjs/fake-timers': 10.3.0
'@types/node': 20.12.7
'@types/node': 20.12.4
jest-message-util: 29.7.0
jest-mock: 29.7.0
jest-util: 29.7.0
@@ -1942,7 +1942,7 @@ snapshots:
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@jridgewell/trace-mapping': 0.3.22
'@types/node': 20.12.7
'@types/node': 20.12.4
chalk: 4.1.2
collect-v8-coverage: 1.0.2
exit: 0.1.2
@@ -2012,7 +2012,7 @@ snapshots:
'@jest/schemas': 29.6.3
'@types/istanbul-lib-coverage': 2.0.6
'@types/istanbul-reports': 3.0.4
'@types/node': 20.12.7
'@types/node': 20.12.4
'@types/yargs': 17.0.32
chalk: 4.1.2
@@ -2080,7 +2080,7 @@ snapshots:
'@types/graceful-fs@4.1.9':
dependencies:
'@types/node': 20.12.7
'@types/node': 20.12.4
'@types/istanbul-lib-coverage@2.0.6': {}
@@ -2099,7 +2099,7 @@ snapshots:
'@types/json-schema@7.0.15': {}
'@types/node@20.12.7':
'@types/node@20.12.4':
dependencies:
undici-types: 5.26.5
@@ -2113,13 +2113,13 @@ snapshots:
dependencies:
'@types/yargs-parser': 21.0.3
'@typescript-eslint/eslint-plugin@7.5.0(@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5)':
'@typescript-eslint/eslint-plugin@7.5.0(@typescript-eslint/parser@7.5.0)(eslint@8.57.0)(typescript@5.4.4)':
dependencies:
'@eslint-community/regexpp': 4.10.0
'@typescript-eslint/parser': 7.5.0(eslint@8.57.0)(typescript@5.4.5)
'@typescript-eslint/parser': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/scope-manager': 7.5.0
'@typescript-eslint/type-utils': 7.5.0(eslint@8.57.0)(typescript@5.4.5)
'@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.5)
'@typescript-eslint/type-utils': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/visitor-keys': 7.5.0
debug: 4.3.4
eslint: 8.57.0
@@ -2127,22 +2127,20 @@ snapshots:
ignore: 5.3.1
natural-compare: 1.4.0
semver: 7.6.0
ts-api-utils: 1.3.0(typescript@5.4.5)
optionalDependencies:
typescript: 5.4.5
ts-api-utils: 1.3.0(typescript@5.4.4)
typescript: 5.4.4
transitivePeerDependencies:
- supports-color
'@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.5)':
'@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.4)':
dependencies:
'@typescript-eslint/scope-manager': 7.5.0
'@typescript-eslint/types': 7.5.0
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.5)
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.4)
'@typescript-eslint/visitor-keys': 7.5.0
debug: 4.3.4
eslint: 8.57.0
optionalDependencies:
typescript: 5.4.5
typescript: 5.4.4
transitivePeerDependencies:
- supports-color
@@ -2151,21 +2149,20 @@ snapshots:
'@typescript-eslint/types': 7.5.0
'@typescript-eslint/visitor-keys': 7.5.0
'@typescript-eslint/type-utils@7.5.0(eslint@8.57.0)(typescript@5.4.5)':
'@typescript-eslint/type-utils@7.5.0(eslint@8.57.0)(typescript@5.4.4)':
dependencies:
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.5)
'@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.5)
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.4)
'@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
debug: 4.3.4
eslint: 8.57.0
ts-api-utils: 1.3.0(typescript@5.4.5)
optionalDependencies:
typescript: 5.4.5
ts-api-utils: 1.3.0(typescript@5.4.4)
typescript: 5.4.4
transitivePeerDependencies:
- supports-color
'@typescript-eslint/types@7.5.0': {}
'@typescript-eslint/typescript-estree@7.5.0(typescript@5.4.5)':
'@typescript-eslint/typescript-estree@7.5.0(typescript@5.4.4)':
dependencies:
'@typescript-eslint/types': 7.5.0
'@typescript-eslint/visitor-keys': 7.5.0
@@ -2174,20 +2171,19 @@ snapshots:
is-glob: 4.0.3
minimatch: 9.0.3
semver: 7.6.0
ts-api-utils: 1.3.0(typescript@5.4.5)
optionalDependencies:
typescript: 5.4.5
ts-api-utils: 1.3.0(typescript@5.4.4)
typescript: 5.4.4
transitivePeerDependencies:
- supports-color
'@typescript-eslint/utils@7.5.0(eslint@8.57.0)(typescript@5.4.5)':
'@typescript-eslint/utils@7.5.0(eslint@8.57.0)(typescript@5.4.4)':
dependencies:
'@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0)
'@types/json-schema': 7.0.15
'@types/semver': 7.5.8
'@typescript-eslint/scope-manager': 7.5.0
'@typescript-eslint/types': 7.5.0
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.5)
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.4)
eslint: 8.57.0
semver: 7.6.0
transitivePeerDependencies:
@@ -2378,13 +2374,13 @@ snapshots:
convert-source-map@2.0.0: {}
create-jest@29.7.0(@types/node@20.12.7):
create-jest@29.7.0(@types/node@20.12.4):
dependencies:
'@jest/types': 29.6.3
chalk: 4.1.2
exit: 0.1.2
graceful-fs: 4.2.11
jest-config: 29.7.0(@types/node@20.12.7)
jest-config: 29.7.0(@types/node@20.12.4)
jest-util: 29.7.0
prompts: 2.4.2
transitivePeerDependencies:
@@ -2751,7 +2747,7 @@ snapshots:
'@jest/expect': 29.7.0
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
chalk: 4.1.2
co: 4.6.0
dedent: 1.5.1
@@ -2771,16 +2767,16 @@ snapshots:
- babel-plugin-macros
- supports-color
jest-cli@29.7.0(@types/node@20.12.7):
jest-cli@29.7.0(@types/node@20.12.4):
dependencies:
'@jest/core': 29.7.0
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
chalk: 4.1.2
create-jest: 29.7.0(@types/node@20.12.7)
create-jest: 29.7.0(@types/node@20.12.4)
exit: 0.1.2
import-local: 3.1.0
jest-config: 29.7.0(@types/node@20.12.7)
jest-config: 29.7.0(@types/node@20.12.4)
jest-util: 29.7.0
jest-validate: 29.7.0
yargs: 17.7.2
@@ -2790,11 +2786,12 @@ snapshots:
- supports-color
- ts-node
jest-config@29.7.0(@types/node@20.12.7):
jest-config@29.7.0(@types/node@20.12.4):
dependencies:
'@babel/core': 7.23.9
'@jest/test-sequencer': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.4
babel-jest: 29.7.0(@babel/core@7.23.9)
chalk: 4.1.2
ci-info: 3.9.0
@@ -2814,8 +2811,6 @@ snapshots:
pretty-format: 29.7.0
slash: 3.0.0
strip-json-comments: 3.1.1
optionalDependencies:
'@types/node': 20.12.7
transitivePeerDependencies:
- babel-plugin-macros
- supports-color
@@ -2844,7 +2839,7 @@ snapshots:
'@jest/environment': 29.7.0
'@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
jest-mock: 29.7.0
jest-util: 29.7.0
@@ -2854,7 +2849,7 @@ snapshots:
dependencies:
'@jest/types': 29.6.3
'@types/graceful-fs': 4.1.9
'@types/node': 20.12.7
'@types/node': 20.12.4
anymatch: 3.1.3
fb-watchman: 2.0.2
graceful-fs: 4.2.11
@@ -2893,11 +2888,11 @@ snapshots:
jest-mock@29.7.0:
dependencies:
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
jest-util: 29.7.0
jest-pnp-resolver@1.2.3(jest-resolve@29.7.0):
optionalDependencies:
dependencies:
jest-resolve: 29.7.0
jest-regex-util@29.6.3: {}
@@ -2928,7 +2923,7 @@ snapshots:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
chalk: 4.1.2
emittery: 0.13.1
graceful-fs: 4.2.11
@@ -2956,7 +2951,7 @@ snapshots:
'@jest/test-result': 29.7.0
'@jest/transform': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
chalk: 4.1.2
cjs-module-lexer: 1.2.3
collect-v8-coverage: 1.0.2
@@ -3002,7 +2997,7 @@ snapshots:
jest-util@29.7.0:
dependencies:
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
chalk: 4.1.2
ci-info: 3.9.0
graceful-fs: 4.2.11
@@ -3021,7 +3016,7 @@ snapshots:
dependencies:
'@jest/test-result': 29.7.0
'@jest/types': 29.6.3
'@types/node': 20.12.7
'@types/node': 20.12.4
ansi-escapes: 4.3.2
chalk: 4.1.2
emittery: 0.13.1
@@ -3030,17 +3025,17 @@ snapshots:
jest-worker@29.7.0:
dependencies:
'@types/node': 20.12.7
'@types/node': 20.12.4
jest-util: 29.7.0
merge-stream: 2.0.0
supports-color: 8.1.1
jest@29.7.0(@types/node@20.12.7):
jest@29.7.0(@types/node@20.12.4):
dependencies:
'@jest/core': 29.7.0
'@jest/types': 29.6.3
import-local: 3.1.0
jest-cli: 29.7.0(@types/node@20.12.7)
jest-cli: 29.7.0(@types/node@20.12.4)
transitivePeerDependencies:
- '@types/node'
- babel-plugin-macros
@@ -3362,26 +3357,23 @@ snapshots:
dependencies:
is-number: 7.0.0
ts-api-utils@1.3.0(typescript@5.4.5):
ts-api-utils@1.3.0(typescript@5.4.4):
dependencies:
typescript: 5.4.5
typescript: 5.4.4
ts-jest@29.1.2(@babel/core@7.23.9)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.23.9))(jest@29.7.0(@types/node@20.12.7))(typescript@5.4.5):
ts-jest@29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.4.4):
dependencies:
'@babel/core': 7.23.9
bs-logger: 0.2.6
fast-json-stable-stringify: 2.1.0
jest: 29.7.0(@types/node@20.12.7)
jest: 29.7.0(@types/node@20.12.4)
jest-util: 29.7.0
json5: 2.2.3
lodash.memoize: 4.1.2
make-error: 1.3.6
semver: 7.5.4
typescript: 5.4.5
typescript: 5.4.4
yargs-parser: 21.1.1
optionalDependencies:
'@babel/core': 7.23.9
'@jest/types': 29.6.3
babel-jest: 29.7.0(@babel/core@7.23.9)
tslib@2.6.2: {}
@@ -3395,7 +3387,7 @@ snapshots:
type-fest@0.21.3: {}
typescript@5.4.5: {}
typescript@5.4.4: {}
undici-types@5.26.5: {}


@@ -661,40 +661,60 @@ test("A and G subscribe to B (center) A posts, it gets announced to G", async ()
});
test("Report a post", async () => {
// Note, this is a different one from the setup
let betaCommunity = (await resolveBetaCommunity(beta)).community;
if (!betaCommunity) {
throw "Missing beta community";
}
// Create post from alpha
let alphaCommunity = (await resolveBetaCommunity(alpha)).community!;
await followBeta(alpha);
let postRes = await createPost(beta, betaCommunity.community.id);
let postRes = await createPost(alpha, alphaCommunity.community.id);
expect(postRes.post_view.post).toBeDefined();
let alphaPost = (await resolvePost(alpha, postRes.post_view.post)).post;
if (!alphaPost) {
throw "Missing alpha post";
}
let alphaReport = (
await reportPost(alpha, alphaPost.post.id, randomString(10))
).post_report_view.post_report;
// Send report from gamma
let gammaPost = (await resolvePost(gamma, alphaPost.post)).post!;
let gammaReport = (
await reportPost(gamma, gammaPost.post.id, randomString(10))
).post_report_view.post_report;
expect(gammaReport).toBeDefined();
// Report was federated to community instance
let betaReport = (await waitUntil(
() =>
listPostReports(beta).then(p =>
p.post_reports.find(
r =>
r.post_report.original_post_name === alphaReport.original_post_name,
r.post_report.original_post_name === gammaReport.original_post_name,
),
),
res => !!res,
))!.post_report;
expect(betaReport).toBeDefined();
expect(betaReport.resolved).toBe(false);
expect(betaReport.original_post_name).toBe(alphaReport.original_post_name);
expect(betaReport.original_post_url).toBe(alphaReport.original_post_url);
expect(betaReport.original_post_body).toBe(alphaReport.original_post_body);
expect(betaReport.reason).toBe(alphaReport.reason);
expect(betaReport.original_post_name).toBe(gammaReport.original_post_name);
//expect(betaReport.original_post_url).toBe(gammaReport.original_post_url);
expect(betaReport.original_post_body).toBe(gammaReport.original_post_body);
expect(betaReport.reason).toBe(gammaReport.reason);
await unfollowRemotes(alpha);
// Report was federated to poster's instance
let alphaReport = (await waitUntil(
() =>
listPostReports(alpha).then(p =>
p.post_reports.find(
r =>
r.post_report.original_post_name === gammaReport.original_post_name,
),
),
res => !!res,
))!.post_report;
expect(alphaReport).toBeDefined();
expect(alphaReport.resolved).toBe(false);
expect(alphaReport.original_post_name).toBe(gammaReport.original_post_name);
//expect(alphaReport.original_post_url).toBe(gammaReport.original_post_url);
expect(alphaReport.original_post_body).toBe(gammaReport.original_post_body);
expect(alphaReport.reason).toBe(gammaReport.reason);
});
test("Fetch post via redirect", async () => {


@@ -29,7 +29,7 @@ pub async fn add_admin(
.await?
.ok_or(LemmyErrorType::ObjectNotLocal)?;
let added_admin = LocalUser::update(
LocalUser::update(
&mut context.pool(),
added_local_user.local_user.id,
&LocalUserUpdateForm {
@@ -43,7 +43,7 @@ pub async fn add_admin(
// Mod tables
let form = ModAddForm {
mod_person_id: local_user_view.person.id,
other_person_id: added_admin.person_id,
other_person_id: added_local_user.person.id,
removed: Some(!data.added),
};


@@ -1,11 +1,7 @@
use crate::{build_totp_2fa, generate_totp_2fa_secret};
use activitypub_federation::config::Data;
use actix_web::web::Json;
use lemmy_api_common::{
context::LemmyContext,
person::GenerateTotpSecretResponse,
sensitive::Sensitive,
};
use lemmy_api_common::{context::LemmyContext, person::GenerateTotpSecretResponse};
use lemmy_db_schema::source::local_user::{LocalUser, LocalUserUpdateForm};
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};
@@ -41,6 +37,6 @@ pub async fn generate_totp_secret(
.await?;
Ok(Json(GenerateTotpSecretResponse {
totp_secret_url: Sensitive::new(secret_url),
totp_secret_url: secret_url.into(),
}))
}


@@ -28,6 +28,7 @@ use lemmy_utils::{
error::{LemmyErrorType, LemmyResult},
utils::validation::{is_valid_bio_field, is_valid_display_name, is_valid_matrix_id},
};
use std::ops::Deref;
#[tracing::instrument(skip(context))]
pub async fn save_user_settings(
@@ -57,7 +58,7 @@ pub async fn save_user_settings(
if let Some(Some(email)) = &email {
let previous_email = local_user_view.local_user.email.clone().unwrap_or_default();
// if email was changed, check that it is not taken and send verification mail
if &previous_email != email {
if previous_email.deref() != email {
if LocalUser::is_email_taken(&mut context.pool(), email).await? {
return Err(LemmyErrorType::EmailAlreadyExists)?;
}
@@ -141,11 +142,7 @@ pub async fn save_user_settings(
..Default::default()
};
// Ignore errors, because 'no fields updated' will return an error.
// https://github.com/LemmyNet/lemmy/issues/4076
LocalUser::update(&mut context.pool(), local_user_id, &local_user_form)
.await
.ok();
LocalUser::update(&mut context.pool(), local_user_id, &local_user_form).await?;
// Update the vote display modes
let vote_display_modes_form = LocalUserVoteDisplayModeUpdateForm {


@@ -9,12 +9,10 @@ use lemmy_db_schema::{
source::{
email_verification::EmailVerification,
local_user::{LocalUser, LocalUserUpdateForm},
person::Person,
},
traits::Crud,
RegistrationMode,
};
use lemmy_db_views::structs::SiteView;
use lemmy_db_views::structs::{LocalUserView, SiteView};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};
pub async fn verify_email(
@@ -38,7 +36,7 @@ pub async fn verify_email(
};
let local_user_id = verification.local_user_id;
let local_user = LocalUser::update(&mut context.pool(), local_user_id, &form).await?;
LocalUser::update(&mut context.pool(), local_user_id, &form).await?;
EmailVerification::delete_old_tokens_for_local_user(&mut context.pool(), local_user_id).await?;
@@ -46,12 +44,16 @@ pub async fn verify_email(
if site_view.local_site.registration_mode == RegistrationMode::RequireApplication
&& site_view.local_site.application_email_admins
{
let person = Person::read(&mut context.pool(), local_user.person_id)
let local_user = LocalUserView::read(&mut context.pool(), local_user_id)
.await?
.ok_or(LemmyErrorType::CouldntFindPerson)?;
send_new_applicant_email_to_admins(&person.name, &mut context.pool(), context.settings())
.await?;
send_new_applicant_email_to_admins(
&local_user.person.name,
&mut context.pool(),
context.settings(),
)
.await?;
}
Ok(Json(SuccessResponse::default()))


@@ -25,7 +25,7 @@ full = [
"lemmy_db_views_moderator/full",
"lemmy_utils/full",
"activitypub_federation",
"encoding",
"encoding_rs",
"reqwest-middleware",
"webpage",
"ts-rs",
@@ -69,7 +69,7 @@ mime = { version = "0.3.17", optional = true }
webpage = { version = "1.6", default-features = false, features = [
"serde",
], optional = true }
encoding = { version = "0.2.33", optional = true }
encoding_rs = { version = "0.8.34", optional = true }
jsonwebtoken = { version = "8.3.0", optional = true }
# necessary for wasmt compilation
getrandom = { version = "0.2.14", features = ["js"] }


@@ -1,9 +1,10 @@
use crate::{context::LemmyContext, sensitive::Sensitive};
use crate::context::LemmyContext;
use actix_web::{http::header::USER_AGENT, HttpRequest};
use chrono::Utc;
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use lemmy_db_schema::{
newtypes::LocalUserId,
sensitive::SensitiveString,
source::login_token::{LoginToken, LoginTokenCreateForm},
};
use lemmy_utils::error::{LemmyErrorExt, LemmyErrorType, LemmyResult};
@@ -40,7 +41,7 @@ impl Claims {
user_id: LocalUserId,
req: HttpRequest,
context: &LemmyContext,
) -> LemmyResult<Sensitive<String>> {
) -> LemmyResult<SensitiveString> {
let hostname = context.settings().hostname.clone();
let my_claims = Claims {
sub: user_id.0.to_string(),
@@ -50,7 +51,7 @@ impl Claims {
let secret = &context.secret().jwt_secret;
let key = EncodingKey::from_secret(secret.as_ref());
let token = encode(&Header::default(), &my_claims, &key)?;
let token: SensitiveString = encode(&Header::default(), &my_claims, &key)?.into();
let ip = req
.connection_info()
.realip_remote_addr()
@@ -67,7 +68,7 @@ impl Claims {
user_agent,
};
LoginToken::create(&mut context.pool(), form).await?;
Ok(Sensitive::new(token))
Ok(token)
}
}
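For reference, the signing call above in a self-contained form: a minimal sketch with the claims trimmed to the fields visible in this diff (sign_jwt and the standalone Claims struct are illustrative, not the exact lemmy_api_common definitions):

use jsonwebtoken::{encode, EncodingKey, Header};
use serde::Serialize;

#[derive(Serialize)]
struct Claims {
    sub: String, // local user id, stringified
    iss: String, // instance hostname
    iat: i64,    // issued-at timestamp
}

// Header::default() selects HS256; the key is derived from the instance jwt_secret.
fn sign_jwt(claims: &Claims, jwt_secret: &str) -> Result<String, jsonwebtoken::errors::Error> {
    let key = EncodingKey::from_secret(jwt_secret.as_ref());
    encode(&Header::default(), claims, &key)
}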


@@ -64,7 +64,7 @@ impl LemmyContext {
let client = ClientBuilder::new(client).build();
let secret = Secret {
id: 0,
jwt_secret: String::new(),
jwt_secret: String::new().into(),
};
let rate_limit_cell = RateLimitCell::with_test_config();


@@ -14,7 +14,6 @@ pub mod private_message;
pub mod request;
#[cfg(feature = "full")]
pub mod send_activity;
pub mod sensitive;
pub mod site;
#[cfg(feature = "full")]
pub mod utils;


@@ -1,6 +1,6 @@
use crate::sensitive::Sensitive;
use lemmy_db_schema::{
newtypes::{CommentReplyId, CommunityId, LanguageId, PersonId, PersonMentionId},
sensitive::SensitiveString,
source::site::Site,
CommentSortType,
ListingType,
@@ -25,8 +25,8 @@ use ts_rs::TS;
#[cfg_attr(feature = "full", ts(export))]
/// Logging into lemmy.
pub struct Login {
pub username_or_email: Sensitive<String>,
pub password: Sensitive<String>,
pub username_or_email: SensitiveString,
pub password: SensitiveString,
/// May be required, if totp is enabled for their account.
pub totp_2fa_token: Option<String>,
}
@@ -38,11 +38,11 @@ pub struct Login {
/// Register / Sign up to lemmy.
pub struct Register {
pub username: String,
pub password: Sensitive<String>,
pub password_verify: Sensitive<String>,
pub show_nsfw: bool,
pub password: SensitiveString,
pub password_verify: SensitiveString,
pub show_nsfw: Option<bool>,
/// email is mandatory if email verification is enabled on the server
pub email: Option<Sensitive<String>>,
pub email: Option<SensitiveString>,
/// The UUID of the captcha item.
pub captcha_uuid: Option<String>,
/// Your captcha answer.
@@ -99,7 +99,7 @@ pub struct SaveUserSettings {
/// Your display name, which can contain strange characters, and does not need to be unique.
pub display_name: Option<String>,
/// Your email.
pub email: Option<Sensitive<String>>,
pub email: Option<SensitiveString>,
/// Your bio / info, in markdown.
pub bio: Option<String>,
/// Your matrix user id. Ex: @my_user:matrix.org
@@ -140,9 +140,9 @@ pub struct SaveUserSettings {
#[cfg_attr(feature = "full", ts(export))]
/// Changes your account password.
pub struct ChangePassword {
pub new_password: Sensitive<String>,
pub new_password_verify: Sensitive<String>,
pub old_password: Sensitive<String>,
pub new_password: SensitiveString,
pub new_password_verify: SensitiveString,
pub old_password: SensitiveString,
}
#[skip_serializing_none]
@@ -152,7 +152,7 @@ pub struct ChangePassword {
/// A response for your login.
pub struct LoginResponse {
/// This is None in response to `Register` if email verification is enabled, or the server requires registration applications.
pub jwt: Option<Sensitive<String>>,
pub jwt: Option<SensitiveString>,
/// If registration applications are required, this will return true for a signup response.
pub registration_created: bool,
/// If email verifications are required, this will return true for a signup response.
@@ -340,7 +340,7 @@ pub struct CommentReplyResponse {
#[cfg_attr(feature = "full", ts(export))]
/// Delete your account.
pub struct DeleteAccount {
pub password: Sensitive<String>,
pub password: SensitiveString,
pub delete_content: bool,
}
@@ -349,7 +349,7 @@ pub struct DeleteAccount {
#[cfg_attr(feature = "full", ts(export))]
/// Reset your password via email.
pub struct PasswordReset {
pub email: Sensitive<String>,
pub email: SensitiveString,
}
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
@@ -357,9 +357,9 @@ pub struct PasswordReset {
#[cfg_attr(feature = "full", ts(export))]
/// Change your password after receiving a reset request.
pub struct PasswordChangeAfterReset {
pub token: Sensitive<String>,
pub password: Sensitive<String>,
pub password_verify: Sensitive<String>,
pub token: SensitiveString,
pub password: SensitiveString,
pub password_verify: SensitiveString,
}
#[skip_serializing_none]
@@ -405,7 +405,7 @@ pub struct VerifyEmail {
#[cfg_attr(feature = "full", derive(TS))]
#[cfg_attr(feature = "full", ts(export))]
pub struct GenerateTotpSecretResponse {
pub totp_secret_url: Sensitive<String>,
pub totp_secret_url: SensitiveString,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]


@@ -6,7 +6,7 @@ use crate::{
utils::{local_site_opt_to_sensitive, proxy_image_link, proxy_image_link_opt_apub},
};
use activitypub_federation::config::Data;
use encoding::{all::encodings, DecoderTrap};
use encoding_rs::{Encoding, UTF_8};
use lemmy_db_schema::{
newtypes::DbUrl,
source::{
@@ -160,11 +160,9 @@ fn extract_opengraph_data(html_bytes: &[u8], url: &Url) -> LemmyResult<OpenGraph
// proper encoding. If the specified encoding cannot be found, fall back to the original UTF-8
// version.
if let Some(charset) = page.meta.get("charset") {
if charset.to_lowercase() != "utf-8" {
if let Some(encoding_ref) = encodings().iter().find(|e| e.name() == charset) {
if let Ok(html_with_encoding) = encoding_ref.decode(html_bytes, DecoderTrap::Replace) {
page = HTML::from_string(html_with_encoding, None)?;
}
if charset != UTF_8.name() {
if let Some(encoding) = Encoding::for_label(charset.as_bytes()) {
page = HTML::from_string(encoding.decode(html_bytes).0.into(), None)?;
}
}
}
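In isolation, the new decoding path boils down to the following sketch (the helper name decode_html is illustrative; Encoding::for_label and decode are the encoding_rs APIs used above):

use encoding_rs::Encoding;

// Decode raw page bytes using a charset label from an HTML meta tag (e.g. "euc-kr").
// for_label() resolves WHATWG encoding labels and returns None for unknown ones;
// decode() replaces malformed sequences with U+FFFD instead of failing, matching
// the old DecoderTrap::Replace behavior.
fn decode_html(html_bytes: &[u8], charset: &str) -> Option<String> {
    let encoding = Encoding::for_label(charset.as_bytes())?;
    let (text, _encoding_used, _had_errors) = encoding.decode(html_bytes);
    Some(text.into_owned())
}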


@@ -1,116 +0,0 @@
use serde::{Deserialize, Serialize};
use std::{
borrow::Borrow,
ops::{Deref, DerefMut},
};
#[cfg(feature = "full")]
use ts_rs::TS;
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
#[serde(transparent)]
pub struct Sensitive<T>(T);
impl<T> Sensitive<T> {
pub fn new(item: T) -> Self {
Sensitive(item)
}
pub fn into_inner(self) -> T {
self.0
}
}
impl<T> std::fmt::Debug for Sensitive<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Sensitive").finish()
}
}
impl<T> AsRef<T> for Sensitive<T> {
fn as_ref(&self) -> &T {
&self.0
}
}
impl AsRef<str> for Sensitive<String> {
fn as_ref(&self) -> &str {
&self.0
}
}
impl AsRef<[u8]> for Sensitive<String> {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl AsRef<[u8]> for Sensitive<Vec<u8>> {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl<T> AsMut<T> for Sensitive<T> {
fn as_mut(&mut self) -> &mut T {
&mut self.0
}
}
impl AsMut<str> for Sensitive<String> {
fn as_mut(&mut self) -> &mut str {
&mut self.0
}
}
impl Deref for Sensitive<String> {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for Sensitive<String> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<T> From<T> for Sensitive<T> {
fn from(t: T) -> Self {
Sensitive(t)
}
}
impl From<&str> for Sensitive<String> {
fn from(s: &str) -> Self {
Sensitive(s.into())
}
}
impl<T> Borrow<T> for Sensitive<T> {
fn borrow(&self) -> &T {
&self.0
}
}
impl Borrow<str> for Sensitive<String> {
fn borrow(&self) -> &str {
&self.0
}
}
#[cfg(feature = "full")]
impl TS for Sensitive<String> {
fn name() -> String {
"string".to_string()
}
fn name_with_type_args(_args: Vec<String>) -> String {
"string".to_string()
}
fn dependencies() -> Vec<ts_rs::Dependency> {
Vec::new()
}
fn transparent() -> bool {
true
}
}
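The replacement type is SensitiveString in lemmy_db_schema::sensitive (see the imports above), and its key property is the same one this deleted wrapper had: a Debug impl that never prints the inner value, so fields marked with it cannot leak into tracing or log output. A minimal sketch of the pattern (assumed shape, not the exact definition):

use std::fmt;

#[derive(Clone, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(transparent)]
pub struct SensitiveString(String);

// `{:?}` prints just "SensitiveString", redacting passwords, tokens, and emails.
impl fmt::Debug for SensitiveString {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SensitiveString").finish()
    }
}

impl From<String> for SensitiveString {
    fn from(s: String) -> Self {
        SensitiveString(s)
    }
}

// Deref to str keeps comparisons ergonomic, e.g. `previous_email.deref() != email`
// in save_user_settings above.
impl std::ops::Deref for SensitiveString {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}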


@@ -142,12 +142,17 @@ pub async fn register(
.map(|lang_str| lang_str.split('-').next().unwrap_or_default().to_string())
.collect();
// Show nsfw content if param is true, or if content_warning exists
let show_nsfw = data
.show_nsfw
.unwrap_or(site_view.site.content_warning.is_some());
// Create the local user
let local_user_form = LocalUserInsertForm::builder()
.person_id(inserted_person.id)
.email(data.email.as_deref().map(str::to_lowercase))
.password_encrypted(data.password.to_string())
.show_nsfw(Some(data.show_nsfw))
.show_nsfw(Some(show_nsfw))
.accepted_application(accepted_application)
.default_listing_type(Some(local_site.default_post_listing_type))
.post_listing_mode(Some(local_site.default_post_listing_mode))


@@ -0,0 +1,22 @@
{
"id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
"type": "Group",
"updated": "2024-04-05T12:49:51Z",
"url": "https://socialhub.activitypub.rocks/c/meeting/threadiverse-wg/88",
"name": "Threadiverse Working Group (SocialHub)",
"inbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/inbox",
"outbox": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/outbox",
"followers": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146/followers",
"preferredUsername": "threadiverse-wg",
"publicKey": {
"id": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146#main-key",
"owner": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApJi4iAcW6bPiHVCxT9p0\n8DVnrDDO4QtLNy7bpRFdMFifmmmXprsuAi9D2MSwbhH49V54HtIkxBpKd2IR/UD8\nmhMDY4CNI9FHpjqLw0wtkzxcqF9urSqhn0/vWX+9oxyhIgQS5KMiIkYDMJiAc691\niEcZ8LCran23xIGl6Dk54Nr3TqTMLcjDhzQYUJbxMrLq5/knWqOKG3IF5OxK+9ZZ\n1wxDF872eJTxJLkmpag+WYNtHzvB2SGTp8j5IF1/pZ9J1c3cpYfaeolTch/B/GQn\najCB4l27U52rIIObxJqFXSY8wHyd0aAmNmxzPZ7cduRlBDhmI40cAmnCV1YQPvpk\nDwIDAQAB\n-----END PUBLIC KEY-----\n"
},
"icon": {
"type": "Image",
"mediaType": "image/png",
"url": "https://socialhub.activitypub.rocks/uploads/default/original/1X/8faac84234dc73d074dadaa2bcf24dc746b8647f.png"
},
"@context": "https://www.w3.org/ns/activitystreams"
}


@@ -0,0 +1,13 @@
{
"id": "https://socialhub.activitypub.rocks/ap/object/1899f65c062200daec50a4c89ed76dc9",
"type": "Note",
"audience": "https://socialhub.activitypub.rocks/ap/actor/797217cf18c0e819dfafc52425590146",
"published": "2024-04-13T14:36:19Z",
"updated": "2024-04-13T14:36:19Z",
"url": "https://socialhub.activitypub.rocks/t/our-next-meeting/4079/1",
"attributedTo": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
"name": "Our next meeting",
"context": "https://socialhub.activitypub.rocks/ap/collection/8850f6e85b57c490da915a5dfbbd5045",
"content": "<h3>Last Meeting</h3>\n<h4>Recording</h4>\n<a href=\"https://us06web.zoom.us/rec/share/4hGBTvgXJPlu8UkjkkxVARypNg5DH0eeaKlIBv71D4G3lokYyrCrg7cqBCJmL109.FsHYTZDlVvZXrgcn?startTime=1712254114000\">https://us06web.zoom.us/rec/share/4hGBTvgXJPlu8UkjkkxVARypNg5DH0eeaKlIBv71D4G3lokYyrCrg7cqBCJmL109.FsHYTZDlVvZXrgcn?startTime=1712254114000</a>\nPasscode: z+1*4pUB\n<h4>Minutes</h4>\nTo refresh your memory, you can read the minutes of last week's meeting <a href=\"https://community.nodebb.org/topic/17949/minutes&hellip;",
"@context": "https://www.w3.org/ns/activitystreams"
}


@@ -0,0 +1,23 @@
{
"id": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
"type": "Person",
"updated": "2024-01-15T12:27:03Z",
"url": "https://socialhub.activitypub.rocks/u/angus",
"name": "Angus McLeod",
"inbox": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/inbox",
"outbox": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/outbox",
"sharedInbox": "https://socialhub.activitypub.rocks/ap/users/inbox",
"followers": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1/followers",
"preferredUsername": "angus",
"publicKey": {
"id": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1#main-key",
"owner": "https://socialhub.activitypub.rocks/ap/actor/495843076e9e469fbd35ccf467ae9fb1",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3RpuFDuwXZzOeHO5fO2O\nHmP7Flc5JDXJ8OOEJYq5T/dzUKqREOF1ZT0WMww8/E3P6w+gfFsjzThraJb8nHuW\nP6798SUD35CWBclfhw9DapjVn99JyFcAWcH3b9fr6LYshc4y1BoeJagk1kcro2Dc\n+pX0vVXgNjwWnGfyucAgGIUWrNUjcvIvXmyVCBSQfXG3nCALV1JbI4KSgf/5KyBn\nza/QefaetxYiFV8wAisPKLsz3XQAaITsQmbSi+8gmwXt/9U808PK1KphCiClDOWg\noi0HPzJn0rn+mwFCfgNWenvribfeG40AHLG33OkWKvslufjifdWDCOcBYYzyCEV6\n+wIDAQAB\n-----END PUBLIC KEY-----\n"
},
"icon": {
"type": "Image",
"mediaType": "image/png",
"url": "https://socialhub.activitypub.rocks/user_avatar/socialhub.activitypub.rocks/angus/96/2295_2.png"
},
"@context": "https://www.w3.org/ns/activitystreams"
}


@@ -23,7 +23,6 @@
"href": "https://lemmy.ml/pictrs/image/xl8W7FZfk9.jpg"
}
],
"commentsEnabled": true,
"sensitive": false,
"language": {
"identifier": "ko",


@@ -23,7 +23,6 @@
"href": "https://lemmy.ml/pictrs/image/xl8W7FZfk9.jpg"
}
],
"commentsEnabled": true,
"sensitive": false,
"published": "2021-10-29T15:10:51.557399Z",
"updated": "2021-10-29T15:11:35.976374Z"


@@ -15,7 +15,6 @@
"cc": [],
"mediaType": "text/html",
"attachment": [],
"commentsEnabled": true,
"sensitive": false,
"published": "2023-02-06T06:42:41.939437Z",
"language": {
@@ -36,7 +35,6 @@
"cc": [],
"mediaType": "text/html",
"attachment": [],
"commentsEnabled": true,
"sensitive": false,
"published": "2023-02-06T06:42:37.119567Z",
"language": {


@@ -22,7 +22,6 @@
],
"name": "another outbox test",
"mediaType": "text/html",
"commentsEnabled": true,
"sensitive": false,
"stickied": false,
"published": "2021-11-18T17:19:45.895163Z"
@@ -51,7 +50,6 @@
],
"name": "outbox test",
"mediaType": "text/html",
"commentsEnabled": true,
"sensitive": false,
"stickied": false,
"published": "2021-11-18T17:19:05.763109Z"


@@ -25,7 +25,6 @@
"url": "https://enterprise.lemmy.ml/pictrs/image/eOtYb9iEiB.png"
},
"sensitive": false,
"commentsEnabled": true,
"language": {
"identifier": "fr",
"name": "Français"


@@ -0,0 +1,22 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "https://community.nodebb.org/category/31",
"url": "https://community.nodebb.org/category/31/threadiverse-working-group",
"inbox": "https://community.nodebb.org/category/31/inbox",
"outbox": "https://community.nodebb.org/category/31/outbox",
"sharedInbox": "https://community.nodebb.org/inbox",
"type": "Group",
"name": "Threadiverse Working Group",
"preferredUsername": "swicg-threadiverse-wg",
"summary": "Discussion and announcements related to the SWICG Threadiverse task force",
"icon": {
"type": "Image",
"mediaType": "image/png",
"url": "https://community.nodebb.org/assets/uploads/system/site-logo.png"
},
"publicKey": {
"id": "https://community.nodebb.org/category/31#key",
"owner": "https://community.nodebb.org/category/31",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0/Or3Ox2/jbhBZzF8W0Y\nWuS/4lgm5O5rxQk2nDRBXU/qNaZnMPkW2FxFPuPetndUVKSD2+vWF3SUlFyZ/vhT\nITzLkbRSILMiZCUg+0mvqi6va1WMBglMe5jLkc7wdfgNsosqBzKMdyMxqDZr++mJ\n8DjuqzWHENcjWcbMfSfAa9nkZHBIQUsHGGIwxEbKNlPqF0JIB66py7xmXbboDxpD\nPVF3EMkgZNnbmDGtlkZCKbztradyNRVl/u6KJpV3fbi+m/8CZ+POc4I5sKCQY1Hr\ndslHlm6tCkJQxIIKQtz0ZJ5yCUYmk48C2gFCndfJtYoEy9iR62xSemky6y04gWVc\naQIDAQAB\n-----END PUBLIC KEY-----\n"
}
}


@@ -0,0 +1,38 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "https://community.nodebb.org/topic/17908",
"type": "Page",
"to": ["https://www.w3.org/ns/activitystreams#Public"],
"cc": ["https://community.nodebb.org/uid/2/followers"],
"inReplyTo": null,
"published": "2024-03-19T20:25:39.462Z",
"url": "https://community.nodebb.org/topic/17908/threadiverse-working-group",
"attributedTo": "https://community.nodebb.org/uid/2",
"audience": "https://community.nodebb.org/category/31/threadiverse-working-group",
"sensitive": false,
"summary": null,
"name": "Threadiverse Working Group",
"content": "<p dir=\"auto\">NodeBB is at this year's FediForum, and one of the breakout sessions centred around <strong>the Theadiverse</strong>, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.</p>\n<p dir=\"auto\">Some of the topic touched upon included:</p>\n<ul>\n<li>Aligning on a standard representation for collections of Notes</li>\n<li>FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.</li>\n<li>Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging</li>\n<li>Going forward: collaborating on building compatible threadiverse implementations</li>\n</ul>\n<p dir=\"auto\">The main action item involved <strong>the genesis of an informal working group for the threadiverse</strong>, in order to align our disparate implementations toward a common path.</p>\n<p dir=\"auto\">We intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.</p>\n<p dir=\"auto\">The topic of the first WG call is: <strong>Representation of the higherlevel collection of Notes (posts, etc.) — Article vs. Page, etc?</strong></p>\n<p dir=\"auto\">Interested?</p>\n<ul>\n<li>Publicly reply to this post (NodeBB does not support non-public posts at this time) if you'd like to join the list</li>\n<li>If you prefer to remain private, please email <a href=\"mailto:julian@nodebb.org\" rel=\"nofollow ugc\">julian@nodebb.org</a></li>\n</ul>\n<hr />\n<p dir=\"auto\">As an aside, I'd love to try something new and attempt tokeep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?</p>\n",
"source": {
"content": "NodeBB is at this year's FediForum, and one of the breakout sessions centred around **the Theadiverse**, the subset of ActivityPub-enabled applications built around a topic-centric model of content representation.\n\nSome of the topic touched upon included:\n\n* Aligning on a standard representation for collections of Notes\n* FEP-1b12 — Group federation and implementation thereof by Lemmy, et al.\n* Offering a comparatively more feature-rich experience vis-a-vis restrictions re: microblogging\n* Going forward: collaborating on building compatible threadiverse implementations\n\nThe main action item involved **the genesis of an informal working group for the threadiverse**, in order to align our disparate implementations toward a common path.\n\nWe intend to meet monthly at first, with the first meeting likely sometime early-to-mid April.\n\nThe topic of the first WG call is: **Representation of the higher level collection of Notes (posts, etc.) — Article vs. Page, etc?**\n\nInterested?\n\n* Publicly reply to this post (NodeBB does not support non-public postsat this time) if you'd like to join the list\n* If you prefer to remain private, please email julian@nodebb.org\n\n----\n\nAs an aside, I'd love to try something new and attempt to keep as much of this as I can on the social web. Can you do me a favour and boost this to your followers?",
"mediaType": "text/markdown"
},
"tag": [
{
"type": "Hashtag",
"href": "https://community.nodebb.org/tags/fediforum",
"name": "#fediforum"
},
{
"type": "Hashtag",
"href": "https://community.nodebb.org/tags/activitypub",
"name": "#activitypub"
},
{
"type": "Hashtag",
"href": "https://community.nodebb.org/tags/threadiverse",
"name": "#threadiverse"
}
],
"attachment": []
}


@@ -0,0 +1,29 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "https://community.nodebb.org/uid/2",
"url": "https://community.nodebb.org/user/julian",
"followers": "https://community.nodebb.org/uid/2/followers",
"following": "https://community.nodebb.org/uid/2/following",
"inbox": "https://community.nodebb.org/uid/2/inbox",
"outbox": "https://community.nodebb.org/uid/2/outbox",
"sharedInbox": "https://community.nodebb.org/inbox",
"type": "Person",
"name": "julian",
"preferredUsername": "julian",
"summary": "Hi! I'm Julian, one of the co-founders of NodeBB, the forum software you are using right now.\r\n\r\nI started this company with two colleagues, Baris and Andrew, in 2013, and have been doing the startup thing since (although I think at some point along the way we stopped being a startup and just became a boring ol' small business).\r\n\r\nIn my free time I rock climb, cycle, and lift weights. I live just outside Toronto, Canada, with my wife and three children.",
"icon": {
"type": "Image",
"mediaType": "image/jpeg",
"url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profileavatar-1701457270279.jpeg"
},
"image": {
"type": "Image",
"mediaType": "image/jpeg",
"url": "https://community.nodebb.org/assets/uploads/profile/uid-2/2-profilecover-1649468285913.jpeg"
},
"publicKey": {
"id": "https://community.nodebb.org/uid/2#key",
"owner": "https://community.nodebb.org/uid/2",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzEr0sFdATahQzprS4EOT\nZq+KMc6UTbt2GDP20OrQi/P5AXAbMaQiRCRdGWhYGjnH0jicn5NnozNxRo+HchJT\nV6NOHxpsxqPCoaLeoBkhfhbSCLr2Gzil6mmfqf9TjnI7A7ZTtCc0G+n0ztyL9HwL\nkEAI178l2gckk4XKKYnEd+dyiIevExrq/ROLgwW1o428FZvlF5amKxhpVUEygRU8\nCd1hqWYs+xYDOJURCP5qEx/MmRPpV/yGMTMyF+/gcQc0TUZnhWAM2E4M+aq3aKh6\nJP/vsry+5YZPUaPWfopbT5Ijyt6ZSElp6Avkg56eTz0a5SRcjCVS6IFVPwiLlzOe\nYwIDAQAB\n-----END PUBLIC KEY-----\n"
}
}


@@ -0,0 +1,49 @@
{
"@context": ["https://www.w3.org/ns/activitystreams"],
"id": "https://pfefferle.org/lemmy-part-4/#activity#activity",
"type": "Announce",
"audience": "https://pfefferle.org/@pfefferle.org",
"published": "2024-05-03T12:32:29Z",
"updated": "2024-05-06T08:20:33Z",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": [],
"object": {
"id": "https://pfefferle.org/lemmy-part-4/#activity",
"type": "Update",
"audience": "https://pfefferle.org/@pfefferle.org",
"published": "2024-05-03T12:32:29Z",
"updated": "2024-05-06T08:20:33Z",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": [],
"object": {
"id": "https://pfefferle.org/lemmy-part-4/",
"type": "Article",
"attachment": [],
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"audience": "https://pfefferle.org/@pfefferle.org",
"content": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E",
"contentMap": {
"en": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E"
},
"name": "Lemmy (Part 4)",
"published": "2024-05-03T12:32:29Z",
"summary": "Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object [...]",
"tag": [],
"updated": "2024-05-06T08:20:33Z",
"url": "https://pfefferle.org/lemmy-part-4/",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": []
},
"actor": "https://pfefferle.org/author/pfefferle/"
},
"actor": "https://pfefferle.org/@pfefferle.org"
}
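
Note the double wrapping above: an `Announce` whose `object` is itself an `Update` activity carrying the `Article`. A sketch of how that shape can be modelled with serde, loosely mirroring the `IdOrNestedObject` type used by the Announce handler later in this diff; all names here are illustrative:

```rust
use serde::Deserialize;
use url::Url;

// The object of an Announce may be a bare id (a URL string) or a full nested
// activity; an untagged enum handles both shapes.
#[derive(Deserialize)]
#[serde(untagged)]
enum IdOrNestedObject<T> {
    Id(Url),
    NestedObject(T),
}

#[derive(Deserialize)]
struct Update {
    id: Url,
    actor: Url,
    // the inner Article is left out of this sketch
}

#[derive(Deserialize)]
struct Announce {
    id: Url,
    actor: Url,
    object: IdOrNestedObject<Update>,
}

fn main() {
    // A minimal Announce whose object is only an id, not a nested activity.
    let json = r##"{
        "id": "https://pfefferle.org/lemmy-part-4/#activity#activity",
        "actor": "https://pfefferle.org/@pfefferle.org",
        "object": "https://pfefferle.org/lemmy-part-4/#activity"
    }"##;
    let announce: Announce = serde_json::from_str(json).expect("parses");
    assert!(matches!(announce.object, IdOrNestedObject::Id(_)));
}
```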

View file

@ -0,0 +1,66 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://purl.archive.org/socialweb/webfinger",
{
"schema": "http://schema.org#",
"toot": "http://joinmastodon.org/ns#",
"webfinger": "https://webfinger.net/#",
"lemmy": "https://join-lemmy.org/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"Hashtag": "as:Hashtag",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"moderators": {
"@id": "lemmy:moderators",
"@type": "@id"
},
"postingRestrictedToMods": "lemmy:postingRestrictedToMods",
"discoverable": "toot:discoverable",
"indexable": "toot:indexable",
"resource": "webfinger:resource"
}
],
"id": "https://pfefferle.org/@pfefferle.org",
"type": "Group",
"attachment": [],
"attributedTo": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
"name": "Matthias Pfefferle",
"icon": {
"type": "Image",
"url": "https://pfefferle.org/wp-content/uploads/2023/06/cropped-BeLItBV-_400x400.jpg"
},
"published": "2024-04-03T16:58:22Z",
"summary": "<p>Webworker, blogger und podcaster</p>\n",
"tag": [],
"url": "https://pfefferle.org/@pfefferle.org",
"inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/inbox",
"outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/outbox",
"following": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/following",
"followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers",
"preferredUsername": "pfefferle.org",
"endpoints": {
"sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
},
"publicKey": {
"id": "https://pfefferle.org/@pfefferle.org#main-key",
"owner": "https://pfefferle.org/@pfefferle.org",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuq8xeLMFcaCwPFBhgMRE\n/dDh2XKoNXFXnixctmK8BXSuuLMxucm3I/8NyhIvb3LqU+uP1fO8F0ecUbk2sN+x\nKag5vIV6yKXzJ8ILMWQ9AaELpXDmMZqL0zal0LUJRAOkDgPDovDAoq6tx++yDoV0\njdVbf9CoZKit1cz2ZrEuE5dswq3J/z9+c6POkhCkWEX5TPJzkOrmnjkvrXxGHUJ2\nA3+P+VaZhd5cmvqYosSpYNJshxCdev12pIF78OnYLiYiyXlgGHU+7uQR0M4tTcij\n6cUdLkms9m+b6H3ctXntPn410e5YLFPldjAYzQB5wHVdFZsWtyrbqfYdCa+KkKpA\nvwIDAQAB\n-----END PUBLIC KEY-----\n"
},
"manuallyApprovesFollowers": false,
"featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/collections/featured",
"moderators": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
"discoverable": true,
"indexable": true,
"webfinger": "pfefferle.org@pfefferle.org",
"postingRestrictedToMods": true
}

View file

@ -0,0 +1,24 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"Hashtag": "as:Hashtag"
}
],
"id": "https://pfefferle.org?c=148",
"type": "Note",
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"content": "<p>Nice! Hello from WordPress!</p>",
"contentMap": {
"en": "<p>Nice! Hello from WordPress!</p>"
},
"inReplyTo": "https://socialhub.activitypub.rocks/ap/object/ce040f1ead95964f6dbbf1084b81432d",
"published": "2024-04-30T15:21:13Z",
"tag": [],
"url": "https://pfefferle.org?c=148",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers"
],
"cc": []
}

View file

@ -0,0 +1,26 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"Hashtag": "as:Hashtag"
}
],
"id": "https://pfefferle.org/this-is-a-test-federation/",
"type": "Article",
"attachment": [],
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"content": "<p>with Discource!</p>",
"contentMap": {
"en": "<p>with Discource!</p>"
},
"name": "This is a test-federation",
"published": "2024-04-30T15:16:41Z",
"summary": "with Discource! [...]",
"tag": [],
"url": "https://pfefferle.org/this-is-a-test-federation/",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers"
],
"cc": []
}

View file

@ -0,0 +1,74 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://purl.archive.org/socialweb/webfinger",
{
"schema": "http://schema.org#",
"toot": "http://joinmastodon.org/ns#",
"webfinger": "https://webfinger.net/#",
"lemmy": "https://join-lemmy.org/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"Hashtag": "as:Hashtag",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"moderators": {
"@id": "lemmy:moderators",
"@type": "@id"
},
"postingRestrictedToMods": "lemmy:postingRestrictedToMods",
"discoverable": "toot:discoverable",
"indexable": "toot:indexable",
"resource": "webfinger:resource"
}
],
"id": "https://pfefferle.org/author/pfefferle/",
"type": "Person",
"attachment": [
{
"type": "PropertyValue",
"name": "Blog",
"value": "<a rel=\"me\" title=\"https://pfefferle.org/\" target=\"_blank\" href=\"https://pfefferle.org/\">pfefferle.org</a>"
},
{
"type": "PropertyValue",
"name": "Profile",
"value": "<a rel=\"me\" title=\"https://pfefferle.org/author/pfefferle/\" target=\"_blank\" href=\"https://pfefferle.org/author/pfefferle/\">pfefferle.org</a>"
}
],
"name": "Matthias Pfefferle",
"icon": {
"type": "Image",
"url": "https://secure.gravatar.com/avatar/a2bdca7870e859658cece96c044b3be5?s=120&#038;d=mm&#038;r=g"
},
"published": "2014-02-10T15:23:08Z",
"summary": "<p>Ich arbeite als Open Web Lead für Automattic.</p>\n",
"tag": [],
"url": "https://pfefferle.org/author/pfefferle/",
"inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/inbox",
"outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/outbox",
"following": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/following",
"followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers",
"preferredUsername": "matthias",
"endpoints": {
"sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
},
"publicKey": {
"id": "https://pfefferle.org/author/pfefferle/#main-key",
"owner": "https://pfefferle.org/author/pfefferle/",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvTA5RA40nOsso04RSwyX\nHXTojRPUMlIlArDcSy3M5GUJp9/xbxSUOdBjqd31KKB1GIi3vrLmD1Qi/ZqS95Qy\nw2Zd3xOsCg+o9bsyOG+O6Y8Lu+HEB5JKLUbNHdiSviakJ8wGadH9Wm4WIiN20y+q\n/u6lgxgiWfZ2CFCN6SOc28fUKi9NmKvXK+M12BhFfy1tC5KWXKDm0UbfI1+dmqhR\n3Ffe6vEsCI/YIVVdWxQ9kouOd0XSHOGdslktkepRO7IP9i9TdwyeCa0WWRoeO5Wa\ntVpc1Y0WuNbTM2ksIXTg0G+rO1/6KO/hrHnGu3RCfb/ZIHK5L/aWYb9B3PG3LyKV\n+wIDAQAB\n-----END PUBLIC KEY-----\n"
},
"manuallyApprovesFollowers": false,
"featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/collections/featured",
"discoverable": true,
"indexable": true,
"webfinger": "matthias@pfefferle.org"
}

View file

@ -39,7 +39,10 @@ use lemmy_db_schema::{
},
traits::{Bannable, Crud, Followable},
};
use lemmy_utils::error::{LemmyError, LemmyResult};
use lemmy_utils::{
error::{LemmyError, LemmyResult},
LemmyErrorType,
};
use url::Url;
impl BlockUser {
@ -129,7 +132,11 @@ impl ActivityHandler for BlockUser {
verify_is_public(&self.to, &self.cc)?;
match self.target.dereference(context).await? {
SiteOrCommunity::Site(site) => {
let domain = self.object.inner().domain().expect("url needs domain");
let domain = self
.object
.inner()
.domain()
.ok_or(LemmyErrorType::UrlWithoutDomain)?;
if context.settings().hostname == domain {
return Err(
anyhow!("Site bans from remote instance can't affect user's home instance").into(),

View file

@ -94,7 +94,12 @@ impl AnnounceActivity {
actor: community.id().into(),
to: vec![public()],
object: IdOrNestedObject::NestedObject(object),
cc: vec![community.followers_url.clone().into()],
cc: community
.followers_url
.clone()
.map(Into::into)
.into_iter()
.collect(),
kind: AnnounceType::Announce,
id,
})
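
Because `followers_url` is now optional, the `cc` list is built straight from the `Option`: `None` simply produces an empty list. The same idiom in isolation:

```rust
// Option<T> implements IntoIterator, so an optional followers URL collapses
// into a zero- or one-element cc list without any unwrap.
fn cc_list(followers_url: Option<String>) -> Vec<String> {
    followers_url.into_iter().collect()
}

fn main() {
    assert_eq!(
        cc_list(Some("https://example.org/followers".into())),
        vec!["https://example.org/followers".to_string()]
    );
    assert!(cc_list(None).is_empty());
}
```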

View file

@ -26,6 +26,7 @@ use lemmy_db_schema::{
source::{
activity::ActivitySendTargets,
community::Community,
moderator::{ModLockPost, ModLockPostForm},
person::Person,
post::{Post, PostUpdateForm},
},
@ -60,12 +61,22 @@ impl ActivityHandler for LockPage {
}
async fn receive(self, context: &Data<Self::DataType>) -> Result<(), Self::Error> {
insert_received_activity(&self.id, context).await?;
let locked = Some(true);
let form = PostUpdateForm {
locked: Some(true),
locked,
..Default::default()
};
let post = self.object.dereference(context).await?;
Post::update(&mut context.pool(), post.id, &form).await?;
let form = ModLockPostForm {
mod_person_id: self.actor.dereference(context).await?.id,
post_id: post.id,
locked,
};
ModLockPost::create(&mut context.pool(), &form).await?;
Ok(())
}
}
@ -94,12 +105,21 @@ impl ActivityHandler for UndoLockPage {
async fn receive(self, context: &Data<Self::DataType>) -> Result<(), Self::Error> {
insert_received_activity(&self.id, context).await?;
let locked = Some(false);
let form = PostUpdateForm {
locked: Some(false),
locked,
..Default::default()
};
let post = self.object.object.dereference(context).await?;
Post::update(&mut context.pool(), post.id, &form).await?;
let form = ModLockPostForm {
mod_person_id: self.actor.dereference(context).await?.id,
post_id: post.id,
locked,
};
ModLockPost::create(&mut context.pool(), &form).await?;
Ok(())
}
}
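
Both forms above use the diesel-style update pattern where only fields set to `Some(_)` are written. A minimal sketch of that struct-update idiom; the real `PostUpdateForm` has many more fields:

```rust
// Fields left at their Default (None) are not touched by the update; only the
// explicitly set Option fields are written.
#[derive(Default, Debug, PartialEq)]
struct PostUpdateForm {
    locked: Option<bool>,
    deleted: Option<bool>,
}

fn main() {
    let locked = Some(true);
    let form = PostUpdateForm {
        locked,
        ..Default::default()
    };
    assert_eq!(
        form,
        PostUpdateForm {
            locked: Some(true),
            deleted: None
        }
    );
}
```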

View file

@ -105,7 +105,7 @@ impl ActivityHandler for UpdateCommunity {
last_refreshed_at: Some(naive_now()),
icon: Some(self.object.icon.map(|i| i.url.into())),
banner: Some(self.object.image.map(|i| i.url.into())),
followers_url: Some(self.object.followers.into()),
followers_url: self.object.followers.map(Into::into),
inbox_url: Some(self.object.inbox.into()),
shared_inbox_url: Some(self.object.endpoints.map(|e| e.shared_inbox.into())),
moderators_url: self.object.attributed_to.map(Into::into),

View file

@ -19,7 +19,7 @@ use activitypub_federation::{
config::Data,
fetch::object_id::ObjectId,
kinds::public,
protocol::verification::verify_domains_match,
protocol::verification::{verify_domains_match, verify_urls_match},
traits::{ActivityHandler, Actor, Object},
};
use lemmy_api_common::{
@ -133,6 +133,7 @@ impl ActivityHandler for CreateOrUpdateNote {
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
check_community_deleted_or_removed(&community)?;
check_post_deleted_or_removed(&post)?;
verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?;
ApubComment::verify(&self.object, self.actor.inner(), context).await?;
Ok(())
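
The added line rejects comments whose `attributedTo` differs from the activity's actor, so nobody can forge comments on another user's behalf. Conceptually, a `verify_urls_match`-style check is an equality test returning a typed error; the real function lives in the activitypub_federation crate, so this is only a sketch:

```rust
use url::Url;

#[derive(Debug)]
enum VerifyError {
    UrlMismatch, // stand-in for the federation crate's error type
}

// Sketch of the idea: reject the activity unless actor and attributedTo are
// the exact same URL.
fn verify_urls_match(a: &Url, b: &Url) -> Result<(), VerifyError> {
    if a == b {
        Ok(())
    } else {
        Err(VerifyError::UrlMismatch)
    }
}

fn main() {
    let actor = Url::parse("https://example.org/u/alice").expect("parses");
    let attributed_to = Url::parse("https://example.org/u/bob").expect("parses");
    assert!(verify_urls_match(&actor, &actor).is_ok());
    assert!(verify_urls_match(&actor, &attributed_to).is_err());
}
```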

View file

@ -4,7 +4,6 @@ use crate::{
community::send_activity_in_community,
generate_activity_id,
verify_is_public,
verify_mod_action,
verify_person_in_community,
},
activity_lists::AnnouncableActivities,
@ -66,7 +65,6 @@ impl CreateOrUpdatePage {
kind: CreateOrUpdateType,
context: Data<LemmyContext>,
) -> LemmyResult<()> {
let post = ApubPost(post);
let community_id = post.community_id;
let person: ApubPerson = Person::read(&mut context.pool(), person_id)
.await?
@ -78,15 +76,14 @@ impl CreateOrUpdatePage {
.into();
let create_or_update =
CreateOrUpdatePage::new(post, &person, &community, kind, &context).await?;
let is_mod_action = create_or_update.object.is_mod_action(&context).await?;
CreateOrUpdatePage::new(post.into(), &person, &community, kind, &context).await?;
let activity = AnnouncableActivities::CreateOrUpdatePost(create_or_update);
send_activity_in_community(
activity,
&person,
&community,
ActivitySendTargets::empty(),
is_mod_action,
false,
&context,
)
.await?;
@ -113,30 +110,8 @@ impl ActivityHandler for CreateOrUpdatePage {
let community = self.community(context).await?;
verify_person_in_community(&self.actor, &community, context).await?;
check_community_deleted_or_removed(&community)?;
match self.kind {
CreateOrUpdateType::Create => {
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
// Check that the post isnt locked, as that isnt possible for newly created posts.
// However, when fetching a remote post we generate a new create activity with the current
// locked value, so this check may fail. So only check if its a local community,
// because then we will definitely receive all create and update activities separately.
let is_locked = self.object.comments_enabled == Some(false);
if community.local && is_locked {
Err(LemmyErrorType::NewPostCannotBeLocked)?
}
}
CreateOrUpdateType::Update => {
let is_mod_action = self.object.is_mod_action(context).await?;
if is_mod_action {
verify_mod_action(&self.actor, &community, context).await?;
} else {
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
}
}
}
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
verify_urls_match(self.actor.inner(), self.object.creator()?.inner())?;
ApubPost::verify(&self.object, self.actor.inner(), context).await?;
Ok(())
}

View file

@ -9,7 +9,7 @@ use crate::{
};
use activitypub_federation::{
config::Data,
protocol::verification::verify_domains_match,
protocol::verification::{verify_domains_match, verify_urls_match},
traits::{ActivityHandler, Actor, Object},
};
use lemmy_api_common::context::LemmyContext;
@ -61,6 +61,7 @@ impl ActivityHandler for CreateOrUpdateChatMessage {
verify_person(&self.actor, context).await?;
verify_domains_match(self.actor.inner(), self.object.id.inner())?;
verify_domains_match(self.to[0].inner(), self.object.to[0].inner())?;
verify_urls_match(self.actor.inner(), self.object.attributed_to.inner())?;
ApubPrivateMessage::verify(&self.object, self.actor.inner(), context).await?;
Ok(())
}

View file

@ -4,9 +4,10 @@ use crate::objects::{
person::ApubPerson,
post::ApubPost,
};
use activitypub_federation::{config::Data, fetch::object_id::ObjectId};
use activitypub_federation::{config::Data, fetch::object_id::ObjectId, traits::Object};
use actix_web::web::Json;
use futures::{future::try_join_all, StreamExt};
use itertools::Itertools;
use lemmy_api_common::{context::LemmyContext, SuccessResponse};
use lemmy_db_schema::{
newtypes::DbUrl,
@ -30,8 +31,11 @@ use lemmy_utils::{
spawn_try_task,
};
use serde::{Deserialize, Serialize};
use std::future::Future;
use tracing::info;
const PARALLELISM: usize = 10;
/// Backup of user data. This struct should never be changed so that the data can be used as a
/// long-term backup in case the instance goes down unexpectedly. All fields are optional to allow
/// importing partial backups.
@ -40,7 +44,7 @@ use tracing::info;
///
/// Be careful with any changes to this struct, to avoid breaking changes which could prevent
/// importing older backups.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct UserSettingsBackup {
pub display_name: Option<String>,
pub bio: Option<String>,
@ -167,141 +171,91 @@ pub async fn import_settings(
}
spawn_try_task(async move {
const PARALLELISM: usize = 10;
let person_id = local_user_view.person.id;
// These tasks fetch objects from remote instances which might be down.
// TODO: Would be nice if we could send a list of failed items with api response, but then
// the request would likely timeout.
let mut failed_items = vec![];
info!(
"Starting settings backup for {}",
"Starting settings import for {}",
local_user_view.person.name
);
futures::stream::iter(
data
.followed_communities
.clone()
.into_iter()
// reset_request_count works like clone, and is necessary to avoid running into request limit
.map(|f| (f, context.reset_request_count()))
.map(|(followed, context)| async move {
// need to reset outgoing request count to avoid running into limit
let community = followed.dereference(&context).await?;
let form = CommunityFollowerForm {
person_id,
community_id: community.id,
pending: true,
};
CommunityFollower::follow(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
}),
let failed_followed_communities = fetch_and_import(
data.followed_communities.clone(),
&context,
|(followed, context)| async move {
let community = followed.dereference(&context).await?;
let form = CommunityFollowerForm {
person_id,
community_id: community.id,
pending: true,
};
CommunityFollower::follow(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
},
)
.buffer_unordered(PARALLELISM)
.collect::<Vec<_>>()
.await
.into_iter()
.enumerate()
.for_each(|(i, r)| {
if let Err(e) = r {
failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
info!("Failed to import followed community: {e}");
}
});
futures::stream::iter(
data
.saved_posts
.clone()
.into_iter()
.map(|s| (s, context.reset_request_count()))
.map(|(saved, context)| async move {
let post = saved.dereference(&context).await?;
let form = PostSavedForm {
person_id,
post_id: post.id,
};
PostSaved::save(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
}),
)
.buffer_unordered(PARALLELISM)
.collect::<Vec<_>>()
.await
.into_iter()
.enumerate()
.for_each(|(i, r)| {
if let Err(e) = r {
failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
info!("Failed to import saved post community: {e}");
}
});
futures::stream::iter(
data
.saved_comments
.clone()
.into_iter()
.map(|s| (s, context.reset_request_count()))
.map(|(saved, context)| async move {
let comment = saved.dereference(&context).await?;
let form = CommentSavedForm {
person_id,
comment_id: comment.id,
};
CommentSaved::save(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
}),
)
.buffer_unordered(PARALLELISM)
.collect::<Vec<_>>()
.await
.into_iter()
.enumerate()
.for_each(|(i, r)| {
if let Err(e) = r {
failed_items.push(data.followed_communities.get(i).map(|u| u.inner().clone()));
info!("Failed to import saved comment community: {e}");
}
});
let failed_items: Vec<_> = failed_items.into_iter().flatten().collect();
info!(
"Finished settings backup for {}, failed items: {:#?}",
local_user_view.person.name, failed_items
);
// These tasks don't connect to any remote instances but only insert directly in the database.
// That means the only error condition are db connection failures, so no extra error handling is
// needed.
try_join_all(data.blocked_communities.iter().map(|blocked| async {
// dont fetch unknown blocked objects from home server
let community = blocked.dereference_local(&context).await?;
let form = CommunityBlockForm {
person_id,
community_id: community.id,
};
CommunityBlock::block(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
}))
.await?;
try_join_all(data.blocked_users.iter().map(|blocked| async {
// dont fetch unknown blocked objects from home server
let target = blocked.dereference_local(&context).await?;
let form = PersonBlockForm {
person_id,
target_id: target.id,
};
PersonBlock::block(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
}))
let failed_saved_posts = fetch_and_import(
data.saved_posts.clone(),
&context,
|(saved, context)| async move {
let post = saved.dereference(&context).await?;
let form = PostSavedForm {
person_id,
post_id: post.id,
};
PostSaved::save(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
},
)
.await?;
let failed_saved_comments = fetch_and_import(
data.saved_comments.clone(),
&context,
|(saved, context)| async move {
let comment = saved.dereference(&context).await?;
let form = CommentSavedForm {
person_id,
comment_id: comment.id,
};
CommentSaved::save(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
},
)
.await?;
let failed_community_blocks = fetch_and_import(
data.blocked_communities.clone(),
&context,
|(blocked, context)| async move {
let community = blocked.dereference(&context).await?;
let form = CommunityBlockForm {
person_id,
community_id: community.id,
};
CommunityBlock::block(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
},
)
.await?;
let failed_user_blocks = fetch_and_import(
data.blocked_users.clone(),
&context,
|(blocked, context)| async move {
let context = context.reset_request_count();
let target = blocked.dereference(&context).await?;
let form = PersonBlockForm {
person_id,
target_id: target.id,
};
PersonBlock::block(&mut context.pool(), &form).await?;
LemmyResult::Ok(())
},
)
.await?;
try_join_all(data.blocked_instances.iter().map(|domain| async {
// dont fetch unknown blocked objects from home server
let instance = Instance::read_or_create(&mut context.pool(), domain.clone()).await?;
let form = InstanceBlockForm {
person_id,
@ -312,17 +266,53 @@ pub async fn import_settings(
}))
.await?;
info!("Settings import completed for {}, the following items failed: {failed_followed_communities}, {failed_saved_posts}, {failed_saved_comments}, {failed_community_blocks}, {failed_user_blocks}",
local_user_view.person.name);
Ok(())
});
Ok(Json(Default::default()))
}
async fn fetch_and_import<Kind, Fut>(
objects: Vec<ObjectId<Kind>>,
context: &Data<LemmyContext>,
import_fn: impl FnMut((ObjectId<Kind>, Data<LemmyContext>)) -> Fut,
) -> LemmyResult<String>
where
Kind: Object + Send + 'static,
for<'de2> <Kind as Object>::Kind: Deserialize<'de2>,
Fut: Future<Output = LemmyResult<()>>,
{
let mut failed_items = vec![];
futures::stream::iter(
objects
.clone()
.into_iter()
// need to reset outgoing request count to avoid running into limit
.map(|s| (s, context.reset_request_count()))
.map(import_fn),
)
.buffer_unordered(PARALLELISM)
.collect::<Vec<_>>()
.await
.into_iter()
.enumerate()
.for_each(|(i, r): (usize, LemmyResult<()>)| {
if r.is_err() {
if let Some(object) = objects.get(i) {
failed_items.push(object.inner().clone());
}
}
});
Ok(failed_items.into_iter().join(","))
}
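
The helper above bounds concurrent remote fetches with `buffer_unordered(PARALLELISM)`. A self-contained sketch of that concurrency pattern, with a trivial closure standing in for the network fetch:

```rust
use futures::StreamExt;

#[tokio::main]
async fn main() {
    const PARALLELISM: usize = 10;

    // Turn the items into futures and poll at most PARALLELISM of them
    // concurrently; results arrive in completion order, not input order.
    let results: Vec<u32> = futures::stream::iter((0..25u32).map(|id| async move {
        tokio::task::yield_now().await; // stand-in for a remote fetch
        id * 2
    }))
    .buffer_unordered(PARALLELISM)
    .collect()
    .await;

    assert_eq!(results.len(), 25);
}
```
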
#[cfg(test)]
#[allow(clippy::indexing_slicing)]
mod tests {
use crate::api::user_settings_backup::{export_settings, import_settings};
use crate::api::user_settings_backup::{export_settings, import_settings, UserSettingsBackup};
use activitypub_federation::config::Data;
use lemmy_api_common::context::LemmyContext;
use lemmy_db_schema::{
@ -420,6 +410,44 @@ mod tests {
Ok(())
}
#[tokio::test]
#[serial]
async fn test_settings_partial_import() -> LemmyResult<()> {
let context = LemmyContext::init_test_context().await;
let export_user =
create_user("hanna".to_string(), Some("my bio".to_string()), &context).await?;
let community_form = CommunityInsertForm::builder()
.name("testcom".to_string())
.title("testcom".to_string())
.instance_id(export_user.person.instance_id)
.build();
let community = Community::create(&mut context.pool(), &community_form).await?;
let follower_form = CommunityFollowerForm {
community_id: community.id,
person_id: export_user.person.id,
pending: false,
};
CommunityFollower::follow(&mut context.pool(), &follower_form).await?;
let backup = export_settings(export_user.clone(), context.reset_request_count()).await?;
let import_user = create_user("charles".to_string(), None, &context).await?;
let backup2 = UserSettingsBackup {
followed_communities: backup.followed_communities.clone(),
..Default::default()
};
import_settings(
actix_web::web::Json(backup2),
import_user.clone(),
context.reset_request_count(),
)
.await?;
Ok(())
}
#[tokio::test]
#[serial]
async fn disallow_large_backup() -> LemmyResult<()> {

View file

@ -1,6 +1,6 @@
use crate::{
activity_lists::AnnouncableActivities,
objects::{community::ApubCommunity, post::ApubPost},
objects::community::ApubCommunity,
protocol::{
activities::{
community::announce::AnnounceActivity,
@ -18,11 +18,8 @@ use activitypub_federation::{
};
use futures::future::join_all;
use lemmy_api_common::{context::LemmyContext, utils::generate_outbox_url};
use lemmy_db_schema::{
source::{person::Person, post::Post},
traits::Crud,
utils::FETCH_LIMIT_MAX,
};
use lemmy_db_schema::{utils::FETCH_LIMIT_MAX, SortType};
use lemmy_db_views::{post_view::PostQuery, structs::SiteView};
use lemmy_utils::{
error::{LemmyError, LemmyResult},
LemmyErrorType,
@ -41,19 +38,30 @@ impl Collection for ApubCommunityOutbox {
#[tracing::instrument(skip_all)]
async fn read_local(owner: &Self::Owner, data: &Data<Self::DataType>) -> LemmyResult<Self::Kind> {
let post_list: Vec<ApubPost> = Post::list_for_community(&mut data.pool(), owner.id)
let site = SiteView::read_local(&mut data.pool())
.await?
.into_iter()
.map(Into::into)
.collect();
.ok_or(LemmyErrorType::LocalSiteNotSetup)?
.site;
let post_views = PostQuery {
community_id: Some(owner.id),
sort: Some(SortType::New),
limit: Some(FETCH_LIMIT_MAX),
..Default::default()
}
.list(&site, &mut data.pool())
.await?;
let mut ordered_items = vec![];
for post in post_list {
let person = Person::read(&mut data.pool(), post.creator_id)
.await?
.ok_or(LemmyErrorType::CouldntFindPerson)?
.into();
let create =
CreateOrUpdatePage::new(post, &person, owner, CreateOrUpdateType::Create, data).await?;
for post_view in post_views {
let create = CreateOrUpdatePage::new(
post_view.post.into(),
&post_view.creator.into(),
owner,
CreateOrUpdateType::Create,
data,
)
.await?;
let announcable = AnnouncableActivities::CreateOrUpdatePost(create);
let announce = AnnounceActivity::new(announcable.try_into()?, owner, data)?;
ordered_items.push(announce);

View file

@ -128,7 +128,14 @@ pub(crate) mod tests {
use crate::protocol::objects::{group::Group, tombstone::Tombstone};
use actix_web::body::to_bytes;
use lemmy_db_schema::{
source::{community::CommunityInsertForm, instance::Instance},
newtypes::InstanceId,
source::{
community::CommunityInsertForm,
instance::Instance,
local_site::{LocalSite, LocalSiteInsertForm},
local_site_rate_limit::{LocalSiteRateLimit, LocalSiteRateLimitInsertForm},
site::{Site, SiteInsertForm},
},
traits::Crud,
CommunityVisibility,
};
@ -142,6 +149,8 @@ pub(crate) mod tests {
) -> LemmyResult<(Instance, Community)> {
let instance =
Instance::read_or_create(&mut context.pool(), "my_domain.tld".to_string()).await?;
create_local_site(context, instance.id).await?;
let community_form = CommunityInsertForm::builder()
.name("testcom6".to_string())
.title("nada".to_owned())
@ -154,6 +163,28 @@ pub(crate) mod tests {
Ok((instance, community))
}
/// Necessary for the community outbox fetching
async fn create_local_site(
context: &Data<LemmyContext>,
instance_id: InstanceId,
) -> LemmyResult<()> {
// Create a local site, since this is necessary for community fetching.
let site_form = SiteInsertForm::builder()
.name("test site".to_string())
.instance_id(instance_id)
.build();
let site = Site::create(&mut context.pool(), &site_form).await?;
let local_site_form = LocalSiteInsertForm::builder().site_id(site.id).build();
let local_site = LocalSite::create(&mut context.pool(), &local_site_form).await?;
let local_site_rate_limit_form = LocalSiteRateLimitInsertForm::builder()
.local_site_id(local_site.id)
.build();
LocalSiteRateLimit::create(&mut context.pool(), &local_site_rate_limit_form).await?;
Ok(())
}
async fn decode_response<T: DeserializeOwned>(res: HttpResponse) -> LemmyResult<T> {
let body = to_bytes(res.into_body()).await.unwrap();
let body = std::str::from_utf8(&body)?;
@ -164,6 +195,7 @@ pub(crate) mod tests {
#[serial]
async fn test_get_community() -> LemmyResult<()> {
let context = LemmyContext::init_test_context().await;
let (instance, community) = init(false, CommunityVisibility::Public, &context).await?;
// fetch invalid community
let query = CommunityQuery {
@ -172,8 +204,6 @@ pub(crate) mod tests {
let res = get_apub_community_http(query.into(), context.reset_request_count()).await;
assert!(res.is_err());
let (instance, community) = init(false, CommunityVisibility::Public, &context).await?;
// fetch valid community
let query = CommunityQuery {
community_name: community.name.clone(),

View file

@ -20,7 +20,8 @@ use lemmy_db_schema::{
};
use lemmy_utils::error::{LemmyErrorType, LemmyResult};
use serde::{Deserialize, Serialize};
use std::ops::Deref;
use std::{ops::Deref, time::Duration};
use tokio::time::timeout;
use url::Url;
mod comment;
@ -30,13 +31,22 @@ mod post;
pub mod routes;
pub mod site;
const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);
pub async fn shared_inbox(
request: HttpRequest,
body: Bytes,
data: Data<LemmyContext>,
) -> LemmyResult<HttpResponse> {
receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data)
let receive_fut =
receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data);
// Set a timeout shorter than `REQWEST_TIMEOUT` for processing incoming activities. This is to
// avoid taking a long time to process an incoming activity when a required data fetch times out.
// In this case our own instance would timeout and be marked as dead by the sender. Better to
// consider the activity broken and move on.
timeout(INCOMING_ACTIVITY_TIMEOUT, receive_fut)
.await
.map_err(|_| LemmyErrorType::InboxTimeout)?
}
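
The same shape in isolation, with string errors standing in for `LemmyErrorType::InboxTimeout`: `tokio::time::timeout` yields `Err(Elapsed)` when the inner future overruns, which is then mapped to the handler's own error type.

```rust
use std::time::Duration;
use tokio::time::timeout;

#[tokio::main]
async fn main() {
    // A handler future that takes longer than the allowed processing window.
    let receive_fut = async {
        tokio::time::sleep(Duration::from_secs(30)).await;
        Ok::<&str, &str>("handled")
    };

    // Err(Elapsed) becomes the handler's own error; on Ok the inner result is
    // passed through unchanged.
    let result: Result<&str, &str> = match timeout(Duration::from_secs(1), receive_fut).await {
        Ok(inner) => inner,
        Err(_elapsed) => Err("inbox timeout"), // stand-in for LemmyErrorType::InboxTimeout
    };

    assert!(result.is_err());
}
```
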
/// Convert the data to json and turn it into an HTTP Response with the correct ActivityPub

View file

@ -29,7 +29,9 @@ pub(crate) mod mentions;
pub mod objects;
pub mod protocol;
pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 50;
/// Maximum number of outgoing HTTP requests to fetch a single object. Needs to be high enough
/// to fetch a new community with posts, moderators and featured posts.
pub const FEDERATION_HTTP_FETCH_LIMIT: u32 = 100;
/// Only include a basic context to save space and bandwidth. The main context is hosted statically
/// on join-lemmy.org. Include activitystreams explicitly for better compat, but this could
@ -78,7 +80,10 @@ impl UrlVerifier for VerifyUrlData {
/// - URL not being in the blocklist (if it is active)
#[tracing::instrument(skip(local_site_data))]
fn check_apub_id_valid(apub_id: &Url, local_site_data: &LocalSiteData) -> LemmyResult<()> {
let domain = apub_id.domain().expect("apud id has domain").to_string();
let domain = apub_id
.domain()
.ok_or(LemmyErrorType::UrlWithoutDomain)?
.to_string();
if !local_site_data
.local_site
@ -158,7 +163,10 @@ pub(crate) async fn check_apub_id_valid_with_strictness(
is_strict: bool,
context: &LemmyContext,
) -> LemmyResult<()> {
let domain = apub_id.domain().expect("apud id has domain").to_string();
let domain = apub_id
.domain()
.ok_or(LemmyErrorType::UrlWithoutDomain)?
.to_string();
let local_instance = context
.settings()
.get_hostname_without_port()
@ -185,7 +193,10 @@ pub(crate) async fn check_apub_id_valid_with_strictness(
.expect("local hostname is valid");
allowed_and_local.push(local_instance);
let domain = apub_id.domain().expect("apud id has domain").to_string();
let domain = apub_id
.domain()
.ok_or(LemmyErrorType::UrlWithoutDomain)?
.to_string();
if !allowed_and_local.contains(&domain) {
Err(LemmyErrorType::FederationDisabledByStrictAllowList)?
}

View file

@ -54,7 +54,10 @@ pub async fn collect_non_local_mentions(
name: Some(format!(
"@{}@{}",
&parent_creator.name,
&parent_creator.id().domain().expect("has domain")
&parent_creator
.id()
.domain()
.ok_or(LemmyErrorType::UrlWithoutDomain)?
)),
kind: MentionType::Mention,
};

View file

@ -28,6 +28,7 @@ use lemmy_api_common::{
},
};
use lemmy_db_schema::{
sensitive::SensitiveString,
source::{
activity::ActorType,
actor_language::CommunityLanguage,
@ -113,7 +114,7 @@ impl Object for ApubCommunity {
featured: Some(generate_featured_url(&self.actor_id)?.into()),
inbox: self.inbox_url.clone().into(),
outbox: generate_outbox_url(&self.actor_id)?.into(),
followers: self.followers_url.clone().into(),
followers: self.followers_url.clone().map(Into::into),
endpoints: self.shared_inbox_url.clone().map(|s| Endpoints {
shared_inbox: s.into(),
}),
@ -164,7 +165,7 @@ impl Object for ApubCommunity {
last_refreshed_at: Some(naive_now()),
icon,
banner,
followers_url: Some(group.followers.clone().into()),
followers_url: group.followers.clone().map(Into::into),
inbox_url: Some(group.inbox.into()),
shared_inbox_url: group.endpoints.map(|e| e.shared_inbox.into()),
moderators_url: group.attributed_to.clone().map(Into::into),
@ -187,11 +188,9 @@ impl Object for ApubCommunity {
let context_ = context.reset_request_count();
spawn_try_task(async move {
group.outbox.dereference(&community_, &context_).await.ok();
group
.followers
.dereference(&community_, &context_)
.await
.ok();
if let Some(followers) = group.followers {
followers.dereference(&community_, &context_).await.ok();
}
if let Some(featured) = group.featured {
featured.dereference(&community_, &context_).await.ok();
}
@ -215,7 +214,7 @@ impl Actor for ApubCommunity {
}
fn private_key_pem(&self) -> Option<String> {
self.private_key.clone()
self.private_key.clone().map(SensitiveString::into_inner)
}
fn inbox(&self) -> Url {
@ -275,7 +274,9 @@ pub(crate) mod tests {
// change these links so they dont fetch over the network
json.attributed_to = None;
json.outbox = CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_outbox")?;
json.followers = CollectionId::parse("https://enterprise.lemmy.ml/c/tenforward/not_followers")?;
json.followers = Some(CollectionId::parse(
"https://enterprise.lemmy.ml/c/tenforward/not_followers",
)?);
let url = Url::parse("https://enterprise.lemmy.ml/c/tenforward")?;
ApubCommunity::verify(&json, &url, &context2).await?;
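
`private_key_pem` now unwraps the key via `SensitiveString::into_inner`, matching the `sensitive` module imported at the top of this hunk. A hedged sketch of what such a wrapper can look like; anything beyond `into_inner` is an assumption about the real type:

```rust
use std::fmt;

// A newtype whose Debug output never prints the wrapped value, so keys and
// tokens can't leak into logs via {:?} formatting.
pub struct SensitiveString(String);

impl SensitiveString {
    pub fn new(value: String) -> Self {
        SensitiveString(value)
    }

    // Explicit escape hatch for the places that really need the raw value,
    // such as private_key_pem() above.
    pub fn into_inner(self) -> String {
        self.0
    }
}

impl fmt::Debug for SensitiveString {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("SensitiveString(*redacted*)")
    }
}

fn main() {
    let key = SensitiveString::new("-----BEGIN PUBLIC KEY-----".into());
    assert_eq!(format!("{key:?}"), "SensitiveString(*redacted*)");
    assert!(key.into_inner().starts_with("-----BEGIN"));
}
```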

View file

@ -29,6 +29,7 @@ use lemmy_api_common::{
};
use lemmy_db_schema::{
newtypes::InstanceId,
sensitive::SensitiveString,
source::{
activity::ActorType,
actor_language::SiteLanguage,
@ -45,6 +46,7 @@ use lemmy_utils::{
markdown::markdown_to_html,
slurs::{check_slurs, check_slurs_opt},
},
LemmyErrorType,
};
use std::ops::Deref;
use tracing::debug;
@ -99,7 +101,7 @@ impl Object for ApubSite {
kind: ApplicationType::Application,
id: self.id().into(),
name: self.name.clone(),
preferred_username: data.domain().to_string(),
preferred_username: Some(data.domain().to_string()),
content: self.sidebar.as_ref().map(|d| markdown_to_html(d)),
source: self.sidebar.clone().map(Source::new),
summary: self.description.clone(),
@ -137,7 +139,11 @@ impl Object for ApubSite {
#[tracing::instrument(skip_all)]
async fn from_json(apub: Self::Kind, context: &Data<Self::DataType>) -> LemmyResult<Self> {
let domain = apub.id.inner().domain().expect("group id has domain");
let domain = apub
.id
.inner()
.domain()
.ok_or(LemmyErrorType::UrlWithoutDomain)?;
let instance = DbInstance::read_or_create(&mut context.pool(), domain.to_string()).await?;
let local_site = LocalSite::read(&mut context.pool()).await.ok();
@ -182,7 +188,7 @@ impl Actor for ApubSite {
}
fn private_key_pem(&self) -> Option<String> {
self.private_key.clone()
self.private_key.clone().map(SensitiveString::into_inner)
}
fn inbox(&self) -> Url {
@ -210,7 +216,9 @@ pub(in crate::objects) async fn fetch_instance_actor_for_object<T: Into<Url> + C
Err(e) => {
// Failed to fetch instance actor, its probably not a lemmy instance
debug!("Failed to dereference site for {}: {}", &instance_id, e);
let domain = instance_id.domain().expect("has domain");
let domain = instance_id
.domain()
.ok_or(LemmyErrorType::UrlWithoutDomain)?;
Ok(
DbInstance::read_or_create(&mut context.pool(), domain.to_string())
.await?

View file

@ -30,6 +30,7 @@ use lemmy_api_common::{
},
};
use lemmy_db_schema::{
sensitive::SensitiveString,
source::{
activity::ActorType,
local_site::LocalSite,
@ -200,7 +201,7 @@ impl Actor for ApubPerson {
}
fn private_key_pem(&self) -> Option<String> {
self.private_key.clone()
self.private_key.clone().map(SensitiveString::into_inner)
}
fn inbox(&self) -> Url {

View file

@ -36,7 +36,6 @@ use lemmy_db_schema::{
source::{
community::Community,
local_site::LocalSite,
moderator::{ModLockPost, ModLockPostForm},
person::Person,
post::{Post, PostInsertForm, PostUpdateForm},
},
@ -147,7 +146,6 @@ impl Object for ApubPost {
source: self.body.clone().map(Source::new),
attachment,
image: self.thumbnail_url.clone().map(ImageObject::new),
comments_enabled: Some(!self.locked),
sensitive: Some(self.nsfw),
language,
published: Some(self.published),
@ -165,12 +163,8 @@ impl Object for ApubPost {
expected_domain: &Url,
context: &Data<Self::DataType>,
) -> LemmyResult<()> {
// We can't verify the domain in case of mod action, because the mod may be on a different
// instance from the post author.
if !page.is_mod_action(context).await? {
verify_domains_match(page.id.inner(), expected_domain)?;
verify_is_remote_object(&page.id, context)?;
};
verify_domains_match(page.id.inner(), expected_domain)?;
verify_is_remote_object(&page.id, context)?;
let community = page.community(context).await?;
check_apub_id_valid_with_strictness(page.id.inner(), community.local, context).await?;
@ -218,62 +212,46 @@ impl Object for ApubPost {
name = name.chars().take(MAX_TITLE_LENGTH).collect();
}
// read existing, local post if any (for generating mod log)
let old_post = page.id.dereference_local(context).await;
let first_attachment = page.attachment.first();
let local_site = LocalSite::read(&mut context.pool()).await.ok();
let form = if !page.is_mod_action(context).await? {
let url = if let Some(attachment) = first_attachment.cloned() {
Some(attachment.url())
} else if page.kind == PageType::Video {
// we cant display videos directly, so insert a link to external video page
Some(page.id.inner().clone())
} else {
None
};
check_url_scheme(&url)?;
let alt_text = first_attachment.cloned().and_then(Attachment::alt_text);
let url = proxy_image_link_opt_apub(url, context).await?;
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;
let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source);
let body = process_markdown_opt(&body, slur_regex, &url_blocklist, context).await?;
let language_id =
LanguageTag::to_language_id_single(page.language, &mut context.pool()).await?;
PostInsertForm::builder()
.name(name)
.url(url.map(Into::into))
.body(body)
.alt_text(alt_text)
.creator_id(creator.id)
.community_id(community.id)
.locked(page.comments_enabled.map(|e| !e))
.published(page.published.map(Into::into))
.updated(page.updated.map(Into::into))
.deleted(Some(false))
.nsfw(page.sensitive)
.ap_id(Some(page.id.clone().into()))
.local(Some(false))
.language_id(language_id)
.build()
let url = if let Some(attachment) = first_attachment.cloned() {
Some(attachment.url())
} else if page.kind == PageType::Video {
// we cant display videos directly, so insert a link to external video page
Some(page.id.inner().clone())
} else {
// if is mod action, only update locked/stickied fields, nothing else
PostInsertForm::builder()
.name(name)
.creator_id(creator.id)
.community_id(community.id)
.ap_id(Some(page.id.clone().into()))
.locked(page.comments_enabled.map(|e| !e))
.updated(page.updated.map(Into::into))
.build()
None
};
check_url_scheme(&url)?;
let alt_text = first_attachment.cloned().and_then(Attachment::alt_text);
let url = proxy_image_link_opt_apub(url, context).await?;
let slur_regex = &local_site_opt_to_slur_regex(&local_site);
let url_blocklist = get_url_blocklist(context).await?;
let body = read_from_string_or_source_opt(&page.content, &page.media_type, &page.source);
let body = process_markdown_opt(&body, slur_regex, &url_blocklist, context).await?;
let language_id =
LanguageTag::to_language_id_single(page.language, &mut context.pool()).await?;
let form = PostInsertForm::builder()
.name(name)
.url(url.map(Into::into))
.body(body)
.alt_text(alt_text)
.creator_id(creator.id)
.community_id(community.id)
.published(page.published.map(Into::into))
.updated(page.updated.map(Into::into))
.deleted(Some(false))
.nsfw(page.sensitive)
.ap_id(Some(page.id.clone().into()))
.local(Some(false))
.language_id(language_id)
.build();
let timestamp = page.updated.or(page.published).unwrap_or_else(naive_now);
let post = Post::insert_apub(&mut context.pool(), timestamp, &form).await?;
@ -287,16 +265,6 @@ impl Object for ApubPost {
context.reset_request_count(),
);
// write mod log entry for lock
if Page::is_locked_changed(&old_post, &page.comments_enabled) {
let form = ModLockPostForm {
mod_person_id: creator.id,
post_id: post.id,
locked: Some(post.locked),
};
ModLockPost::create(&mut context.pool(), &form).await?;
}
Ok(post.into())
}
}

View file

@ -96,4 +96,10 @@ mod tests {
test_json::<Report>("assets/mbin/activities/flag.json")?;
Ok(())
}
#[test]
fn test_parse_wordpress_activities() -> LemmyResult<()> {
test_json::<AnnounceActivity>("assets/wordpress/activities/announce.json")?;
Ok(())
}
}

View file

@ -45,7 +45,7 @@ pub struct Group {
/// username, set at account creation and usually fixed after that
pub(crate) preferred_username: String,
pub(crate) inbox: Url,
pub(crate) followers: CollectionId<ApubCommunityFollower>,
pub(crate) followers: Option<CollectionId<ApubCommunityFollower>>,
pub(crate) public_key: PublicKey,
/// title

View file

@ -22,7 +22,7 @@ pub struct Instance {
/// site name
pub(crate) name: String,
/// instance domain, necessary for mastodon authorized fetch
pub(crate) preferred_username: String,
pub(crate) preferred_username: Option<String>,
pub(crate) inbox: Url,
/// mandatory field in activitypub, lemmy currently serves an empty outbox
pub(crate) outbox: Url,
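
Together with the `followers` change in group.rs just above, this makes the parser tolerant of platforms that omit these keys. With serde, an `Option` field deserializes to `None` when the key is missing, as in this sketch:

```rust
use serde::Deserialize;
use url::Url;

// Sketch of the tolerant-field pattern: a missing "preferredUsername" key no
// longer fails deserialization, it just becomes None.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Instance {
    name: String,
    preferred_username: Option<String>,
    inbox: Url,
}

fn main() {
    let json = r##"{"name":"test site","inbox":"https://example.org/inbox"}"##;
    let instance: Instance = serde_json::from_str(json).expect("parses without preferredUsername");
    assert!(instance.preferred_username.is_none());
}
```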

View file

@ -190,4 +190,29 @@ mod tests {
test_json::<Person>("assets/mobilizon/objects/person.json")?;
Ok(())
}
#[test]
fn test_parse_object_discourse() -> LemmyResult<()> {
test_json::<Group>("assets/discourse/objects/group.json")?;
test_json::<Page>("assets/discourse/objects/page.json")?;
test_json::<Person>("assets/discourse/objects/person.json")?;
Ok(())
}
#[test]
fn test_parse_object_nodebb() -> LemmyResult<()> {
test_json::<Group>("assets/nodebb/objects/group.json")?;
test_json::<Page>("assets/nodebb/objects/page.json")?;
test_json::<Person>("assets/nodebb/objects/person.json")?;
Ok(())
}
#[test]
fn test_parse_object_wordpress() -> LemmyResult<()> {
test_json::<Group>("assets/wordpress/objects/group.json")?;
test_json::<Page>("assets/wordpress/objects/page.json")?;
test_json::<Person>("assets/wordpress/objects/person.json")?;
test_json::<Note>("assets/wordpress/objects/note.json")?;
Ok(())
}
}

View file

@ -42,7 +42,7 @@ pub struct Page {
pub(crate) kind: PageType,
pub(crate) id: ObjectId<ApubPost>,
pub(crate) attributed_to: AttributedTo,
#[serde(deserialize_with = "deserialize_one_or_many")]
#[serde(deserialize_with = "deserialize_one_or_many", default)]
pub(crate) to: Vec<Url>,
// If there is inReplyTo field this is actually a comment and must not be parsed
#[serde(deserialize_with = "deserialize_not_present", default)]
@ -60,7 +60,6 @@ pub struct Page {
#[serde(default)]
pub(crate) attachment: Vec<Attachment>,
pub(crate) image: Option<ImageObject>,
pub(crate) comments_enabled: Option<bool>,
pub(crate) sensitive: Option<bool>,
pub(crate) published: Option<DateTime<Utc>>,
pub(crate) updated: Option<DateTime<Utc>>,
@ -156,28 +155,6 @@ pub enum HashtagType {
}
impl Page {
/// Only mods can change the post's locked status. So if it is changed from the default value,
/// it is a mod action and needs to be verified as such.
///
/// Locked needs to be false on a newly created post (verified in [[CreatePost]].
pub(crate) async fn is_mod_action(&self, context: &Data<LemmyContext>) -> LemmyResult<bool> {
let old_post = self.id.clone().dereference_local(context).await;
Ok(Page::is_locked_changed(&old_post, &self.comments_enabled))
}
pub(crate) fn is_locked_changed<E>(
old_post: &Result<ApubPost, E>,
new_comments_enabled: &Option<bool>,
) -> bool {
if let Some(new_comments_enabled) = new_comments_enabled {
if let Ok(old_post) = old_post {
return new_comments_enabled != &!old_post.locked;
}
}
false
}
pub(crate) fn creator(&self) -> LemmyResult<ObjectId<ApubPerson>> {
match &self.attributed_to {
AttributedTo::Lemmy(l) => Ok(l.clone()),
@ -233,6 +210,10 @@ impl ActivityHandler for Page {
#[async_trait::async_trait]
impl InCommunity for Page {
async fn community(&self, context: &Data<LemmyContext>) -> LemmyResult<ApubCommunity> {
if let Some(audience) = &self.audience {
return audience.dereference(context).await;
}
let community = match &self.attributed_to {
AttributedTo::Lemmy(_) => {
let mut iter = self.to.iter().merge(self.cc.iter());
@ -243,7 +224,7 @@ impl InCommunity for Page {
break c;
}
} else {
Err(LemmyErrorType::NoCommunityFoundInCc)?
Err(LemmyErrorType::CouldntFindCommunity)?;
}
}
}
@ -251,11 +232,12 @@ impl InCommunity for Page {
p.iter()
.find(|a| a.kind == PersonOrGroupType::Group)
.map(|a| ObjectId::<ApubCommunity>::from(a.id.clone().into_inner()))
.ok_or(LemmyErrorType::PageDoesNotSpecifyGroup)?
.ok_or(LemmyErrorType::CouldntFindCommunity)?
.dereference(context)
.await?
}
};
if let Some(audience) = &self.audience {
verify_community_matches(audience, community.actor_id.clone())?;
}
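
The `to` field now combines `deserialize_one_or_many` with `default`, accepting a single URL, an array of URLs, or a missing key. The real helper lives in the activitypub_federation crate; a sketch of its likely shape:

```rust
use serde::{Deserialize, Deserializer};
use url::Url;

// Accept either "to": "https://..." or "to": ["https://...", ...].
fn deserialize_one_or_many<'de, D>(deserializer: D) -> Result<Vec<Url>, D::Error>
where
    D: Deserializer<'de>,
{
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum OneOrMany {
        One(Url),
        Many(Vec<Url>),
    }
    Ok(match OneOrMany::deserialize(deserializer)? {
        OneOrMany::One(url) => vec![url],
        OneOrMany::Many(urls) => urls,
    })
}

#[derive(Deserialize, Debug)]
struct Addressed {
    // `default` makes a missing "to" key an empty Vec instead of an error.
    #[serde(deserialize_with = "deserialize_one_or_many", default)]
    to: Vec<Url>,
}

fn main() {
    let single: Addressed = serde_json::from_str(r##"{"to":"https://example.org/a"}"##).expect("ok");
    let missing: Addressed = serde_json::from_str("{}").expect("ok");
    assert_eq!(single.to.len(), 1);
    assert!(missing.to.is_empty());
}
```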

View file

@ -1,5 +1,6 @@
[package]
name = "lemmy_db_perf"
publish = false
version.workspace = true
edition.workspace = true
description.workspace = true

View file

@ -5,6 +5,12 @@
-- (even if only other columns are updated) because triggers can run after the deletion of referenced rows and
-- before the automatic deletion of the row that references it. This is not a problem for insert or delete.
--
-- After a row update begins, a concurrent update on the same row can't begin until the whole
-- transaction that contains the first update is finished. To reduce this locking, statements in
-- triggers should be ordered based on the likelihood of concurrent writers. For example, updating
-- site_aggregates should be done last because the same row is updated for all local stuff. If
-- it were not last, then the locking period for concurrent writers would extend to include the
-- time consumed by statements that come after.
--
--
-- Create triggers for both post and comments
@ -38,16 +44,18 @@ BEGIN
(thing_like).thing_id, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score = 1), 0) AS upvotes, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score != 1), 0) AS downvotes FROM select_old_and_new_rows AS old_and_new_rows GROUP BY (thing_like).thing_id) AS diff
WHERE
a.thing_id = diff.thing_id
RETURNING
r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score)
UPDATE
person_aggregates AS a
SET
thing_score = a.thing_score + diff.score FROM (
SELECT
creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff
WHERE
a.person_id = diff.creator_id;
AND (diff.upvotes, diff.downvotes) != (0, 0)
RETURNING
r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score)
UPDATE
person_aggregates AS a
SET
thing_score = a.thing_score + diff.score FROM (
SELECT
creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff
WHERE
a.person_id = diff.creator_id
AND diff.score != 0;
RETURN NULL;
END;
$$);
@ -62,6 +70,21 @@ CALL r.post_or_comment ('post');
CALL r.post_or_comment ('comment');
-- Create triggers that update counts in parent aggregates
CREATE FUNCTION r.parent_comment_ids (path ltree)
RETURNS SETOF int
LANGUAGE sql
IMMUTABLE parallel safe
BEGIN
ATOMIC
SELECT
comment_id::int
FROM
string_to_table (ltree2text (path), '.') AS comment_id
-- Skip first and last
LIMIT (nlevel (path) - 2) OFFSET 1;
END;
CALL r.create_triggers ('comment', $$
BEGIN
UPDATE
@ -76,60 +99,84 @@ BEGIN
r.is_counted (comment)
GROUP BY (comment).creator_id) AS diff
WHERE
a.person_id = diff.creator_id;
a.person_id = diff.creator_id
AND diff.comment_count != 0;
UPDATE
site_aggregates AS a
comment_aggregates AS a
SET
comments = a.comments + diff.comments
child_count = a.child_count + diff.child_count
FROM (
SELECT
coalesce(sum(count_diff), 0) AS comments
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (comment)
AND (comment).local) AS diff;
parent_id,
coalesce(sum(count_diff), 0) AS child_count
FROM (
-- For each inserted or deleted comment, this outputs 1 row for each parent comment.
-- For example, this:
--
-- count_diff | (comment).path
-- ------------+----------------
-- 1 | 0.5.6.7
-- 1 | 0.5.6.7.8
--
-- becomes this:
--
-- count_diff | parent_id
-- ------------+-----------
-- 1 | 5
-- 1 | 6
-- 1 | 5
-- 1 | 6
-- 1 | 7
SELECT
count_diff,
parent_id
FROM
select_old_and_new_rows AS old_and_new_rows,
LATERAL r.parent_comment_ids ((comment).path) AS parent_id) AS expanded_old_and_new_rows
GROUP BY
parent_id) AS diff
WHERE
a.comment_id = diff.parent_id
AND diff.child_count != 0;
WITH post_diff AS (
UPDATE
post_aggregates AS a
SET
comments = a.comments + diff.comments,
newest_comment_time = GREATEST (a.newest_comment_time, (
SELECT
published
FROM select_new_rows AS new_comment
WHERE
a.post_id = new_comment.post_id ORDER BY published DESC LIMIT 1)),
newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, (
SELECT
published
FROM select_new_rows AS new_comment
WHERE
a.post_id = new_comment.post_id
-- Ignore comments from the post's creator
AND a.creator_id != new_comment.creator_id
-- Ignore comments on old posts
AND a.published > (new_comment.published - '2 days'::interval)
ORDER BY published DESC LIMIT 1))
newest_comment_time = GREATEST (a.newest_comment_time, diff.newest_comment_time),
newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)
FROM (
SELECT
(comment).post_id,
coalesce(sum(count_diff), 0) AS comments
post.id AS post_id,
coalesce(sum(count_diff), 0) AS comments,
-- Old rows are excluded using `count_diff = 1`
max((comment).published) FILTER (WHERE count_diff = 1) AS newest_comment_time,
max((comment).published) FILTER (WHERE count_diff = 1
-- Ignore comments from the post's creator
AND post.creator_id != (comment).creator_id
-- Ignore comments on old posts
AND post.published > ((comment).published - '2 days'::interval)) AS newest_comment_time_necro,
r.is_counted (post.*) AS include_in_community_aggregates
FROM
select_old_and_new_rows AS old_and_new_rows
LEFT JOIN post ON post.id = (comment).post_id
WHERE
r.is_counted (comment)
GROUP BY
(comment).post_id) AS diff
LEFT JOIN post ON post.id = diff.post_id
post.id) AS diff
WHERE
a.post_id = diff.post_id
AND (diff.comments,
GREATEST (a.newest_comment_time, diff.newest_comment_time),
GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)) != (0,
a.newest_comment_time,
a.newest_comment_time_necro)
RETURNING
a.community_id,
diff.comments,
r.is_counted (post.*) AS include_in_community_aggregates)
diff.include_in_community_aggregates)
UPDATE
community_aggregates AS a
SET
@ -145,7 +192,23 @@ FROM (
GROUP BY
community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND diff.comments != 0;
UPDATE
site_aggregates AS a
SET
comments = a.comments + diff.comments
FROM (
SELECT
coalesce(sum(count_diff), 0) AS comments
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (comment)
AND (comment).local) AS diff
WHERE
diff.comments != 0;
RETURN NULL;
@ -167,20 +230,8 @@ BEGIN
r.is_counted (post)
GROUP BY (post).creator_id) AS diff
WHERE
a.person_id = diff.creator_id;
UPDATE
site_aggregates AS a
SET
posts = a.posts + diff.posts
FROM (
SELECT
coalesce(sum(count_diff), 0) AS posts
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (post)
AND (post).local) AS diff;
a.person_id = diff.creator_id
AND diff.post_count != 0;
UPDATE
community_aggregates AS a
@ -197,7 +248,23 @@ FROM (
GROUP BY
(post).community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND diff.posts != 0;
UPDATE
site_aggregates AS a
SET
posts = a.posts + diff.posts
FROM (
SELECT
coalesce(sum(count_diff), 0) AS posts
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (post)
AND (post).local) AS diff
WHERE
diff.posts != 0;
RETURN NULL;
@ -217,7 +284,9 @@ BEGIN
FROM select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (community)
AND (community).local) AS diff;
AND (community).local) AS diff
WHERE
diff.communities != 0;
RETURN NULL;
@ -235,7 +304,9 @@ BEGIN
SELECT
coalesce(sum(count_diff), 0) AS users
FROM select_old_and_new_rows AS old_and_new_rows
WHERE (person).local) AS diff;
WHERE (person).local) AS diff
WHERE
diff.users != 0;
RETURN NULL;
@ -270,7 +341,8 @@ BEGIN
GROUP BY
old_post.community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND diff.comments != 0;
RETURN NULL;
END;
$$;
@ -296,7 +368,8 @@ BEGIN
LEFT JOIN community ON community.id = (community_follower).community_id
LEFT JOIN person ON person.id = (community_follower).person_id GROUP BY (community_follower).community_id) AS diff
WHERE
a.community_id = diff.community_id;
a.community_id = diff.community_id
AND (diff.subscribers, diff.subscribers_local) != (0, 0);
RETURN NULL;
@ -474,3 +547,24 @@ CREATE TRIGGER delete_follow
FOR EACH ROW
EXECUTE FUNCTION r.delete_follow_before_person ();
-- Triggers that change values before insert or update
CREATE FUNCTION r.comment_change_values ()
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
DECLARE
id text = NEW.id::text;
BEGIN
-- Make `path` end with `id` if it doesn't already
IF NOT (NEW.path ~ ('*.' || id)::lquery) THEN
NEW.path = NEW.path || id;
END IF;
RETURN NEW;
END
$$;
CREATE TRIGGER change_values
BEFORE INSERT OR UPDATE ON comment
FOR EACH ROW
EXECUTE FUNCTION r.comment_change_values ();
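
For a cross-check of `r.parent_comment_ids` above: given an `ltree` path like `0.5.6.7.8`, it skips the leading `0` and the comment's own trailing id and returns every ancestor comment id in between. The same logic in Rust:

```rust
// Rust rendering of r.parent_comment_ids: drop the first label ("0") and the
// last label (the comment's own id), keep everything in between.
fn parent_comment_ids(path: &str) -> Vec<i32> {
    let labels: Vec<&str> = path.split('.').collect();
    if labels.len() < 3 {
        return Vec::new(); // a top-level comment has no parent comments
    }
    labels[1..labels.len() - 1]
        .iter()
        .filter_map(|label| label.parse().ok())
        .collect()
}

fn main() {
    assert_eq!(parent_comment_ids("0.5.6.7.8"), vec![5, 6, 7]);
    assert_eq!(parent_comment_ids("0.5"), Vec::<i32>::new());
}
```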

View file

@ -43,20 +43,13 @@ impl LocalUserLanguage {
};
let conn = &mut get_conn(pool).await?;
conn
.build_transaction()
.run(|conn| {
Box::pin(async move {
let langs = local_user_language
.filter(local_user_id.eq(for_local_user_id))
.order(language_id)
.select(language_id)
.get_results(conn)
.await?;
convert_read_languages(conn, langs).await
}) as _
})
.await
let langs = local_user_language
.filter(local_user_id.eq(for_local_user_id))
.order(language_id)
.select(language_id)
.get_results(conn)
.await?;
convert_read_languages(conn, langs).await
}
/// Update the user's languages.
@ -90,24 +83,33 @@ impl LocalUserLanguage {
.build_transaction()
.run(|conn| {
Box::pin(async move {
use crate::schema::local_user_language::dsl::{local_user_id, local_user_language};
// Clear the current user languages
delete(local_user_language.filter(local_user_id.eq(for_local_user_id)))
.execute(conn)
.await?;
use crate::schema::local_user_language::dsl::{
language_id,
local_user_id,
local_user_language,
};
// Delete old languages, not including new languages
let delete_old = delete(local_user_language)
.filter(local_user_id.eq(for_local_user_id))
.filter(language_id.ne_all(&lang_ids))
.execute(conn);
let forms = lang_ids
.into_iter()
.map(|l| LocalUserLanguageForm {
.iter()
.map(|&l| LocalUserLanguageForm {
local_user_id: for_local_user_id,
language_id: l,
})
.collect::<Vec<_>>();
insert_into(local_user_language)
// Insert new languages
let insert_new = insert_into(local_user_language)
.values(forms)
.execute(conn)
.await?;
.on_conflict((language_id, local_user_id))
.do_nothing()
.execute(conn);
tokio::try_join!(delete_old, insert_new)?;
Ok(())
}) as _
})
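
Instead of clearing every row and reinserting, the update now deletes only rows outside the new set (`ne_all`) and inserts with `ON CONFLICT DO NOTHING`, so unchanged rows are never touched and concurrent callers cannot race on a unique violation. A rough sketch of the resulting set arithmetic (hypothetical helper, not Lemmy code):

    use std::collections::HashSet;

    fn language_diff(current: &[i32], desired: &[i32]) -> (Vec<i32>, Vec<i32>) {
        let cur: HashSet<i32> = current.iter().copied().collect();
        let want: HashSet<i32> = desired.iter().copied().collect();
        let mut to_delete: Vec<i32> = cur.difference(&want).copied().collect(); // ne_all(...)
        let mut to_insert: Vec<i32> = want.difference(&cur).copied().collect(); // conflicts skipped
        to_delete.sort_unstable();
        to_insert.sort_unstable();
        (to_delete, to_insert)
    }

    fn main() {
        let (del, ins) = language_diff(&[1, 2, 3], &[2, 3, 4]);
        assert_eq!((del, ins), (vec![1], vec![4]));
    }
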
@ -159,25 +161,30 @@ impl SiteLanguage {
.build_transaction()
.run(|conn| {
Box::pin(async move {
use crate::schema::site_language::dsl::{site_id, site_language};
use crate::schema::site_language::dsl::{language_id, site_id, site_language};
// Clear the current languages
delete(site_language.filter(site_id.eq(for_site_id)))
.execute(conn)
.await?;
// Delete old languages, not including new languages
let delete_old = delete(site_language)
.filter(site_id.eq(for_site_id))
.filter(language_id.ne_all(&lang_ids))
.execute(conn);
let forms = lang_ids
.into_iter()
.map(|l| SiteLanguageForm {
.iter()
.map(|&l| SiteLanguageForm {
site_id: for_site_id,
language_id: l,
})
.collect::<Vec<_>>();
insert_into(site_language)
// Insert new languages
let insert_new = insert_into(site_language)
.values(forms)
.get_result::<Self>(conn)
.await?;
.on_conflict((site_id, language_id))
.do_nothing()
.execute(conn);
tokio::try_join!(delete_old, insert_new)?;
CommunityLanguage::limit_languages(conn, instance_id).await?;
@ -278,8 +285,8 @@ impl CommunityLanguage {
}
let form = lang_ids
.into_iter()
.map(|language_id| CommunityLanguageForm {
.iter()
.map(|&language_id| CommunityLanguageForm {
community_id: for_community_id,
language_id,
})
@ -289,25 +296,25 @@ impl CommunityLanguage {
.build_transaction()
.run(|conn| {
Box::pin(async move {
use crate::schema::community_language::dsl::{community_id, community_language};
use diesel::result::DatabaseErrorKind::UniqueViolation;
// Clear the current languages
delete(community_language.filter(community_id.eq(for_community_id)))
.execute(conn)
.await?;
use crate::schema::community_language::dsl::{
community_id,
community_language,
language_id,
};
// Delete old languages, not including new languages
let delete_old = delete(community_language)
.filter(community_id.eq(for_community_id))
.filter(language_id.ne_all(&lang_ids))
.execute(conn);
let insert_res = insert_into(community_language)
// Insert new languages
let insert_new = insert_into(community_language)
.values(form)
.get_result::<Self>(conn)
.await;
.on_conflict((community_id, language_id))
.do_nothing()
.execute(conn);
if let Err(Error::DatabaseError(UniqueViolation, _info)) = insert_res {
// race condition: this function was probably called simultaneously from another caller. ignore error
// tracing::warn!("unique error: {_info:#?}");
// _info.constraint_name() should be = "community_language_community_id_language_id_key"
return Ok(());
}
insert_res?;
tokio::try_join!(delete_old, insert_new)?;
Ok(())
}) as _


@ -15,12 +15,7 @@ use crate::{
utils::{functions::coalesce, get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT},
};
use chrono::{DateTime, Utc};
use diesel::{
dsl::{insert_into, sql_query},
result::Error,
ExpressionMethods,
QueryDsl,
};
use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl};
use diesel_async::RunQueryDsl;
use diesel_ltree::Ltree;
use url::Url;
@ -72,81 +67,23 @@ impl Comment {
parent_path: Option<&Ltree>,
) -> Result<Comment, Error> {
let conn = &mut get_conn(pool).await?;
let comment_form = (comment_form, parent_path.map(|p| comment::path.eq(p)));
conn
.build_transaction()
.run(|conn| {
Box::pin(async move {
// Insert, to get the id
let inserted_comment = if let Some(timestamp) = timestamp {
insert_into(comment::table)
.values(comment_form)
.on_conflict(comment::ap_id)
.filter_target(coalesce(comment::updated, comment::published).lt(timestamp))
.do_update()
.set(comment_form)
.get_result::<Self>(conn)
.await?
} else {
insert_into(comment::table)
.values(comment_form)
.get_result::<Self>(conn)
.await?
};
let comment_id = inserted_comment.id;
// You need to update the ltree column
let ltree = Ltree(if let Some(parent_path) = parent_path {
// The previous parent will already have 0 in it
// Append this comment id
format!("{}.{}", parent_path.0, comment_id)
} else {
// '0' is always the first path, append to that
format!("{}.{}", 0, comment_id)
});
let updated_comment = diesel::update(comment::table.find(comment_id))
.set(comment::path.eq(ltree))
.get_result::<Self>(conn)
.await?;
// Update the child count for the parent comment_aggregates
// You could do this with a trigger, but since you have to do this manually anyway,
// you can just have it here
if let Some(parent_path) = parent_path {
// You have to update counts for all parents, not just the immediate one
// TODO if the performance of this is terrible, it might be better to do this as part of a
// scheduled query... although the counts would often be wrong.
//
// The child_count query for reference:
// select c.id, c.path, count(c2.id) as child_count from comment c
// left join comment c2 on c2.path <@ c.path and c2.path != c.path
// group by c.id
let parent_id = parent_path.0.split('.').nth(1);
if let Some(parent_id) = parent_id {
let top_parent = format!("0.{}", parent_id);
let update_child_count_stmt = format!(
"
update comment_aggregates ca set child_count = c.child_count
from (
select c.id, c.path, count(c2.id) as child_count from comment c
join comment c2 on c2.path <@ c.path and c2.path != c.path
and c.path <@ '{top_parent}'
group by c.id
) as c
where ca.comment_id = c.id"
);
sql_query(update_child_count_stmt).execute(conn).await?;
}
}
Ok(updated_comment)
}) as _
})
.await
if let Some(timestamp) = timestamp {
insert_into(comment::table)
.values(comment_form)
.on_conflict(comment::ap_id)
.filter_target(coalesce(comment::updated, comment::published).lt(timestamp))
.do_update()
.set(comment_form)
.get_result::<Self>(conn)
.await
} else {
insert_into(comment::table)
.values(comment_form)
.get_result::<Self>(conn)
.await
}
}
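
The `filter_target` upsert above replaces a stored comment only when the incoming apub object is strictly newer than `coalesce(updated, published)`. A small sketch of that ordering rule, with timestamps simplified to integers:

    fn should_overwrite(stored_updated: Option<i64>, stored_published: i64, incoming: i64) -> bool {
        stored_updated.unwrap_or(stored_published) < incoming
    }

    fn main() {
        assert!(should_overwrite(None, 100, 150)); // newer revision wins
        assert!(!should_overwrite(Some(200), 100, 150)); // stale object is ignored
    }
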
pub async fn read_from_apub_id(


@ -55,12 +55,17 @@ impl LocalUser {
pool: &mut DbPool<'_>,
local_user_id: LocalUserId,
form: &LocalUserUpdateForm,
) -> Result<LocalUser, Error> {
) -> Result<usize, Error> {
let conn = &mut get_conn(pool).await?;
diesel::update(local_user::table.find(local_user_id))
let res = diesel::update(local_user::table.find(local_user_id))
.set(form)
.get_result::<Self>(conn)
.await
.execute(conn)
.await;
// Diesel throws an error if the update form is all Nones (nothing to update), so ignore that case.
match res {
Err(Error::QueryBuilderError(_)) => Ok(0),
other => other,
}
}
pub async fn delete(pool: &mut DbPool<'_>, id: LocalUserId) -> Result<usize, Error> {


@ -50,7 +50,7 @@ impl PasswordResetRequest {
) -> Result<PasswordResetRequest, Error> {
let form = PasswordResetRequestForm {
local_user_id: from_local_user_id,
token: token_,
token: token_.into(),
};
Self::create(pool, &form).await
@ -134,7 +134,7 @@ mod tests {
let expected_password_reset_request = PasswordResetRequest {
id: inserted_password_reset_request.id,
local_user_id: inserted_local_user.id,
token: token.to_string(),
token: token.to_string().into(),
published: inserted_password_reset_request.published,
};


@ -84,22 +84,6 @@ impl Post {
.await
}
pub async fn list_for_community(
pool: &mut DbPool<'_>,
the_community_id: CommunityId,
) -> Result<Vec<Self>, Error> {
let conn = &mut get_conn(pool).await?;
post::table
.filter(post::community_id.eq(the_community_id))
.filter(post::deleted.eq(false))
.filter(post::removed.eq(false))
.then_order_by(post::featured_community.desc())
.then_order_by(post::published.desc())
.limit(FETCH_LIMIT_MAX)
.load::<Self>(conn)
.await
}
pub async fn list_featured_for_community(
pool: &mut DbPool<'_>,
the_community_id: CommunityId,


@ -24,6 +24,7 @@ pub mod aggregates;
#[cfg(feature = "full")]
pub mod impls;
pub mod newtypes;
pub mod sensitive;
#[cfg(feature = "full")]
#[rustfmt::skip]
#[allow(clippy::wildcard_imports)]


@ -178,7 +178,7 @@ diesel::table! {
icon -> Nullable<Text>,
banner -> Nullable<Text>,
#[max_length = 255]
followers_url -> Varchar,
followers_url -> Nullable<Varchar>,
#[max_length = 255]
inbox_url -> Varchar,
#[max_length = 255]


@ -0,0 +1,57 @@
use serde::{Deserialize, Serialize};
use std::{fmt::Debug, ops::Deref};
#[cfg(feature = "full")]
use ts_rs::TS;
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, Default)]
#[cfg_attr(feature = "full", derive(DieselNewType))]
#[serde(transparent)]
pub struct SensitiveString(String);
impl SensitiveString {
pub fn into_inner(self) -> String {
self.0
}
}
impl Debug for SensitiveString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Sensitive").finish()
}
}
impl AsRef<[u8]> for SensitiveString {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl Deref for SensitiveString {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<String> for SensitiveString {
fn from(t: String) -> Self {
SensitiveString(t)
}
}
#[cfg(feature = "full")]
impl TS for SensitiveString {
fn name() -> String {
"string".to_string()
}
fn name_with_type_args(_args: Vec<String>) -> String {
"string".to_string()
}
fn dependencies() -> Vec<ts_rs::Dependency> {
Vec::new()
}
fn transparent() -> bool {
true
}
}
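
Assuming the type above, a short sketch of how the wrapper behaves: Debug output is redacted, while `Deref` and `into_inner` still allow deliberate access (the secret value is made up):

    fn main() {
        let token = SensitiveString::from("hunter2".to_string());
        assert_eq!(format!("{token:?}"), "Sensitive"); // nothing leaks into logs
        assert_eq!(&*token, "hunter2"); // explicit access still works
        assert_eq!(token.into_inner(), "hunter2");
    }
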


@ -2,6 +2,7 @@
use crate::schema::{community, community_follower, community_moderator, community_person_ban};
use crate::{
newtypes::{CommunityId, DbUrl, InstanceId, PersonId},
sensitive::SensitiveString,
source::placeholder_apub_url,
CommunityVisibility,
};
@ -39,7 +40,7 @@ pub struct Community {
/// Whether the community is local.
pub local: bool,
#[serde(skip)]
pub private_key: Option<String>,
pub private_key: Option<SensitiveString>,
#[serde(skip)]
pub public_key: String,
#[serde(skip)]
@ -49,8 +50,8 @@ pub struct Community {
/// A URL for a banner.
pub banner: Option<DbUrl>,
#[cfg_attr(feature = "full", ts(skip))]
#[serde(skip, default = "placeholder_apub_url")]
pub followers_url: DbUrl,
#[serde(skip)]
pub followers_url: Option<DbUrl>,
#[cfg_attr(feature = "full", ts(skip))]
#[serde(skip, default = "placeholder_apub_url")]
pub inbox_url: DbUrl,


@ -2,6 +2,7 @@
use crate::schema::local_user;
use crate::{
newtypes::{LocalUserId, PersonId},
sensitive::SensitiveString,
ListingType,
PostListingMode,
SortType,
@ -24,8 +25,8 @@ pub struct LocalUser {
/// The person_id for the local user.
pub person_id: PersonId,
#[serde(skip)]
pub password_encrypted: String,
pub email: Option<String>,
pub password_encrypted: SensitiveString,
pub email: Option<SensitiveString>,
/// Whether to show NSFW content.
pub show_nsfw: bool,
pub theme: String,
@ -47,7 +48,7 @@ pub struct LocalUser {
/// Whether their registration application has been accepted.
pub accepted_application: bool,
#[serde(skip)]
pub totp_2fa_secret: Option<String>,
pub totp_2fa_secret: Option<SensitiveString>,
/// Open links in a new tab.
pub open_links_in_new_tab: bool,
pub blur_nsfw: bool,


@ -1,6 +1,6 @@
use crate::newtypes::LocalUserId;
#[cfg(feature = "full")]
use crate::schema::login_token;
use crate::{newtypes::LocalUserId, sensitive::SensitiveString};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
@ -18,7 +18,7 @@ use ts_rs::TS;
pub struct LoginToken {
/// Jwt token for this login
#[serde(skip)]
pub token: String,
pub token: SensitiveString,
pub user_id: LocalUserId,
/// Time of login
pub published: DateTime<Utc>,
@ -31,7 +31,7 @@ pub struct LoginToken {
#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = login_token))]
pub struct LoginTokenCreateForm {
pub token: String,
pub token: SensitiveString,
pub user_id: LocalUserId,
pub ip: Option<String>,
pub user_agent: Option<String>,


@ -1,6 +1,6 @@
use crate::newtypes::LocalUserId;
#[cfg(feature = "full")]
use crate::schema::password_reset_request;
use crate::{newtypes::LocalUserId, sensitive::SensitiveString};
use chrono::{DateTime, Utc};
#[derive(PartialEq, Eq, Debug)]
@ -9,7 +9,7 @@ use chrono::{DateTime, Utc};
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
pub struct PasswordResetRequest {
pub id: i32,
pub token: String,
pub token: SensitiveString,
pub published: DateTime<Utc>,
pub local_user_id: LocalUserId,
}
@ -18,5 +18,5 @@ pub struct PasswordResetRequest {
#[cfg_attr(feature = "full", diesel(table_name = password_reset_request))]
pub struct PasswordResetRequestForm {
pub local_user_id: LocalUserId,
pub token: String,
pub token: SensitiveString,
}


@ -2,6 +2,7 @@
use crate::schema::{person, person_follower};
use crate::{
newtypes::{DbUrl, InstanceId, PersonId},
sensitive::SensitiveString,
source::placeholder_apub_url,
};
use chrono::{DateTime, Utc};
@ -36,7 +37,7 @@ pub struct Person {
/// Whether the person is local to our site.
pub local: bool,
#[serde(skip)]
pub private_key: Option<String>,
pub private_key: Option<SensitiveString>,
#[serde(skip)]
pub public_key: String,
#[serde(skip)]


@ -1,5 +1,6 @@
#[cfg(feature = "full")]
use crate::schema::secret;
use crate::sensitive::SensitiveString;
#[derive(Clone)]
#[cfg_attr(feature = "full", derive(Queryable, Selectable, Identifiable))]
@ -7,5 +8,5 @@ use crate::schema::secret;
#[cfg_attr(feature = "full", diesel(check_for_backend(diesel::pg::Pg)))]
pub struct Secret {
pub id: i32,
pub jwt_secret: String,
pub jwt_secret: SensitiveString,
}


@ -1,6 +1,9 @@
use crate::newtypes::{DbUrl, InstanceId, SiteId};
#[cfg(feature = "full")]
use crate::schema::site;
use crate::{
newtypes::{DbUrl, InstanceId, SiteId},
sensitive::SensitiveString,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
@ -35,7 +38,7 @@ pub struct Site {
/// The site inbox
pub inbox_url: DbUrl,
#[serde(skip)]
pub private_key: Option<String>,
pub private_key: Option<SensitiveString>,
// TODO: mark as `serde(skip)` in next major release as it's not needed for the API
pub public_key: String,
pub instance_id: InstanceId,


@ -33,13 +33,22 @@ use lemmy_utils::{
use once_cell::sync::Lazy;
use regex::Regex;
use rustls::{
client::{ServerCertVerified, ServerCertVerifier},
ServerName,
client::danger::{
DangerousClientConfigBuilder,
HandshakeSignatureValid,
ServerCertVerified,
ServerCertVerifier,
},
crypto::{self, verify_tls12_signature, verify_tls13_signature},
pki_types::{CertificateDer, ServerName, UnixTime},
ClientConfig,
DigitallySignedStruct,
SignatureScheme,
};
use std::{
ops::{Deref, DerefMut},
sync::Arc,
time::{Duration, SystemTime},
time::Duration,
};
use tracing::error;
use url::Url;
@ -312,10 +321,11 @@ pub fn diesel_option_overwrite_to_url_create(opt: &Option<String>) -> LemmyResul
fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
let fut = async {
let rustls_config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_custom_certificate_verifier(Arc::new(NoCertVerifier {}))
.with_no_client_auth();
let rustls_config = DangerousClientConfigBuilder {
cfg: ClientConfig::builder(),
}
.with_custom_certificate_verifier(Arc::new(NoCertVerifier {}))
.with_no_client_auth();
let tls = tokio_postgres_rustls::MakeRustlsConnect::new(rustls_config);
let (client, conn) = tokio_postgres::connect(config, tls)
@ -338,21 +348,55 @@ fn establish_connection(config: &str) -> BoxFuture<ConnectionResult<AsyncPgConne
fut.boxed()
}
#[derive(Debug)]
struct NoCertVerifier {}
impl ServerCertVerifier for NoCertVerifier {
fn verify_server_cert(
&self,
_end_entity: &rustls::Certificate,
_intermediates: &[rustls::Certificate],
_end_entity: &CertificateDer,
_intermediates: &[CertificateDer],
_server_name: &ServerName,
_scts: &mut dyn Iterator<Item = &[u8]>,
_ocsp_response: &[u8],
_now: SystemTime,
_ocsp: &[u8],
_now: UnixTime,
) -> Result<ServerCertVerified, rustls::Error> {
// Will verify all (even invalid) certs without any checks (sslmode=require)
Ok(ServerCertVerified::assertion())
}
fn verify_tls12_signature(
&self,
message: &[u8],
cert: &CertificateDer,
dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
verify_tls12_signature(
message,
cert,
dss,
&crypto::ring::default_provider().signature_verification_algorithms,
)
}
fn verify_tls13_signature(
&self,
message: &[u8],
cert: &CertificateDer,
dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
verify_tls13_signature(
message,
cert,
dss,
&crypto::ring::default_provider().signature_verification_algorithms,
)
}
fn supported_verify_schemes(&self) -> Vec<SignatureScheme> {
crypto::ring::default_provider()
.signature_verification_algorithms
.supported_schemes()
}
}
pub async fn build_db_pool() -> LemmyResult<ActualDbPool> {


@ -220,8 +220,7 @@ fn queries<'a>() -> Queries<
query = query.filter(
comment::content
.ilike(fuzzy_search(&search_term))
.and(comment::removed.eq(false))
.and(comment::deleted.eq(false)),
.and(not(comment::removed.or(comment::deleted))),
);
};
@ -265,10 +264,13 @@ fn queries<'a>() -> Queries<
.then_order_by(is_saved(person_id_join).desc());
}
if options.liked_only {
query = query.filter(score(person_id_join).eq(1));
} else if options.disliked_only {
query = query.filter(score(person_id_join).eq(-1));
if let Some(my_id) = my_person_id {
let not_creator_filter = comment::creator_id.ne(my_id);
if options.liked_only {
query = query.filter(not_creator_filter).filter(score(my_id).eq(1));
} else if options.disliked_only {
query = query.filter(not_creator_filter).filter(score(my_id).eq(-1));
}
}
if !options
@ -683,8 +685,10 @@ mod tests {
.await?;
assert_eq!(
expected_comment_view_no_person,
read_comment_views_no_person[0]
&expected_comment_view_no_person,
read_comment_views_no_person
.first()
.ok_or(LemmyErrorType::CouldntFindComment)?
);
let read_comment_views_with_person = CommentQuery {
@ -715,18 +719,45 @@ mod tests {
// Make sure block set the creator blocked
assert!(read_comment_from_blocked_person.creator_blocked);
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn test_liked_only() -> LemmyResult<()> {
let pool = &build_db_pool_for_tests().await;
let pool = &mut pool.into();
let data = init_data(pool).await;
// Unblock sara first
let timmy_unblocks_sara_form = PersonBlockForm {
person_id: data.timmy_local_user_view.person.id,
target_id: data.inserted_sara_person.id,
};
PersonBlock::unblock(pool, &timmy_unblocks_sara_form).await?;
// Like a new comment
let comment_like_form = CommentLikeForm {
comment_id: data.inserted_comment_1.id,
post_id: data.inserted_post.id,
person_id: data.timmy_local_user_view.person.id,
score: 1,
};
CommentLike::like(pool, &comment_like_form).await.unwrap();
let read_liked_comment_views = CommentQuery {
local_user: (Some(&data.timmy_local_user_view)),
liked_only: (true),
..Default::default()
}
.list(pool)
.await?;
.await?
.into_iter()
.map(|c| c.comment.content)
.collect::<Vec<String>>();
assert_eq!(
expected_comment_view_with_person,
read_liked_comment_views[0]
);
// Shouldn't include your own comment, only other people's
assert_eq!(data.inserted_comment_1.content, read_liked_comment_views[0]);
assert_length!(1, read_liked_comment_views);
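
Both the comment and post queries now pair `liked_only`/`disliked_only` with a `creator_id != my_id` filter, so your own vote on your own submission no longer surfaces it. A toy model of that filter (types are illustrative, not Lemmy's):

    struct Item {
        creator_id: i32,
        my_score: i8,
    }

    fn liked_only(items: &[Item], my_id: i32) -> Vec<usize> {
        items
            .iter()
            .enumerate()
            .filter(|(_, i)| i.creator_id != my_id && i.my_score == 1)
            .map(|(idx, _)| idx)
            .collect()
    }

    fn main() {
        let items = [
            Item { creator_id: 1, my_score: 1 }, // mine: excluded
            Item { creator_id: 2, my_score: 1 }, // someone else's: included
        ];
        assert_eq!(liked_only(&items, 1), vec![1]);
    }
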
@ -836,7 +867,7 @@ mod tests {
// change user lang to finnish, should only show one post in finnish and one undetermined
let finnish_id = Language::read_id_from_code(pool, Some("fi"))
.await?
.unwrap();
.ok_or(LemmyErrorType::LanguageNotAllowed)?;
LocalUserLanguage::update(
pool,
vec![finnish_id],
@ -856,7 +887,10 @@ mod tests {
assert!(finnish_comment.is_some());
assert_eq!(
data.inserted_comment_2.content,
finnish_comment.unwrap().comment.content
finnish_comment
.ok_or(LemmyErrorType::CouldntFindComment)?
.comment
.content
);
// now show all comments with undetermined language (which is the default value)


@ -396,11 +396,13 @@ fn queries<'a>() -> Queries<
if let Some(search_term) = &options.search_term {
let searcher = fuzzy_search(search_term);
query = query.filter(
post::name
.ilike(searcher.clone())
.or(post::body.ilike(searcher)),
);
query = query
.filter(
post::name
.ilike(searcher.clone())
.or(post::body.ilike(searcher)),
)
.filter(not(post::removed.or(post::deleted)));
}
// If there is a content warning, show nsfw content by default.
@ -450,11 +452,12 @@ fn queries<'a>() -> Queries<
}
}
if let Some(person_id) = my_person_id {
if let Some(my_id) = my_person_id {
let not_creator_filter = post_aggregates::creator_id.ne(my_id);
if options.liked_only {
query = query.filter(score(person_id).eq(1));
query = query.filter(not_creator_filter).filter(score(my_id).eq(1));
} else if options.disliked_only {
query = query.filter(score(person_id).eq(-1));
query = query.filter(not_creator_filter).filter(score(my_id).eq(-1));
}
};
@ -947,9 +950,8 @@ mod tests {
show_bot_accounts: Some(false),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false;
let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
@ -983,9 +985,8 @@ mod tests {
show_bot_accounts: Some(true),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = true;
let post_listings_with_bots = PostQuery {
community_id: Some(data.inserted_community.id),
@ -1107,9 +1108,8 @@ mod tests {
show_bot_accounts: Some(false),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_bot_accounts = false;
let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
@ -1119,6 +1119,36 @@ mod tests {
.await?;
assert_eq!(vec![expected_post_with_upvote], read_post_listing);
let like_removed =
PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
assert_eq!(1, like_removed);
cleanup(data, pool).await
}
#[tokio::test]
#[serial]
async fn post_listing_liked_only() -> LemmyResult<()> {
let pool = &build_db_pool().await?;
let pool = &mut pool.into();
let data = init_data(pool).await?;
// Like both the bot post, and your own
// The liked_only should not show your own post
let post_like_form = PostLikeForm {
post_id: data.inserted_post.id,
person_id: data.local_user_view.person.id,
score: 1,
};
PostLike::like(pool, &post_like_form).await?;
let bot_post_like_form = PostLikeForm {
post_id: data.inserted_bot_post.id,
person_id: data.local_user_view.person.id,
score: 1,
};
PostLike::like(pool, &bot_post_like_form).await?;
// Read the liked-only listing
let read_liked_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
liked_only: true,
@ -1126,7 +1156,9 @@ mod tests {
}
.list(&data.site, pool)
.await?;
assert_eq!(read_post_listing, read_liked_post_listing);
// This should only include the bot post, not the one you created
assert_eq!(vec![POST_BY_BOT], names(&read_liked_post_listing));
let read_disliked_post_listing = PostQuery {
community_id: Some(data.inserted_community.id),
@ -1135,11 +1167,10 @@ mod tests {
}
.list(&data.site, pool)
.await?;
// Should be no posts
assert_eq!(read_disliked_post_listing, vec![]);
let like_removed =
PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id).await?;
assert_eq!(1, like_removed);
cleanup(data, pool).await
}
@ -1499,9 +1530,8 @@ mod tests {
show_read_posts: Some(false),
..Default::default()
};
let inserted_local_user =
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user = inserted_local_user;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
data.local_user_view.local_user.show_read_posts = false;
// Mark a post as read
PostRead::mark_as_read(
@ -1552,7 +1582,7 @@ mod tests {
assert!(
&post_listings_show_hidden
.first()
.expect("first post should exist")
.ok_or(LemmyErrorType::CouldntFindPost)?
.hidden
);
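
Several tests above replace `unwrap()` and direct indexing with `first().ok_or(...)?`, so a missing fixture row becomes a test error instead of a panic. A minimal sketch of the pattern (the error string stands in for a LemmyErrorType variant):

    fn first_or_err<T>(items: &[T]) -> Result<&T, String> {
        items.first().ok_or_else(|| "couldnt_find_post".to_string())
    }

    fn main() {
        let empty: Vec<i32> = vec![];
        assert!(first_or_err(&empty).is_err()); // error, not a panic
        assert_eq!(*first_or_err(&[7]).unwrap(), 7);
    }
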


@ -43,21 +43,18 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
.map_err(|_| ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?
.ok_or(ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?;
let protocols = if site_view.local_site.federation_enabled {
Some(vec!["activitypub".to_string()])
} else {
None
};
// Since there are 3 registration options,
// we need to set open_registrations to true if RegistrationMode is not Closed.
let open_registrations = Some(site_view.local_site.registration_mode != RegistrationMode::Closed);
let json = NodeInfo {
version: Some("2.0".to_string()),
version: Some("2.1".to_string()),
software: Some(NodeInfoSoftware {
name: Some("lemmy".to_string()),
version: Some(VERSION.to_string()),
repository: Some("https://github.com/LemmyNet/lemmy".to_string()),
homepage: Some("https://join-lemmy.org/".to_string()),
}),
protocols,
protocols: Some(vec!["activitypub".to_string()]),
usage: Some(NodeInfoUsage {
users: Some(NodeInfoUsers {
total: Some(site_view.counts.users),
@ -68,6 +65,11 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
local_comments: Some(site_view.counts.comments),
}),
open_registrations,
services: Some(NodeInfoServices {
inbound: Some(vec![]),
outbound: Some(vec![]),
}),
metadata: Some(vec![]),
};
Ok(HttpResponse::Ok().json(json))
@ -84,6 +86,7 @@ struct NodeInfoWellKnownLinks {
pub href: Url,
}
/// Nodeinfo spec: http://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct NodeInfo {
@ -92,6 +95,9 @@ pub struct NodeInfo {
pub protocols: Option<Vec<String>>,
pub usage: Option<NodeInfoUsage>,
pub open_registrations: Option<bool>,
/// These fields are required by the spec for no reason
pub services: Option<NodeInfoServices>,
pub metadata: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
@ -99,6 +105,8 @@ pub struct NodeInfo {
pub struct NodeInfoSoftware {
pub name: Option<String>,
pub version: Option<String>,
pub repository: Option<String>,
pub homepage: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
@ -116,3 +124,10 @@ pub struct NodeInfoUsers {
pub active_halfyear: Option<i64>,
pub active_month: Option<i64>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct NodeInfoServices {
pub inbound: Option<Vec<String>>,
pub outbound: Option<Vec<String>>,
}
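
Assuming the structs above plus serde_json, the newly required fields serialize as empty arrays rather than being omitted, which is what a nodeinfo 2.1 schema check expects:

    fn main() {
        let services = NodeInfoServices {
            inbound: Some(vec![]),
            outbound: Some(vec![]),
        };
        // prints {"inbound":[],"outbound":[]}
        println!("{}", serde_json::to_string(&services).unwrap());
    }
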


@ -78,7 +78,7 @@ openssl = { version = "0.10.64", optional = true }
html2text = { version = "0.6.0", optional = true }
deser-hjson = { version = "2.2.4", optional = true }
smart-default = { version = "0.7.1", optional = true }
lettre = { version = "0.11.6", features = [
lettre = { version = "0.11.7", features = [
"tokio1",
"tokio1-native-tls",
], optional = true }


@ -99,8 +99,6 @@ pub enum LemmyErrorType {
PersonIsBannedFromSite(String),
InvalidVoteValue,
PageDoesNotSpecifyCreator,
PageDoesNotSpecifyGroup,
NoCommunityFoundInCc,
NoEmailSetup,
LocalSiteNotSetup,
EmailSmtpServerNeedsAPort,
@ -176,6 +174,8 @@ pub enum LemmyErrorType {
InvalidUnixTime,
InvalidBotAction,
CantBlockLocalInstance,
UrlWithoutDomain,
InboxTimeout,
Unknown(String),
}


@ -19,7 +19,7 @@ const ALLOWED_POST_URL_SCHEMES: [&str; 3] = ["http", "https", "magnet"];
const BODY_MAX_LENGTH: usize = 10000;
const POST_BODY_MAX_LENGTH: usize = 50000;
const BIO_MAX_LENGTH: usize = 300;
const ALT_TEXT_MAX_LENGTH: usize = 300;
const ALT_TEXT_MAX_LENGTH: usize = 1500;
const SITE_NAME_MAX_LENGTH: usize = 20;
const SITE_NAME_MIN_LENGTH: usize = 1;
const SITE_DESCRIPTION_MAX_LENGTH: usize = 150;

@ -1 +1 @@
Subproject commit 866e4056656755f7b31e20094b46391e6931e3e7
Subproject commit f0ab81deea347c433277a90ae752b10f68473719


@ -114,6 +114,8 @@ services:
"-c",
"auto_explain.log_analyze=true",
"-c",
"auto_explain.log_triggers=true",
"-c",
"track_activity_query_size=1048576",
]
ports:


@ -0,0 +1,3 @@
ALTER TABLE community
ALTER COLUMN followers_url SET NOT NULL;


@ -0,0 +1,3 @@
ALTER TABLE community
ALTER COLUMN followers_url DROP NOT NULL;


@ -0,0 +1,3 @@
SELECT
1;


@ -0,0 +1,4 @@
-- This migration exists to trigger re-execution of replaceable_schema
SELECT
1;


@ -4,39 +4,39 @@ set -e
echo "Do not stop in the middle of this upgrade, wait until you see the message: Upgrade complete."
echo "Stopping lemmy and all services..."
sudo docker-compose stop
sudo docker compose stop
echo "Make sure postgres is started..."
sudo docker-compose up -d postgres
sudo docker compose up -d postgres
echo "Waiting..."
sleep 20s
echo "Exporting the Database to 15_16.dump.sql ..."
sudo docker-compose exec -T postgres pg_dumpall -c -U lemmy > 15_16_dump.sql
sudo docker compose exec -T postgres pg_dumpall -c -U lemmy | sudo tee 15_16_dump.sql > /dev/null
echo "Done."
echo "Stopping postgres..."
sudo docker-compose stop postgres
sudo docker compose stop postgres
echo "Waiting..."
sleep 20s
echo "Removing the old postgres folder"
sudo rm -rf volumes/postgres
echo "Updating docker-compose to use postgres version 16."
sed -i "s/image: postgres:.*/image: postgres:16-alpine/" ./docker-compose.yml
echo "Updating docker compose to use postgres version 16."
sudo sed -i "s/image: .*postgres:.*/image: docker.io/postgres:16-alpine/" ./docker-compose.yml
echo "Starting up new postgres..."
sudo docker-compose up -d postgres
sudo docker compose up -d postgres
echo "Waiting..."
sleep 20s
echo "Importing the database...."
cat 15_16_dump.sql | sudo docker-compose exec -T postgres psql -U lemmy
sudo cat 15_16_dump.sql | sudo docker compose exec -T postgres psql -U lemmy
echo "Done."
echo "Starting up lemmy..."
sudo docker-compose up -d
sudo docker compose up -d
echo "A copy of your old database is at 15_16.dump.sql . You can delete this file if the upgrade went smoothly."
echo "Upgrade complete."


@ -262,12 +262,22 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
// User
.service(
// Account action, I don't like that it's in /user maybe /accounts
// Handle /user/register separately to add the register() rate limitter
// Handle /user/register separately to add the register() rate limiter
web::resource("/user/register")
.guard(guard::Post())
.wrap(rate_limit.register())
.route(web::post().to(register)),
)
// User
.service(
// Handle /user/login separately to add the register() rate limiter
// TODO: pretty annoying way to apply rate limits for register and login, we should
// group them under a common path so that rate limit is only applied once (eg under /account).
web::resource("/user/login")
.guard(guard::Post())
.wrap(rate_limit.register())
.route(web::post().to(login)),
)
.service(
// Handle captcha separately
web::resource("/user/get_captcha")
@ -306,7 +316,6 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
.route("/banned", web::get().to(list_banned_users))
.route("/block", web::post().to(block_person))
// TODO Account actions. I don't like that they're in /user maybe /accounts
.route("/login", web::post().to(login))
.route("/logout", web::post().to(logout))
.route("/delete_account", web::post().to(delete_account))
.route("/password_reset", web::post().to(reset_password))


@ -160,10 +160,10 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
rate_limit_cell.clone(),
);
if !args.disable_scheduled_tasks {
let scheduled_tasks = (!args.disable_scheduled_tasks).then(|| {
// Schedules various cleanup tasks for the DB
let _scheduled_tasks = tokio::task::spawn(scheduled_tasks::setup(context.clone()));
}
tokio::task::spawn(scheduled_tasks::setup(context.clone()))
});
if let Some(prometheus) = SETTINGS.prometheus.clone() {
serve_prometheus(prometheus, context.clone())?;
@ -218,7 +218,7 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?;
let mut terminate = tokio::signal::unix::signal(SignalKind::terminate())?;
if server.is_some() || federate.is_some() {
if server.is_some() || federate.is_some() || scheduled_tasks.is_some() {
tokio::select! {
_ = tokio::signal::ctrl_c() => {
tracing::warn!("Received ctrl-c, shutting down gracefully...");
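
The scheduled-tasks handle is now kept as an `Option`, and the graceful-shutdown branch runs when any of the three subsystems is active. A rough sketch of that gate, using a plain thread where Lemmy spawns a tokio task:

    fn main() {
        let disable_scheduled_tasks = false;
        let scheduled_tasks =
            (!disable_scheduled_tasks).then(|| std::thread::spawn(|| { /* periodic DB cleanup */ }));
        let (server, federate): (Option<()>, Option<()>) = (None, None);
        if server.is_some() || federate.is_some() || scheduled_tasks.is_some() {
            // wait for ctrl-c / SIGTERM here before tearing anything down
        }
        if let Some(handle) = scheduled_tasks {
            handle.join().ok();
        }
    }
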