Merge branch 'release/2.5.2' into 'stable'

Security release 2.5.2

See merge request pleroma/pleroma!3863
Haelwenn 2023-05-26 19:35:31 +00:00
commit 2d193861db
33 changed files with 412 additions and 193 deletions


@ -14,6 +14,26 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
### Removed
## 2.5.2
### Security
- `/proxy` endpoint now sets a Content-Security-Policy (sandbox)
- WebSocket endpoint now respects unauthenticated restrictions for streams of public posts
- OEmbed HTML tags are now filtered
### Changed
- docs: Be more explicit about the level of compatibility of OTP releases
- Set default background worker timeout to 15 minutes
### Fixed
- Atom/RSS formatting (HTML truncation, published, missing summary)
- Remove `static_fe` pipeline for `/users/:nickname/feed`
- Stop Oban from retrying if validation errors occur when processing incoming data
- Make sure object refetching as used by already received polls follows MRF rules
### Removed
- BREAKING: Support for passwords generated with `crypt(3)` (Gnu Social migration artifact)
## 2.5.1
### Added
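To put the first security item in context: the `sandbox` Content-Security-Policy directive makes browsers treat a proxied response as coming from a unique opaque origin, so scripts or forms smuggled into an uploaded file cannot run against the instance. A minimal Plug sketch of the idea (module name and placement are assumptions; the actual change lives in `Pleroma.Web.MediaProxy.MediaProxyController`, shown further down):

```elixir
defmodule MyAppWeb.Plugs.SandboxCSP do
  @moduledoc "Hypothetical plug: force a sandbox CSP onto proxied media responses."
  import Plug.Conn

  def init(opts), do: opts

  # Every response passing through this plug carries the restrictive policy,
  # regardless of what content type the remote server claims.
  def call(conn, _opts) do
    merge_resp_headers(conn, [{"content-security-policy", "sandbox;"}])
  end
end
```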

changelog.d/3126.fix (new file, 1 line)

@ -0,0 +1 @@
MediaProxy responses now return a sandbox CSP header

changelog.d/3883.fix (new file, 1 line)

@ -0,0 +1 @@
Fix abnormal behaviour when refetching a poll

changelog.d/3891.fix (new file, 1 line)

@ -0,0 +1 @@
OEmbed HTML tags are now filtered


@ -0,0 +1 @@
Correctly handle the situation when a poll has both "anyOf" and "oneOf" but one of them is empty


@ -2,15 +2,16 @@
{! backend/installation/otp_vs_from_source.include !}
This guide covers an installation using an OTP release. To install Pleroma from source, please check out the corresponding guide for your distro.
This guide covers an installation using OTP releases as built by the Pleroma project; it is meant as a fallback to distribution packages/recipes, which are the preferred installation method.
To install Pleroma from source, please check out the corresponding guide for your distro.
## Pre-requisites
* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and an `x86_64`, `aarch64` or `armv7l` CPU that you have root access to. If you are not sure if it's compatible, see the [Detecting flavour](#detecting-flavour) section below
* A machine you have root access to running Debian GNU/Linux or compatible (e.g. Ubuntu), or Alpine, on an `x86_64`, `aarch64` or `armv7l` CPU. If you are not sure what you are running, see the [Detecting flavour](#detecting-flavour) section below
* A (sub)domain pointed to the machine
You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.
You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo -i`/`su`.
While in theory OTP releases are possible to install on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu and Alpine.
Similarly to other binaries, OTP releases tend to be only compatible with the distro they are built on; as such, this guide focuses only on Debian/Ubuntu and Alpine.
### Detecting flavour
@ -19,7 +20,7 @@ Paste the following into the shell:
arch="$(uname -m)";if [ "$arch" = "x86_64" ];then arch="amd64";elif [ "$arch" = "armv7l" ];then arch="arm";elif [ "$arch" = "aarch64" ];then arch="arm64";else echo "Unsupported arch: $arch">&2;fi;if getconf GNU_LIBC_VERSION>/dev/null;then libc_postfix="";elif [ "$(ldd 2>&1|head -c 9)" = "musl libc" ];then libc_postfix="-musl";elif [ "$(find /lib/libc.musl*|wc -l)" ];then libc_postfix="-musl";else echo "Unsupported libc">&2;fi;echo "$arch$libc_postfix"
```
If your platform is supported, the output will contain the flavour string; you will need it later. If not, this just means that we don't build releases for your platform, and you can still try installing from source.
This should give your flavour string. If not, this just means that we don't build releases for your platform, and you can still try installing from source.
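For example, a Debian or Ubuntu server on `x86_64` prints `amd64`, while an Alpine host on `aarch64` prints `arm64-musl`.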
### Installing the required packages


@ -8,77 +8,30 @@ defmodule Pleroma.Object.Fetcher do
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Repo
alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.InternalFetchActor
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator
require Logger
require Pleroma.Constants
defp touch_changeset(changeset) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
has_history? = fn
%{"formerRepresentations" => %{"orderedItems" => list}} when is_list(list) -> true
_ -> false
end
internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
remote_history_exists? = has_history?.(new_data)
# If the remote history exists, we treat that as the only source of truth.
new_data =
if has_history?.(old_data) and not remote_history_exists? do
Map.put(new_data, "formerRepresentations", old_data["formerRepresentations"])
else
new_data
end
# If the remote does not have history information, we need to manage it ourselves
new_data =
if not remote_history_exists? do
changed? =
Pleroma.Constants.status_updatable_fields()
|> Enum.any?(fn field -> Map.get(old_data, field) != Map.get(new_data, field) end)
%{updated_object: updated_object} =
new_data
|> Object.Updater.maybe_update_history(old_data,
updated: changed?,
use_history_in_new_object?: false
)
updated_object
else
new_data
end
Map.merge(new_data, internal_fields)
end
defp maybe_reinject_internal_fields(_, new_data), do: new_data
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
defp reinject_object(%Object{data: %{}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with data <- maybe_reinject_internal_fields(object, new_data),
{:ok, data, _} <- ObjectValidator.validate(data, %{}),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
{:ok, object}
with {:ok, new_data, _} <- ObjectValidator.validate(new_data, %{}),
{:ok, new_data} <- MRF.filter(new_data),
{:ok, new_object, _} <-
Object.Updater.do_update_and_invalidate_cache(
object,
new_data,
_touch_changeset? = true
) do
{:ok, new_object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
@ -86,20 +39,11 @@ defmodule Pleroma.Object.Fetcher do
end
end
defp reinject_object(%Object{} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with new_data <- Transmogrifier.fix_object(new_data),
data <- maybe_reinject_internal_fields(object, new_data),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
defp reinject_object(_, new_data) do
with {:ok, object, _} <- Pipeline.common_pipeline(new_data, local: false) do
{:ok, object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
{:error, e}
e -> e
end
end


@ -5,6 +5,9 @@
defmodule Pleroma.Object.Updater do
require Pleroma.Constants
alias Pleroma.Object
alias Pleroma.Repo
def update_content_fields(orig_object_data, updated_object) do
Pleroma.Constants.status_updatable_fields()
|> Enum.reduce(
@ -97,12 +100,14 @@ defmodule Pleroma.Object.Updater do
end
defp maybe_update_poll(to_be_updated, updated_object) do
choice_key = fn data ->
if Map.has_key?(data, "anyOf"), do: "anyOf", else: "oneOf"
choice_key = fn
%{"anyOf" => [_ | _]} -> "anyOf"
%{"oneOf" => [_ | _]} -> "oneOf"
_ -> nil
end
with true <- to_be_updated["type"] == "Question",
key <- choice_key.(updated_object),
key when not is_nil(key) <- choice_key.(updated_object),
true <- key == choice_key.(to_be_updated),
orig_choices <- to_be_updated[key] |> Enum.map(&Map.drop(&1, ["replies"])),
new_choices <- updated_object[key] |> Enum.map(&Map.drop(&1, ["replies"])),
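A small illustration of the new `choice_key` resolution (the maps below are made-up poll fragments): an empty `anyOf` next to a populated `oneOf` now resolves to the populated field instead of blindly preferring `anyOf`.

```elixir
choice_key = fn
  %{"anyOf" => [_ | _]} -> "anyOf"
  %{"oneOf" => [_ | _]} -> "oneOf"
  _ -> nil
end

# An empty "anyOf" no longer wins just because the key is present.
choice_key.(%{"anyOf" => [], "oneOf" => [%{"name" => "yes"}]}) #=> "oneOf"
choice_key.(%{"anyOf" => [%{"name" => "a"}]})                  #=> "anyOf"
choice_key.(%{"type" => "Note"})                               #=> nil
```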
@ -237,4 +242,49 @@ defmodule Pleroma.Object.Updater do
{:history_items, e} -> e
end
end
defp maybe_touch_changeset(changeset, true) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_touch_changeset(changeset, _), do: changeset
def do_update_and_invalidate_cache(orig_object, updated_object, touch_changeset? \\ false) do
orig_object_ap_id = updated_object["id"]
orig_object_data = orig_object.data
%{
updated_data: updated_object_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
} = make_new_object_data_from_update_object(orig_object_data, updated_object)
changeset =
orig_object
|> Repo.preload(:hashtags)
|> Object.change(%{data: updated_object_data})
|> maybe_touch_changeset(touch_changeset?)
with {:ok, new_object} <- Repo.update(changeset),
{:ok, _} <- Object.invalid_object_cache(new_object),
{:ok, _} <- Object.set_cache(new_object),
# The metadata/utils.ex uses the object id for the cache.
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
if used_history_in_new_object? do
with create_activity when not is_nil(create_activity) <-
Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
nil
else
_ -> nil
end
end
{:ok, new_object, updated}
end
end
end
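For orientation, a hedged usage sketch of the new helper (variable names are placeholders): it returns the persisted object together with a boolean telling the caller whether anything user-visible changed. The `SideEffects` hunk below calls it without the third argument, while the fetcher passes `true` so that refetches also bump `updated_at`.

```elixir
{:ok, new_object, changed?} =
  Pleroma.Object.Updater.do_update_and_invalidate_cache(
    orig_object,
    incoming_update_data,
    _touch_changeset? = true
  )

# changed? decides whether the update needs to be streamed out to clients.
```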


@ -428,37 +428,13 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
end
if orig_object_data["type"] in Pleroma.Constants.updatable_object_types() do
%{
updated_data: updated_object_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
} = Object.Updater.make_new_object_data_from_update_object(orig_object_data, updated_object)
{:ok, _, updated} =
Object.Updater.do_update_and_invalidate_cache(orig_object, updated_object)
changeset =
orig_object
|> Repo.preload(:hashtags)
|> Object.change(%{data: updated_object_data})
with {:ok, new_object} <- Repo.update(changeset),
{:ok, _} <- Object.invalid_object_cache(new_object),
{:ok, _} <- Object.set_cache(new_object),
# The metadata/utils.ex uses the object id for the cache.
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
if used_history_in_new_object? do
with create_activity when not is_nil(create_activity) <-
Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
nil
else
_ -> nil
end
end
if updated do
object
|> Activity.normalize()
|> ActivityPub.notify_and_stream()
end
if updated do
object
|> Activity.normalize()
|> ActivityPub.notify_and_stream()
end
end


@ -6,7 +6,6 @@ defmodule Pleroma.Web.Feed.FeedView do
use Phoenix.HTML
use Pleroma.Web, :view
alias Pleroma.Formatter
alias Pleroma.Object
alias Pleroma.User
alias Pleroma.Web.Gettext
@ -72,7 +71,9 @@ defmodule Pleroma.Web.Feed.FeedView do
def last_activity(activities), do: List.last(activities)
def activity_title(%{"content" => content, "summary" => summary} = data, opts \\ %{}) do
def activity_title(%{"content" => content} = data, opts \\ %{}) do
summary = Map.get(data, "summary", "")
title =
cond do
summary != "" -> summary
@ -81,9 +82,8 @@ defmodule Pleroma.Web.Feed.FeedView do
end
title
|> Pleroma.Web.Metadata.Utils.scrub_html()
|> Pleroma.Emoji.Formatter.demojify()
|> Formatter.truncate(opts[:max_length], opts[:omission])
|> Pleroma.Web.Metadata.Utils.scrub_html_and_truncate(opts[:max_length], opts[:omission])
|> HtmlEntities.encode()
end
def activity_description(data) do


@ -12,6 +12,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
alias Pleroma.Web.MediaProxy
alias Plug.Conn
plug(:sandbox)
def remote(conn, %{"sig" => sig64, "url" => url64}) do
with {_, true} <- {:enabled, MediaProxy.enabled?()},
{:ok, url} <- MediaProxy.decode_url(sig64, url64),
@ -202,4 +204,9 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
defp media_proxy_opts do
Config.get([:media_proxy, :proxy_opts], [])
end
defp sandbox(conn, _params) do
conn
|> merge_resp_headers([{"content-security-policy", "sandbox;"}])
end
end


@ -30,12 +30,13 @@ defmodule Pleroma.Web.Metadata.Utils do
|> scrub_html_and_truncate_object_field(object)
end
def scrub_html_and_truncate(content, max_length \\ 200) when is_binary(content) do
def scrub_html_and_truncate(content, max_length \\ 200, omission \\ "...")
when is_binary(content) do
content
|> scrub_html
|> Emoji.Formatter.demojify()
|> HtmlEntities.decode()
|> Formatter.truncate(max_length)
|> Formatter.truncate(max_length, omission)
end
def scrub_html(content) when is_binary(content) do
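Roughly, the effect on feed titles (a sketch only; the 15-character limit and `"..."` omission mirror what the user feed tests further down appear to use, the real values come from the `:feed, :post_title` config):

```elixir
# Entities are decoded before truncating and re-encoded by the feed view,
# so the cut can no longer land in the middle of an entity such as "&#39;".
Pleroma.Web.Metadata.Utils.scrub_html_and_truncate("Won't, didn't fail", 15, "...")
#=> "Won't, didn'..."
```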


@ -38,10 +38,6 @@ defmodule Pleroma.Web.Plugs.AuthenticationPlug do
def call(conn, _), do: conn
def checkpw(password, "$6" <> _ = password_hash) do
:crypt.crypt(password, password_hash) == password_hash
end
def checkpw(password, "$2" <> _ = password_hash) do
# Handle bcrypt passwords for Mastodon migration
Bcrypt.verify_pass(password, password_hash)
@ -60,10 +56,6 @@ defmodule Pleroma.Web.Plugs.AuthenticationPlug do
do_update_password(user, password)
end
def maybe_update_password(%User{password_hash: "$6" <> _} = user, password) do
do_update_password(user, password)
end
def maybe_update_password(user, _), do: {:ok, user}
defp do_update_password(user, password) do


@ -6,8 +6,8 @@ defmodule Pleroma.Web.RichMedia.Parsers.OEmbed do
def parse(html, _data) do
with elements = [_ | _] <- get_discovery_data(html),
oembed_url when is_binary(oembed_url) <- get_oembed_url(elements),
{:ok, oembed_data} <- get_oembed_data(oembed_url) do
oembed_data
{:ok, oembed_data = %{"html" => html}} <- get_oembed_data(oembed_url) do
%{oembed_data | "html" => Pleroma.HTML.filter_tags(html)}
else
_e -> %{}
end
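A hedged before/after sketch of what the added `Pleroma.HTML.filter_tags/1` call does to a typical oEmbed snippet (the exact attributes kept depend on the configured scrubber; the shape below follows the updated parser test further down):

```elixir
~s(<a data-flickr-embed="true" href="https://example.com/p/1">pic</a><script src="https://example.com/embed.js"></script>)
|> Pleroma.HTML.filter_tags()
#=> ~s(<a href="https://example.com/p/1">pic</a>)
```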


@ -835,8 +835,7 @@ defmodule Pleroma.Web.Router do
end
scope "/", Pleroma.Web do
# Note: html format is supported only if static FE is enabled
pipe_through([:accepts_html_xml, :static_fe])
pipe_through([:accepts_html_xml])
get("/users/:nickname/feed", Feed.UserController, :feed, as: :user_feed)
end


@ -25,6 +25,7 @@ defmodule Pleroma.Web.Streamer do
def registry, do: @registry
@public_streams ["public", "public:local", "public:media", "public:local:media"]
@local_streams ["public:local", "public:local:media"]
@user_streams ["user", "user:notification", "direct", "user:pleroma_chat"]
@doc "Expands and authorizes a stream, and registers the process for streaming."
@ -41,14 +42,37 @@ defmodule Pleroma.Web.Streamer do
end
end
defp can_access_stream(user, oauth_token, kind) do
with {_, true} <- {:restrict?, Config.restrict_unauthenticated_access?(:timelines, kind)},
{_, %User{id: user_id}, %Token{user_id: user_id}} <- {:user, user, oauth_token},
{_, true} <-
{:scopes,
OAuthScopesPlug.filter_descendants(["read:statuses"], oauth_token.scopes) != []} do
true
else
{:restrict?, _} ->
true
_ ->
false
end
end
@doc "Expand and authorizes a stream"
@spec get_topic(stream :: String.t(), User.t() | nil, Token.t() | nil, Map.t()) ::
{:ok, topic :: String.t()} | {:error, :bad_topic}
def get_topic(stream, user, oauth_token, params \\ %{})
# Allow all public streams.
def get_topic(stream, _user, _oauth_token, _params) when stream in @public_streams do
{:ok, stream}
# Allow all public streams if the instance allows unauthenticated access.
# Otherwise, only allow users with valid oauth tokens.
def get_topic(stream, user, oauth_token, _params) when stream in @public_streams do
kind = if stream in @local_streams, do: :local, else: :federated
if can_access_stream(user, oauth_token, kind) do
{:ok, stream}
else
{:error, :unauthorized}
end
end
# Allow all hashtags streams.
@ -57,12 +81,20 @@ defmodule Pleroma.Web.Streamer do
end
# Allow remote instance streams.
def get_topic("public:remote", _user, _oauth_token, %{"instance" => instance} = _params) do
{:ok, "public:remote:" <> instance}
def get_topic("public:remote", user, oauth_token, %{"instance" => instance} = _params) do
if can_access_stream(user, oauth_token, :federated) do
{:ok, "public:remote:" <> instance}
else
{:error, :unauthorized}
end
end
def get_topic("public:remote:media", _user, _oauth_token, %{"instance" => instance} = _params) do
{:ok, "public:remote:media:" <> instance}
def get_topic("public:remote:media", user, oauth_token, %{"instance" => instance} = _params) do
if can_access_stream(user, oauth_token, :federated) do
{:ok, "public:remote:media:" <> instance}
else
{:error, :unauthorized}
end
end
# Expand user streams.
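The gate uses the same instance setting that already governs the REST timelines. A configuration sketch (the values here are examples, not defaults):

```elixir
# config/prod.secret.exs (example values)
config :pleroma, :restrict_unauthenticated,
  timelines: %{local: true, federated: true}
```

With these set to `true`, anonymous WebSocket clients now get `{:error, :unauthorized}` for the public streams, matching the new tests below.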


@ -4,8 +4,8 @@
<id><%= @data["id"] %></id>
<title><%= activity_title(@data, Keyword.get(@feed_config, :post_title, %{})) %></title>
<content type="html"><%= activity_description(@data) %></content>
<published><%= to_rfc3339(@activity.data["published"]) %></published>
<updated><%= to_rfc3339(@activity.data["published"]) %></updated>
<published><%= to_rfc3339(@data["published"]) %></published>
<updated><%= to_rfc3339(@data["published"]) %></updated>
<ostatus:conversation ref="<%= activity_context(@activity) %>">
<%= activity_context(@activity) %>
</ostatus:conversation>


@ -4,7 +4,7 @@
<guid><%= @data["id"] %></guid>
<title><%= activity_title(@data, Keyword.get(@feed_config, :post_title, %{})) %></title>
<description><%= activity_description(@data) %></description>
<pubDate><%= to_rfc2822(@activity.data["published"]) %></pubDate>
<pubDate><%= to_rfc2822(@data["published"]) %></pubDate>
<ostatus:conversation ref="<%= activity_context(@activity) %>">
<%= activity_context(@activity) %>
</ostatus:conversation>


@ -7,8 +7,8 @@
<id><%= @data["id"] %></id>
<title><%= activity_title(@data, Keyword.get(@feed_config, :post_title, %{})) %></title>
<content type="html"><%= activity_description(@data) %></content>
<published><%= to_rfc3339(@activity.data["published"]) %></published>
<updated><%= to_rfc3339(@activity.data["published"]) %></updated>
<published><%= to_rfc3339(@data["published"]) %></published>
<updated><%= to_rfc3339(@data["published"]) %></updated>
<ostatus:conversation ref="<%= activity_context(@activity) %>">
<%= activity_context(@activity) %>
</ostatus:conversation>


@ -4,7 +4,7 @@
<guid isPermalink="true"><%= activity_context(@activity) %></guid>
<link><%= activity_context(@activity) %></link>
<pubDate><%= to_rfc2822(@activity.data["published"]) %></pubDate>
<pubDate><%= to_rfc2822(@data["published"]) %></pubDate>
<description><%= activity_description(@data) %></description>
<%= for attachment <- @data["attachment"] || [] do %>


@ -45,5 +45,5 @@ defmodule Pleroma.Workers.BackgroundWorker do
end
@impl Oban.Worker
def timeout(_job), do: :timer.seconds(5)
def timeout(_job), do: :timer.seconds(900)
end


@ -13,6 +13,9 @@ defmodule Pleroma.Workers.ReceiverWorker do
{:ok, res}
else
{:error, :origin_containment_failed} -> {:cancel, :origin_containment_failed}
{:error, :already_present} -> {:cancel, :already_present}
{:error, {:validate_object, reason}} -> {:cancel, reason}
{:error, {:error, {:validate, reason}}} -> {:cancel, reason}
{:error, {:reject, reason}} -> {:cancel, reason}
e -> e
end
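The point of mapping these failures to `{:cancel, reason}` is Oban's retry behaviour: a plain `{:error, _}` return is retried with backoff until `max_attempts` is exhausted, while `{:cancel, reason}` records the job as cancelled and stops further attempts. A schematic worker (name, queue and args are illustrative):

```elixir
defmodule MyApp.Workers.ExampleWorker do
  use Oban.Worker, queue: :federator_incoming, max_attempts: 5

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"duplicate" => true}}) do
    # Not retried: the job is marked as cancelled.
    {:cancel, :already_present}
  end

  def perform(%Oban.Job{}) do
    # Retried until max_attempts is reached.
    {:error, :try_again_later}
  end
end
```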


@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
def project do
[
app: :pleroma,
version: version("2.5.1"),
version: version("2.5.2"),
elixir: "~> 1.11",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
@ -150,7 +150,6 @@ defmodule Pleroma.Mixfile do
{:sweet_xml, "~> 0.7.2"},
{:earmark, "~> 1.4.22"},
{:bbcode_pleroma, "~> 0.2.0"},
{:crypt, "~> 1.0"},
{:cors_plug, "~> 2.0"},
{:web_push_encryption, "~> 0.3.1"},
{:swoosh, "~> 1.0"},


@ -21,7 +21,6 @@
"cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"},
"credo": {:hex, :credo, "1.6.7", "323f5734350fd23a456f2688b9430e7d517afb313fbd38671b8a4449798a7854", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "41e110bfb007f7eda7f897c10bf019ceab9a0b269ce79f015d54b0dcf4fc7dd3"},
"crontab": {:hex, :crontab, "1.1.8", "2ce0e74777dfcadb28a1debbea707e58b879e6aa0ffbf9c9bb540887bce43617", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
"crypt": {:hex, :crypt, "1.0.1", "a3567e1c651a2ec42c6650d9f3ab789e0f12a508c060653a9bbb5fafe60f043c", [:rebar3], [], "hexpm", "968dffe321c7a5d9f9b4577c4a4ff56a1c26d1a8a2270eb22c7636a0b43d3982"},
"custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"},
"db_connection": {:hex, :db_connection, "2.4.2", "f92e79aff2375299a16bcb069a14ee8615c3414863a6fef93156aee8e86c2ff3", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4fe53ca91b99f55ea249693a0229356a08f4d1a7931d8ffa79289b145fe83668"},
"decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"},


@ -9,8 +9,12 @@ defmodule Pleroma.Object.FetcherTest do
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Object.Fetcher
alias Pleroma.Web.ActivityPub.ObjectValidator
require Pleroma.Constants
import Mock
import Pleroma.Factory
import Tesla.Mock
setup do
@ -284,6 +288,8 @@ defmodule Pleroma.Object.FetcherTest do
describe "refetching" do
setup do
insert(:user, ap_id: "https://mastodon.social/users/emelie")
object1 = %{
"id" => "https://mastodon.social/1",
"actor" => "https://mastodon.social/users/emelie",
@ -293,10 +299,14 @@ defmodule Pleroma.Object.FetcherTest do
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"summary" => ""
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z",
"updated" => "2023-05-09 23:43:20Z"
}
{:ok, local_object1, _} = ObjectValidator.validate(object1, [])
object2 = %{
"id" => "https://mastodon.social/2",
"actor" => "https://mastodon.social/users/emelie",
@ -306,8 +316,10 @@ defmodule Pleroma.Object.FetcherTest do
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z",
"updated" => "2023-05-09 23:43:25Z",
"formerRepresentations" => %{
"type" => "OrderedCollection",
"orderedItems" => [
@ -319,14 +331,18 @@ defmodule Pleroma.Object.FetcherTest do
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"summary" => ""
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z",
"updated" => "2023-05-09 23:43:21Z"
}
],
"totalItems" => 1
}
}
{:ok, local_object2, _} = ObjectValidator.validate(object2, [])
mock(fn
%{
method: :get,
@ -335,7 +351,7 @@ defmodule Pleroma.Object.FetcherTest do
%Tesla.Env{
status: 200,
headers: [{"content-type", "application/activity+json"}],
body: Jason.encode!(object1)
body: Jason.encode!(object1 |> Map.put("updated", "2023-05-09 23:44:20Z"))
}
%{
@ -345,7 +361,7 @@ defmodule Pleroma.Object.FetcherTest do
%Tesla.Env{
status: 200,
headers: [{"content-type", "application/activity+json"}],
body: Jason.encode!(object2)
body: Jason.encode!(object2 |> Map.put("updated", "2023-05-09 23:44:20Z"))
}
%{
@ -370,7 +386,7 @@ defmodule Pleroma.Object.FetcherTest do
apply(HttpRequestMock, :request, [env])
end)
%{object1: object1, object2: object2}
%{object1: local_object1, object2: local_object2}
end
test "it keeps formerRepresentations if remote does not have this attr", %{object1: object1} do
@ -388,8 +404,9 @@ defmodule Pleroma.Object.FetcherTest do
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"summary" => ""
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z"
}
],
"totalItems" => 1
@ -467,6 +484,53 @@ defmodule Pleroma.Object.FetcherTest do
}
} = refetched.data
end
test "it keeps the history intact if only updated time has changed",
%{object1: object1} do
full_object1 =
object1
|> Map.merge(%{
"updated" => "2023-05-08 23:43:47Z",
"formerRepresentations" => %{
"type" => "OrderedCollection",
"orderedItems" => [
%{"type" => "Note", "content" => "mew mew 1"}
],
"totalItems" => 1
}
})
{:ok, o} = Object.create(full_object1)
assert {:ok, refetched} = Fetcher.refetch_object(o)
assert %{
"content" => "test 1",
"formerRepresentations" => %{
"orderedItems" => [
%{"content" => "mew mew 1"}
],
"totalItems" => 1
}
} = refetched.data
end
test "it goes through ObjectValidator and MRF", %{object2: object2} do
with_mock Pleroma.Web.ActivityPub.MRF, [:passthrough],
filter: fn
%{"type" => "Note"} = object ->
{:ok, Map.put(object, "content", "MRFd content")}
arg ->
passthrough([arg])
end do
{:ok, o} = Object.create(object2)
assert {:ok, refetched} = Fetcher.refetch_object(o)
assert %{"content" => "MRFd content"} = refetched.data
end
end
end
describe "fetch with history" do


@ -133,7 +133,7 @@ defmodule Pleroma.Web.FederatorTest do
assert {:ok, _activity} = ObanHelpers.perform(job)
assert {:ok, job} = Federator.incoming_ap_doc(params)
assert {:error, :already_present} = ObanHelpers.perform(job)
assert {:cancel, :already_present} = ObanHelpers.perform(job)
end
test "rejects incoming AP docs with incorrect origin" do


@ -57,9 +57,23 @@ defmodule Pleroma.Web.Feed.UserControllerTest do
)
note_activity2 = insert(:note_activity, note: note2)
note3 =
insert(:note,
user: user,
data: %{
"content" => "This note tests whether HTML entities are truncated properly",
"summary" => "Won't, didn't fail",
"inReplyTo" => note_activity2.id
}
)
_note_activity3 = insert(:note_activity, note: note3)
object = Object.normalize(note_activity, fetch: false)
[user: user, object: object, max_id: note_activity2.id]
encoded_title = FeedView.activity_title(note3.data)
[user: user, object: object, max_id: note_activity2.id, encoded_title: encoded_title]
end
test "gets an atom feed", %{conn: conn, user: user, object: object, max_id: max_id} do
@ -74,7 +88,7 @@ defmodule Pleroma.Web.Feed.UserControllerTest do
|> SweetXml.parse()
|> SweetXml.xpath(~x"//entry/title/text()"l)
assert activity_titles == ['2hu', '2hu & as']
assert activity_titles == ['Won\'t, didn\'...', '2hu', '2hu & as']
assert resp =~ FeedView.escape(object.data["content"])
assert resp =~ FeedView.escape(object.data["summary"])
assert resp =~ FeedView.escape(object.data["context"])
@ -105,7 +119,7 @@ defmodule Pleroma.Web.Feed.UserControllerTest do
|> SweetXml.parse()
|> SweetXml.xpath(~x"//item/title/text()"l)
assert activity_titles == ['2hu', '2hu & as']
assert activity_titles == ['Won\'t, didn\'...', '2hu', '2hu & as']
assert resp =~ FeedView.escape(object.data["content"])
assert resp =~ FeedView.escape(object.data["summary"])
assert resp =~ FeedView.escape(object.data["context"])
@ -176,6 +190,30 @@ defmodule Pleroma.Web.Feed.UserControllerTest do
|> get("/users/#{user.nickname}/feed.rss")
|> response(200)
end
test "does not mangle HTML entities midway", %{
conn: conn,
user: user,
object: object,
encoded_title: encoded_title
} do
resp =
conn
|> put_req_header("accept", "application/atom+xml")
|> get(user_feed_path(conn, :feed, user.nickname))
|> response(200)
activity_titles =
resp
|> SweetXml.parse()
|> SweetXml.xpath(~x"//entry/title/text()"l)
assert activity_titles == ['Won\'t, didn\'...', '2hu', '2hu & as']
assert resp =~ FeedView.escape(object.data["content"])
assert resp =~ FeedView.escape(object.data["summary"])
assert resp =~ FeedView.escape(object.data["context"])
assert resp =~ encoded_title
end
end
# Note: see ActivityPubControllerTest for JSON format tests


@ -6,7 +6,9 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
use Pleroma.Web.ConnCase
import Mock
import Mox
alias Pleroma.ReverseProxy.ClientMock
alias Pleroma.Web.MediaProxy
alias Plug.Conn
@ -74,6 +76,20 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
assert %Conn{status: 404, resp_body: "Not Found"} = get(conn, url)
end
end
test "it applies sandbox CSP to MediaProxy requests", %{conn: conn} do
media_url = "https://lain.com/image.png"
media_proxy_url = MediaProxy.encode_url(media_url)
ClientMock
|> expect(:request, fn :get, ^media_url, _, _, _ ->
{:ok, 200, [{"content-type", "image/png"}]}
end)
%Conn{resp_headers: headers} = get(conn, media_proxy_url)
assert {"content-security-policy", "sandbox;"} in headers
end
end
describe "Media Preview Proxy" do


@ -72,7 +72,7 @@ defmodule Pleroma.Web.Metadata.UtilsTest do
end
end
describe "scrub_html_and_truncate/2" do
describe "scrub_html_and_truncate/3" do
test "it returns text without encode HTML" do
assert Utils.scrub_html_and_truncate("Pleroma's really cool!") == "Pleroma's really cool!"
end


@ -70,28 +70,6 @@ defmodule Pleroma.Web.Plugs.AuthenticationPlugTest do
assert "$pbkdf2" <> _ = user.password_hash
end
@tag :skip_on_mac
test "with a crypt hash, it updates to a pkbdf2 hash", %{conn: conn} do
user =
insert(:user,
password_hash:
"$6$9psBWV8gxkGOZWBz$PmfCycChoxeJ3GgGzwvhlgacb9mUoZ.KUXNCssekER4SJ7bOK53uXrHNb2e4i8yPFgSKyzaW9CcmrDXWIEMtD1"
)
conn =
conn
|> assign(:auth_user, user)
|> assign(:auth_credentials, %{password: "password"})
|> AuthenticationPlug.call(%{})
assert conn.assigns.user.id == conn.assigns.auth_user.id
assert conn.assigns.token == nil
assert PlugHelper.plug_skipped?(conn, OAuthScopesPlug)
user = User.get_by_id(user.id)
assert "$pbkdf2" <> _ = user.password_hash
end
describe "checkpw/2" do
test "check pbkdf2 hash" do
hash =
@ -101,14 +79,6 @@ defmodule Pleroma.Web.Plugs.AuthenticationPlugTest do
refute AuthenticationPlug.checkpw("test-password1", hash)
end
@tag :skip_on_mac
test "check sha512-crypt hash" do
hash =
"$6$9psBWV8gxkGOZWBz$PmfCycChoxeJ3GgGzwvhlgacb9mUoZ.KUXNCssekER4SJ7bOK53uXrHNb2e4i8yPFgSKyzaW9CcmrDXWIEMtD1"
assert AuthenticationPlug.checkpw("password", hash)
end
test "check bcrypt hash" do
hash = "$2a$10$uyhC/R/zoE1ndwwCtMusK.TLVzkQ/Ugsbqp3uXI.CTTz0gBw.24jS"


@ -129,7 +129,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
}}
end
test "parses OEmbed" do
test "parses OEmbed and filters HTML tags" do
assert Parser.parse("http://example.com/oembed") ==
{:ok,
%{
@ -139,7 +139,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
"flickr_type" => "photo",
"height" => "768",
"html" =>
"<a data-flickr-embed=\"true\" href=\"https://www.flickr.com/photos/bees/2362225867/\" title=\"Bacon Lollys by \u202E\u202D\u202Cbees\u202C, on Flickr\"><img src=\"https://farm4.staticflickr.com/3040/2362225867_4a87ab8baf_b.jpg\" width=\"1024\" height=\"768\" alt=\"Bacon Lollys\"></a><script async src=\"https://embedr.flickr.com/assets/client-code.js\" charset=\"utf-8\"></script>",
"<a href=\"https://www.flickr.com/photos/bees/2362225867/\" title=\"Bacon Lollys by \u202E\u202D\u202Cbees\u202C, on Flickr\"><img src=\"https://farm4.staticflickr.com/3040/2362225867_4a87ab8baf_b.jpg\" width=\"1024\" height=\"768\" alt=\"Bacon Lollys\"/></a>",
"license" => "All Rights Reserved",
"license_id" => 0,
"provider_name" => "Flickr",


@ -29,6 +29,26 @@ defmodule Pleroma.Web.StreamerTest do
assert {:ok, "public:local:media"} = Streamer.get_topic("public:local:media", nil, nil)
end
test "rejects local public streams if restricted_unauthenticated is on" do
clear_config([:restrict_unauthenticated, :timelines, :local], true)
assert {:error, :unauthorized} = Streamer.get_topic("public:local", nil, nil)
assert {:error, :unauthorized} = Streamer.get_topic("public:local:media", nil, nil)
end
test "rejects remote public streams if restricted_unauthenticated is on" do
clear_config([:restrict_unauthenticated, :timelines, :federated], true)
assert {:error, :unauthorized} = Streamer.get_topic("public", nil, nil)
assert {:error, :unauthorized} = Streamer.get_topic("public:media", nil, nil)
assert {:error, :unauthorized} =
Streamer.get_topic("public:remote", nil, nil, %{"instance" => "lain.com"})
assert {:error, :unauthorized} =
Streamer.get_topic("public:remote:media", nil, nil, %{"instance" => "lain.com"})
end
test "allows instance streams" do
assert {:ok, "public:remote:lain.com"} =
Streamer.get_topic("public:remote", nil, nil, %{"instance" => "lain.com"})
@ -69,6 +89,63 @@ defmodule Pleroma.Web.StreamerTest do
end
end
test "allows local public streams if restricted_unauthenticated is on", %{
user: user,
token: oauth_token
} do
clear_config([:restrict_unauthenticated, :timelines, :local], true)
%{token: read_notifications_token} = oauth_access(["read:notifications"], user: user)
%{token: badly_scoped_token} = oauth_access(["irrelevant:scope"], user: user)
assert {:ok, "public:local"} = Streamer.get_topic("public:local", user, oauth_token)
assert {:ok, "public:local:media"} =
Streamer.get_topic("public:local:media", user, oauth_token)
for token <- [read_notifications_token, badly_scoped_token] do
assert {:error, :unauthorized} = Streamer.get_topic("public:local", user, token)
assert {:error, :unauthorized} = Streamer.get_topic("public:local:media", user, token)
end
end
test "allows remote public streams if restricted_unauthenticated is on", %{
user: user,
token: oauth_token
} do
clear_config([:restrict_unauthenticated, :timelines, :federated], true)
%{token: read_notifications_token} = oauth_access(["read:notifications"], user: user)
%{token: badly_scoped_token} = oauth_access(["irrelevant:scope"], user: user)
assert {:ok, "public"} = Streamer.get_topic("public", user, oauth_token)
assert {:ok, "public:media"} = Streamer.get_topic("public:media", user, oauth_token)
assert {:ok, "public:remote:lain.com"} =
Streamer.get_topic("public:remote", user, oauth_token, %{"instance" => "lain.com"})
assert {:ok, "public:remote:media:lain.com"} =
Streamer.get_topic("public:remote:media", user, oauth_token, %{
"instance" => "lain.com"
})
for token <- [read_notifications_token, badly_scoped_token] do
assert {:error, :unauthorized} = Streamer.get_topic("public", user, token)
assert {:error, :unauthorized} = Streamer.get_topic("public:media", user, token)
assert {:error, :unauthorized} =
Streamer.get_topic("public:remote", user, token, %{
"instance" => "lain.com"
})
assert {:error, :unauthorized} =
Streamer.get_topic("public:remote:media", user, token, %{
"instance" => "lain.com"
})
end
end
test "allows user streams (with proper OAuth token scopes)", %{
user: user,
token: read_oauth_token


@ -11,7 +11,7 @@ defmodule Pleroma.Workers.ReceiverWorkerTest do
alias Pleroma.Workers.ReceiverWorker
test "it ignores MRF reject" do
test "it does not retry MRF reject" do
params = insert(:note).data
with_mock Pleroma.Web.ActivityPub.Transmogrifier,
@ -22,4 +22,31 @@ defmodule Pleroma.Workers.ReceiverWorkerTest do
})
end
end
test "it does not retry ObjectValidator reject" do
params =
insert(:note_activity).data
|> Map.put("id", Pleroma.Web.ActivityPub.Utils.generate_activity_id())
|> Map.put("object", %{
"type" => "Note",
"id" => Pleroma.Web.ActivityPub.Utils.generate_object_id()
})
with_mock Pleroma.Web.ActivityPub.ObjectValidator, [:passthrough],
validate: fn _, _ -> {:error, %Ecto.Changeset{}} end do
assert {:cancel, {:error, %Ecto.Changeset{}}} =
ReceiverWorker.perform(%Oban.Job{
args: %{"op" => "incoming_ap_doc", "params" => params}
})
end
end
test "it does not retry duplicates" do
params = insert(:note_activity).data
assert {:cancel, :already_present} =
ReceiverWorker.perform(%Oban.Job{
args: %{"op" => "incoming_ap_doc", "params" => params}
})
end
end