# akkoma/lib/pleroma/web/common_api/utils.ex


# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.CommonAPI.Utils do
import Pleroma.Web.Gettext

alias Calendar.Strftime
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Conversation.Participation
alias Pleroma.Formatter
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft
alias Pleroma.Web.MediaProxy
alias Pleroma.Web.Utils.Params
require Logger
require Pleroma.Constants
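# Media attachments can only be used by the user who uploaded them:
# attachments_from_ids/3 checks ownership via Object.authorize_access/2, so a
# post referencing someone else's media id fails instead of leaking the upload.
#
# Illustrative call shape (the id is made up):
#
#   attachments_from_ids(user, %{media_ids: ["1234"]})
#   #=> [object_data] on success, {:error, reason} if access is denied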
def attachments_from_ids(user, %{media_ids: ids}) do
attachments_from_ids(user, ids, [])
end

def attachments_from_ids(_, _), do: []

defp attachments_from_ids(_user, [], acc), do: Enum.reverse(acc)
defp attachments_from_ids(user, [media_id | ids], acc) do
with {_, %Object{} = object} <- {:get, get_attachment(media_id)},
:ok <- Object.authorize_access(object, user) do
attachments_from_ids(user, ids, [object.data | acc])
else
{:get, _} -> attachments_from_ids(user, ids, acc)
{:error, reason} -> {:error, reason}
end
end
def get_attachment(media_id) do
with %Object{} = object <- Repo.get(Object, media_id),
true <- object.data["type"] in Pleroma.Constants.attachment_types() do
object
else
_ -> nil
end
end
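# Addressing rules derived from the get_to_and_cc/1 clauses below:
#
#   public     -> to: as_public + mentions,          cc: author's followers
#   local      -> to: local-only public + mentions,  cc: author's followers
#   unlisted   -> to: author's followers + mentions, cc: as_public
#   private    -> to: author's followers + mentions, cc: []
#   direct     -> to: mentions only,                 cc: []
#   {:list, _} -> to: mentions only,                 cc: [] (bcc is added later
#                 by maybe_add_list_data/3)
#
# Replies may additionally address the replied-to actor, and a reply within a
# Conversation addresses that conversation's participants instead.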
@spec get_to_and_cc(ActivityDraft.t()) :: {list(String.t()), list(String.t())}
def get_to_and_cc(%{in_reply_to_conversation: %Participation{} = participation}) do
participation = Repo.preload(participation, :recipients)
{Enum.map(participation.recipients, & &1.ap_id), []}
end
def get_to_and_cc(%{visibility: visibility} = draft) when visibility in ["public", "local"] do
to =
case visibility do
"public" -> [Pleroma.Constants.as_public() | draft.mentions]
"local" -> [Utils.as_local_public() | draft.mentions]
end
cc = [draft.user.follower_address]
if draft.in_reply_to do
{Enum.uniq([draft.in_reply_to.data["actor"] | to]), cc}
else
{to, cc}
end
end
def get_to_and_cc(%{visibility: "unlisted"} = draft) do
to = [draft.user.follower_address | draft.mentions]
cc = [Pleroma.Constants.as_public()]
if draft.in_reply_to do
{Enum.uniq([draft.in_reply_to.data["actor"] | to]), cc}
else
{to, cc}
end
end
def get_to_and_cc(%{visibility: "private"} = draft) do
{to, cc} = get_to_and_cc(struct(draft, visibility: "direct"))
{[draft.user.follower_address | to], cc}
end
def get_to_and_cc(%{visibility: "direct"} = draft) do
# If the OP is a DM already, add the implicit actor.
if draft.in_reply_to && Visibility.is_direct?(draft.in_reply_to) do
{Enum.uniq([draft.in_reply_to.data["actor"] | draft.mentions]), []}
else
{draft.mentions, []}
end
end
def get_to_and_cc(%{visibility: {:list, _}, mentions: mentions}), do: {mentions, []}
def get_addressed_users(_, to) when is_list(to) do
User.get_ap_ids_by_nicknames(to)
end
def get_addressed_users(mentioned_users, _), do: mentioned_users
def maybe_add_list_data(activity_params, user, {:list, list_id}) do
case Pleroma.List.get(list_id, user) do
%Pleroma.List{} = list ->
activity_params
|> put_in([:additional, "bcc"], [list.ap_id])
|> put_in([:additional, "listMessage"], list.ap_id)
|> put_in([:object, "listMessage"], list.ap_id)
_ ->
activity_params
end
end
def maybe_add_list_data(activity_params, _, _), do: activity_params
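# Illustrative input/output for make_poll_data/1 (values made up, shape taken
# from the clauses below):
#
#   make_poll_data(%{poll: %{options: ["yes", "no"], expires_in: 600}})
#   #=> {:ok, {%{"type" => "Question", "oneOf" => [...], "closed" => "..."}, %{}}}
#
# With "multiple" set the options land under "anyOf" instead of "oneOf".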
def make_poll_data(%{"poll" => %{"expires_in" => expires_in}} = data)
when is_binary(expires_in) do
# In some cases mastofe sends out strings instead of integers
data
|> put_in(["poll", "expires_in"], String.to_integer(expires_in))
|> make_poll_data()
end
def make_poll_data(%{poll: %{options: options, expires_in: expires_in}} = data)
when is_list(options) do
limits = Config.get([:instance, :poll_limits])
options = options |> Enum.uniq()
with :ok <- validate_poll_expiration(expires_in, limits),
:ok <- validate_poll_options_amount(options, limits),
:ok <- validate_poll_options_length(options, limits) do
{option_notes, emoji} =
Enum.map_reduce(options, %{}, fn option, emoji ->
note = %{
"name" => option,
"type" => "Note",
"replies" => %{"type" => "Collection", "totalItems" => 0}
}
{note, Map.merge(emoji, Pleroma.Emoji.Formatter.get_emoji_map(option))}
end)
end_time =
DateTime.utc_now()
|> DateTime.add(expires_in)
|> DateTime.to_iso8601()
key = if Params.truthy_param?(data.poll[:multiple]), do: "anyOf", else: "oneOf"
poll = %{"type" => "Question", key => option_notes, "closed" => end_time}
{:ok, {poll, emoji}}
end
end
def make_poll_data(%{"poll" => poll}) when is_map(poll) do
{:error, "Invalid poll"}
end
def make_poll_data(_data) do
{:ok, {%{}, %{}}}
end
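# The validators below read their limits from the :poll_limits instance config
# (max_options, max_option_chars, min_expiration, max_expiration). A poll must
# have at least two distinct options, each within the length limit, and an
# expiry between the configured bounds.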
defp validate_poll_options_amount(options, %{max_options: max_options}) do
cond do
Enum.count(options) < 2 ->
{:error, "Poll must contain at least 2 options"}
Enum.count(options) > max_options ->
{:error, "Poll can't contain more than #{max_options} options"}
true ->
:ok
end
end
defp validate_poll_options_length(options, %{max_option_chars: max_option_chars}) do
if Enum.any?(options, &(String.length(&1) > max_option_chars)) do
{:error, "Poll options cannot be longer than #{max_option_chars} characters each"}
else
:ok
end
end
defp validate_poll_expiration(expires_in, %{min_expiration: min, max_expiration: max}) do
cond do
expires_in > max -> {:error, "Expiration date is too far in the future"}
expires_in < min -> {:error, "Expiration date is too soon"}
true -> :ok
end
end
def make_content_html(%ActivityDraft{} = draft) do
attachment_links =
draft.params
|> Map.get("attachment_links", Config.get([:instance, :attachment_links]))
|> Params.truthy_param?()
content_type = get_content_type(draft.params[:content_type])
options =
if draft.visibility == "direct" && Config.get([:instance, :safe_dm_mentions]) do
[safe_mention: true]
else
[]
end
draft.status
|> format_input(content_type, options)
|> maybe_add_attachments(draft.attachments, attachment_links)
end
def get_content_type(content_type) do
if Enum.member?(Config.get([:instance, :allowed_post_formats]), content_type) do
content_type
else
"text/plain"
end
end
def make_context(%{in_reply_to_conversation: %Participation{} = participation}) do
Repo.preload(participation, :conversation).conversation.ap_id
end
def make_context(%{in_reply_to: %Activity{data: %{"context" => context}}}), do: context
def make_context(%{quote: %Activity{data: %{"context" => context}}}), do: context
def make_context(_), do: Utils.generate_context_id()
def maybe_add_attachments(parsed, _attachments, false = _no_links), do: parsed
def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
text = add_attachments(text, attachments)
{text, mentions, tags}
end
def add_attachments(text, attachments) do
attachment_text = Enum.map(attachments, &build_attachment_link/1)
Enum.join([text | attachment_text], "<br>")
end
defp build_attachment_link(%{"url" => [%{"href" => href} | _]} = attachment) do
name = attachment["name"] || URI.decode(Path.basename(href))
href = MediaProxy.url(href)
"<a href=\"#{href}\" class='attachment'>#{shortname(name)}</a>"
end
defp build_attachment_link(_), do: ""
def format_input(text, format, options \\ [])
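# Illustrative call shape for format_input/3 (the status text is made up); the
# result is a {html, mentions, tags} tuple produced by the Formatter pipeline:
#
#   {html, mentions, tags} = format_input("hello @alice #tag", "text/plain")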
@doc """
Formats text as plain text, BBCode, HTML, Markdown, or MFM (`text/x.misskeymarkdown`).
"""
def format_input(text, "text/plain", options) do
text
|> Formatter.html_escape("text/plain")
|> Formatter.linkify(options)
|> (fn {text, mentions, tags} ->
{String.replace(text, ~r/\r?\n/, "<br>"), mentions, tags}
end).()
end
def format_input(text, "text/bbcode", options) do
text
|> String.replace(~r/\r/, "")
|> Formatter.html_escape("text/plain")
|> BBCode.to_html()
|> (fn {:ok, html} -> html end).()
|> Formatter.linkify(options)
end
def format_input(text, "text/html", options) do
text
|> Formatter.html_escape("text/html")
|> Formatter.linkify(options)
end
def format_input(text, "text/x.misskeymarkdown", options) do
text
|> Formatter.markdown_to_html(%{breaks: true})
|> MfmParser.Parser.parse()
|> MfmParser.Encoder.to_html()
|> Formatter.linkify(options)
|> Formatter.html_escape("text/html")
end
def format_input(text, "text/markdown", options) do
text
|> Formatter.mentions_escape(options)
|> Formatter.markdown_to_html()
|> Formatter.linkify(options)
|> Formatter.html_escape("text/html")
end
def format_naive_asctime(date) do
date |> DateTime.from_naive!("Etc/UTC") |> format_asctime
end
def format_asctime(date) do
Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
end
def date_to_asctime(date) when is_binary(date) do
with {:ok, date, _offset} <- DateTime.from_iso8601(date) do
format_asctime(date)
else
_e ->
Logger.warning("Date #{date} in wrong format, must be ISO 8601")
""
end
end
def date_to_asctime(date) do
Logger.warning("Date #{date} in wrong format, must be ISO 8601")
""
end
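# Illustrative conversions for to_masto_date/1 (dates made up):
#
#   to_masto_date(~N[2015-01-23 23:50:07])  #=> "2015-01-23T23:50:07.000Z"
#   to_masto_date("not a date")             #=> "1970-01-01T00:00:00Z"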
def to_masto_date(%NaiveDateTime{} = date) do
# NOTE: Elixir's ISO 8601 format is a superset of the real standard;
# it supports negative years, for example. ISO 8601 itself only allows
# years before 1583 by mutual agreement.
if date.year < 1583 do
"1970-01-01T00:00:00Z"
else
date
|> NaiveDateTime.to_iso8601()
|> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
end
end
def to_masto_date(date) when is_binary(date) do
with {:ok, date} <- NaiveDateTime.from_iso8601(date) do
to_masto_date(date)
else
_ -> "1970-01-01T00:00:00Z"
end
end
def to_masto_date(_), do: "1970-01-01T00:00:00Z"
defp shortname(name) do
with max_length when max_length > 0 <-
Config.get([Pleroma.Upload, :filename_display_max_length], 30),
true <- String.length(name) > max_length do
String.slice(name, 0..max_length) <> "…"
else
_ -> name
end
end
@spec confirm_current_password(User.t(), String.t()) :: {:ok, User.t()} | {:error, String.t()}
def confirm_current_password(user, password) do
with %User{local: true} = db_user <- User.get_cached_by_id(user.id),
true <- Pleroma.Password.checkpw(password, db_user.password_hash) do
{:ok, db_user}
else
_ -> {:error, dgettext("errors", "Invalid password.")}
end
end
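# The maybe_notify_* helpers below take a list of recipient ap_ids and extend
# it based on the activity: direct addressees ("to"), tagged Mentions on the
# object, subscribers of the author (for non-reply Creates) and the actor's
# followers (for Move activities). Activities that don't match leave the list
# unchanged.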
def maybe_notify_to_recipients(
recipients,
%Activity{data: %{"to" => to, "type" => _type}} = _activity
) do
recipients ++ to
end
def maybe_notify_to_recipients(recipients, _), do: recipients
def maybe_notify_mentioned_recipients(
recipients,
%Activity{data: %{"to" => _to, "type" => type} = data} = activity
)
when type == "Create" do
object = Object.normalize(activity, fetch: false)
object_data =
cond do
not is_nil(object) ->
object.data
is_map(data["object"]) ->
data["object"]
true ->
%{}
end
tagged_mentions = maybe_extract_mentions(object_data)
recipients ++ tagged_mentions
end
def maybe_notify_mentioned_recipients(recipients, _), do: recipients
def maybe_notify_subscribers(
recipients,
%Activity{data: %{"actor" => actor, "type" => "Create"}} = activity
) do
# Do not notify subscribers if author is making a reply
with %Object{data: object} <- Object.normalize(activity, fetch: false),
nil <- object["inReplyTo"],
%User{} = user <- User.get_cached_by_ap_id(actor) do
subscriber_ids =
user
|> User.subscriber_users()
|> Enum.filter(&Visibility.visible_for_user?(activity, &1))
|> Enum.map(& &1.ap_id)
recipients ++ subscriber_ids
else
_e -> recipients
end
end
def maybe_notify_subscribers(recipients, _), do: recipients
def maybe_notify_followers(recipients, %Activity{data: %{"type" => "Move"}} = activity) do
with %User{} = user <- User.get_cached_by_ap_id(activity.actor) do
user
|> User.get_followers()
|> Enum.map(& &1.ap_id)
|> Enum.concat(recipients)
else
_e -> recipients
end
end
def maybe_notify_followers(recipients, _), do: recipients
def maybe_extract_mentions(%{"tag" => tag}) do
tag
|> Enum.filter(fn x -> is_map(x) && x["type"] == "Mention" end)
|> Enum.map(fn x -> x["href"] end)
|> Enum.uniq()
end
def maybe_extract_mentions(_), do: []
def make_report_content_html(nil), do: {:ok, {nil, [], []}}
def make_report_content_html(comment) do
max_size = Config.get([:instance, :max_report_comment_size], 1000)
if String.length(comment) <= max_size do
{:ok, format_input(comment, "text/plain")}
else
{:error,
dgettext("errors", "Comment must be up to %{max_size} characters", max_size: max_size)}
end
end
def get_report_statuses(%User{ap_id: actor}, %{status_ids: status_ids})
when is_list(status_ids) do
{:ok, Activity.all_by_actor_and_id(actor, status_ids)}
end
def get_report_statuses(_, _), do: {:ok, nil}
def validate_character_limit("" = _full_payload, [] = _attachments) do
{:error, dgettext("errors", "Cannot post an empty status without attachments")}
end
def validate_character_limit(full_payload, _attachments) do
limit = Config.get([:instance, :limit])
length = String.length(full_payload)
if length <= limit do
:ok
else
{:error, dgettext("errors", "The status is over the character limit")}
end
end
end