diff --git a/.gitignore b/.gitignore
index 128cd525f..c651ebdeb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -81,3 +81,4 @@ docs/venv
/*.iml
/static
/etc
+/docker-compose.override.yml
diff --git a/.woodpecker/test.yml b/.woodpecker/test.yml
index 16a4067fe..c8b819ce8 100644
--- a/.woodpecker/test.yml
+++ b/.woodpecker/test.yml
@@ -7,6 +7,7 @@ matrix:
ELIXIR_VERSION:
- 1.14
- 1.15
+ - 1.16
OTP_VERSION:
- 25
- 26
@@ -17,6 +18,8 @@ matrix:
OTP_VERSION: 25
- ELIXIR_VERSION: 1.15
OTP_VERSION: 26
+ - ELIXIR_VERSION: 1.16
+ OTP_VERSION: 26
variables:
- &scw-secrets
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9033050a1..e397a75d0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,59 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+## Unreleased
+
+## Added
+- Support for [FEP-fffd](https://codeberg.org/fediverse/fep/src/branch/main/fep/fffd/fep-fffd.md) (proxy objects)
+- Verified support for Elixir 1.16
+
+## Changed
+
+## Fixed
+- Issue preventing fetching anything from IPv6-only instances
+- Issue allowing post content to leak via opengraph tags despite :restrict\_unauthenticated being set
+
+## 2024.03
+
+## Added
+- CLI tasks best-effort checking for past abuse of the recent spoofing exploit
+- new `:mrf_steal_emoji, :download_unknown_size` option; defaults to `false`
+
+## Changed
+- `Pleroma.Upload, :base_url` now MUST be configured explicitly if used;
+ use of the same domain as the instance is **strongly** discouraged
+- `:media_proxy, :base_url` now MUST be configured explicitly if used;
+ use of the same domain as the instance is **strongly** discouraged
+- StealEmoji:
+ - now uses the pack.json format;
+ existing users must migrate with an out-of-band script (check release notes)
+ - only steals shortcodes recognised as valid
+  - URLs of stolen emoji are no longer predictable
+- The `Dedupe` upload filter is now always active;
+ `AnonymizeFilenames` is again opt-in
+- received AP data is sanity checked before we attempt to parse it as a user
+- Uploads, emoji and media proxy now restrict Content-Type headers to a safe subset
+- Akkoma will no longer fetch and parse objects hosted on the same domain
+
+## Fixed
+- Critical security issue allowing Akkoma to be used as a vector for
+ (depending on configuration) impersonation of other users or creation
+ of bogus users and posts on the upload domain
+- Critical security issue letting Akkoma fall for the above impersonation
+ payloads due to lack of strict id checking
+- Critical security issue allowing redirect targets to pose as the initial domain
+ (e.g. with media proxy's fallback redirects)
+- refetched objects can no longer attribute themselves to third-party actors
+ (this had no externally visible effect since actor info is read from the Create activity)
+- our litepub JSON-LD schema is now served with the correct content type
+- remote APNG attachments are now recognised as images
+
+## Upgrade Notes
+
+- As mentioned in "Changed", `Pleroma.Upload, :base_url` **MUST** be configured. Uploads will fail without it.
+ - Akkoma will refuse to start if this is not set.
+- The same applies to `:media_proxy, :base_url` (see the example below).
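+
+A minimal sketch of both settings (the domains are illustrative; use a dedicated domain or subdomain distinct from the instance endpoint):
+
+```elixir
+config :pleroma, Pleroma.Upload, base_url: "https://media.example.com/media/"
+
+config :pleroma, :media_proxy, base_url: "https://cache.example.com/"
+```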
+
## 2024.02
## Added
diff --git a/SECURITY.md b/SECURITY.md
index c009d21d9..d37a8c9ca 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -1,16 +1,21 @@
-# Pleroma backend security policy
-
-## Supported versions
-
-Currently, Pleroma offers bugfixes and security patches only for the latest minor release.
-
-| Version | Support
-|---------| --------
-| 2.2 | Bugfixes and security patches
+# Akkoma backend security handling
## Reporting a vulnerability
-Please use confidential issues (tick the "This issue is confidential and should only be visible to team members with at least Reporter access." box when submitting) at our [bugtracker](https://git.pleroma.social/pleroma/pleroma/-/issues/new) for reporting vulnerabilities.
+Please send an email (preferably encrypted) or
+a DM via our IRC to one of the following people:
+
+| Forgejo nick | IRC nick | Email | GPG |
+| ------------ | ------------- | ------------- | --------------------------------------- |
+| floatinghost | FloatingGhost | *see GPG key* | https://coffee-and-dreams.uk/pubkey.asc |
+
## Announcements
-New releases are announced at [pleroma.social](https://pleroma.social/announcements/). All security releases are tagged with ["Security"](https://pleroma.social/announcements/tags/security/). You can be notified of them by subscribing to an Atom feed at .
+New releases and security issues are announced at
+[meta.akkoma.dev](https://meta.akkoma.dev/c/releases) and
+[@akkoma@ihatebeinga.live](https://ihatebeinga.live/akkoma).
+
+Both also offer RSS feeds
+([meta](https://meta.akkoma.dev/c/releases/7.rss),
+[fedi](https://ihatebeinga.live/users/akkoma.rss))
+so you can keep an eye on it without any accounts.
diff --git a/config/config.exs b/config/config.exs
index 568e9d89e..3395a2452 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -61,11 +61,12 @@
# Upload configuration
config :pleroma, Pleroma.Upload,
uploader: Pleroma.Uploaders.Local,
- filters: [Pleroma.Upload.Filter.Dedupe],
+ filters: [],
link_name: false,
proxy_remote: false,
filename_display_max_length: 30,
- base_url: nil
+ base_url: nil,
+ allowed_mime_types: ["image", "audio", "video"]
config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"
@@ -148,19 +149,39 @@
format: "$metadata[$level] $message",
metadata: [:request_id]
+# ———————————————————————————————————————————————————————————————
+# W A R N I N G
+# ———————————————————————————————————————————————————————————————
+#
+# Whenever adding a privileged new custom type for e.g.
+# ActivityPub objects, ALWAYS map their extension back
+# to "application/octet-stream".
+# Else files served by us can automatically end up with
+# those privileged types causing severe security hazards.
+# (We need those mappings so Phoenix can associate its format
+# (the "extension") to incoming requests of those MIME types)
+#
+# ———————————————————————————————————————————————————————————————
config :mime, :types, %{
"application/xml" => ["xml"],
"application/xrd+xml" => ["xrd+xml"],
"application/opensearchdescription+xml" => ["xml"],
"application/jrd+json" => ["jrd+json"],
"application/activity+json" => ["activity+json"],
- "application/ld+json" => ["activity+json"]
+ "application/ld+json" => ["activity+json"],
+ # Can be removed when bumping MIME past 2.0.5
+ # see https://akkoma.dev/AkkomaGang/akkoma/issues/657
+ "image/apng" => ["apng"]
}
config :mime, :extensions, %{
- "activity+json" => "application/activity+json"
+ "xrd+xml" => "text/plain",
+ "jrd+json" => "text/plain",
+ "activity+json" => "text/plain"
}
+# ———————————————————————————————————————————————————————————————
+
config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}
# Configures http settings, upstream proxy etc.
diff --git a/config/description.exs b/config/description.exs
index e108aaae8..ec5050be6 100644
--- a/config/description.exs
+++ b/config/description.exs
@@ -100,9 +100,22 @@
label: "Base URL",
type: :string,
description:
- "Base URL for the uploads. Required if you use a CDN or host attachments under a different domain.",
+        "Base URL for the uploads. Required if you use a CDN or host attachments under a different domain - it is HIGHLY recommended that you **do not** set this to be the same as the domain Akkoma is hosted on.",
suggestions: [
- "https://cdn-host.com"
+ "https://media.akkoma.dev/media/"
+ ]
+ },
+ %{
+ key: :allowed_mime_types,
+ label: "Allowed MIME types",
+ type: {:list, :string},
+ description:
+ "List of MIME (main) types uploads are allowed to identify themselves with. Other types may still be uploaded, but will identify as a generic binary to clients. WARNING: Loosening this over the defaults can lead to security issues. Removing types is safe, but only add to the list if you are sure you know what you are doing.",
+ suggestions: [
+ "image",
+ "audio",
+ "video",
+ "font"
]
},
%{
diff --git a/config/test.exs b/config/test.exs
index 6d502e852..07a4b8c36 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -22,9 +22,12 @@
config :pleroma, :auth, oauth_consumer_strategies: []
config :pleroma, Pleroma.Upload,
+ base_url: "http://localhost:4001/media/",
filters: [],
link_name: false
+config :pleroma, :media_proxy, base_url: "http://localhost:4001"
+
config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads"
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Test, enabled: true
diff --git a/docs/docs/administration/CLI_tasks/security.md b/docs/docs/administration/CLI_tasks/security.md
new file mode 100644
index 000000000..a0208c4e5
--- /dev/null
+++ b/docs/docs/administration/CLI_tasks/security.md
@@ -0,0 +1,56 @@
+# Security-related tasks
+
+{! administration/CLI_tasks/general_cli_task_info.include !}
+
+!!! danger
+ Many of these tasks were written in response to a patched exploit.
+    It is recommended to run each of them soon after installing its respective security update.
+ Over time with db migrations they might become less accurate or be removed altogether.
+ If you never ran an affected version, there’s no point in running them.
+
+## Spoofed ActivityPub objects exploit (2024-03, fixed in 3.11.1)
+
+### Search for uploaded spoofing payloads
+
+Scans local uploads for spoofing payloads.
+If the instance is not using the local uploader it was not affected.
+Attachments will be scanned anyway in case the local uploader was used in the past.
+
+!!! note
+ This cannot reliably detect payloads attached to deleted posts.
+
+=== "OTP"
+
+ ```sh
+ ./bin/pleroma_ctl security spoof-uploaded
+ ```
+
+=== "From Source"
+
+ ```sh
+ mix pleroma.security spoof-uploaded
+ ```
+
+### Search for counterfeit posts in database
+
+Scans all notes in the database for signs of being spoofed.
+
+!!! note
+ Spoofs targeting local accounts can be detected rather reliably
+ (with some restrictions documented in the task’s logs).
+    Counterfeit posts from remote users cannot. A best-effort attempt is made, but
+    a thorough attacker can avoid this and it may yield a small number of false positives.
+
+    Should you find counterfeit posts of local users, let other admins know so they can delete them too.
+
+=== "OTP"
+
+ ```sh
+ ./bin/pleroma_ctl security spoof-inserted
+ ```
+
+=== "From Source"
+
+ ```sh
+ mix pleroma.security spoof-inserted
+ ```
diff --git a/docs/docs/configuration/cheatsheet.md b/docs/docs/configuration/cheatsheet.md
index 9e312dfd0..59cf5177a 100644
--- a/docs/docs/configuration/cheatsheet.md
+++ b/docs/docs/configuration/cheatsheet.md
@@ -236,7 +236,9 @@ config :pleroma, :mrf_user_allowlist, %{
#### :mrf_steal_emoji
* `hosts`: List of hosts to steal emojis from
* `rejected_shortcodes`: Regex-list of shortcodes to reject
-* `size_limit`: File size limit (in bytes), checked before an emoji is saved to the disk
+* `size_limit`: File size limit (in bytes), checked before download when the remote server reports a size (and is honest),
+  and always checked again before the emoji is saved to disk
+* `download_unknown_size`: whether to download an emoji when the remote server doesn’t report its size in advance; defaults to `false` (see the example below)
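+
+A minimal sketch of this section with the new option set explicitly (the host is illustrative; `50_000` is the built-in default size limit):
+
+```elixir
+config :pleroma, :mrf_steal_emoji,
+  hosts: ["emoji.example.org"],
+  size_limit: 50_000,
+  download_unknown_size: false
+```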
#### :mrf_activity_expiration
@@ -396,7 +398,8 @@ This section describe PWA manifest instance-specific values. Currently this opti
## :media_proxy
* `enabled`: Enables proxying of remote media to the instance’s proxy
-* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.
+* `base_url`: The base URL to access a user-uploaded file.
+ Using a (sub)domain distinct from the instance endpoint is **strongly** recommended.
* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`.
* `whitelist`: List of hosts with scheme to bypass the mediaproxy (e.g. `https://example.com`)
* `invalidation`: options for remove media from cache after delete object:
@@ -597,8 +600,9 @@ the source code is here: [kocaptcha](https://github.com/koto-bank/kocaptcha). Th
* `uploader`: Which one of the [uploaders](#uploaders) to use.
* `filters`: List of [upload filters](#upload-filters) to use.
-* `link_name`: When enabled Akkoma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers when using filters like `Pleroma.Upload.Filter.Dedupe`
-* `base_url`: The base URL to access a user-uploaded file. Useful when you want to host the media files via another domain or are using a 3rd party S3 provider.
+* `link_name`: When enabled Akkoma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers
+* `base_url`: The base URL to access a user-uploaded file; MUST be configured explicitly.
+ Using a (sub)domain distinct from the instance endpoint is **strongly** recommended. A good value might be `https://media.myakkoma.instance/media/`.
* `proxy_remote`: If you're using a remote uploader, Akkoma will proxy media requests instead of redirecting to it.
* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
* `filename_display_max_length`: Set max length of a filename to display. 0 = no limit. Default: 30.
@@ -638,17 +642,18 @@ config :ex_aws, :s3,
### Upload filters
-#### Pleroma.Upload.Filter.AnonymizeFilename
-
-This filter replaces the filename (not the path) of an upload. For complete obfuscation, add
-`Pleroma.Upload.Filter.Dedupe` before AnonymizeFilename.
-
-* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used. You can get the original filename extension by using `{extension}`, for example `custom-file-name.{extension}`.
-
#### Pleroma.Upload.Filter.Dedupe
+**Always** active; cannot be turned off.
+Renames files to their hash and prevents duplicate files filling up the disk.
No specific configuration.
+#### Pleroma.Upload.Filter.AnonymizeFilename
+
+This filter replaces the declared filename (not the path) of an upload.
+
+* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used. You can get the original filename extension by using `{extension}`, for example `custom-file-name.{extension}`.
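+
+Since this filter is opt-in, it has to be added to the `filters` list of `Pleroma.Upload` to take effect. A minimal sketch (the `text` value is illustrative):
+
+```elixir
+config :pleroma, Pleroma.Upload, filters: [Pleroma.Upload.Filter.AnonymizeFilename]
+
+config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename, text: "custom-file-name.{extension}"
+```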
+
#### Pleroma.Upload.Filter.Exiftool
This filter only strips the GPS and location metadata with Exiftool leaving color profiles and attributes intact.
diff --git a/docs/docs/configuration/hardening.md b/docs/docs/configuration/hardening.md
index 521183f7d..f8ba048dd 100644
--- a/docs/docs/configuration/hardening.md
+++ b/docs/docs/configuration/hardening.md
@@ -17,6 +17,16 @@ This sets the Akkoma application server to only listen to the localhost interfac
This sets the `secure` flag on Akkoma’s session cookie. This makes sure, that the cookie is only accepted over encrypted HTTPs connections. This implicitly renames the cookie from `pleroma_key` to `__Host-pleroma-key` which enforces some restrictions. (see [cookie prefixes](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie#Cookie_prefixes))
+### `Pleroma.Upload, :base_url`
+
+> Recommended value: *anything on a different domain than the instance endpoint; e.g. https://media.myinstance.net/*
+
+Uploads are user controlled and (unless you’re running a true single-user
+instance) should therefore not be considered trusted. But the domain is used
+as a privilege boundary, e.g. by HTTP content security policy and ActivityPub.
+Having uploads on the same domain enabled several past vulnerabilities
+that could be exploited by malicious users.
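+
+A minimal sketch of the corresponding setting, using a dedicated media domain as recommended above (the domain is illustrative):
+
+```elixir
+config :pleroma, Pleroma.Upload,
+  base_url: "https://media.myinstance.net/media/"
+```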
+
### `:http_security`
> Recommended value: `true`
diff --git a/docs/docs/configuration/howto_mediaproxy.md b/docs/docs/configuration/howto_mediaproxy.md
index 8ad81bdfb..223ad7eed 100644
--- a/docs/docs/configuration/howto_mediaproxy.md
+++ b/docs/docs/configuration/howto_mediaproxy.md
@@ -6,7 +6,16 @@ With the `mediaproxy` function you can use nginx to cache this content, so users
## Activate it
-* Edit your nginx config and add the following location:
+* Edit your nginx config and add the following location to your main server block:
+```
+location /proxy {
+ return 404;
+}
+```
+
+* Set up a subdomain for the proxy with its nginx config on the same machine
+ *(the latter is not strictly required, but for simplicity we’ll assume so)*
+* In this subdomain’s server block add
```
location /proxy {
proxy_cache akkoma_media_cache;
@@ -26,9 +35,9 @@ config :pleroma, :media_proxy,
enabled: true,
proxy_opts: [
redirect_on_failure: true
- ]
- #base_url: "https://cache.akkoma.social"
+ ],
+ base_url: "https://cache.akkoma.social"
```
-If you want to use a subdomain to serve the files, uncomment `base_url`, change the url and add a comma after `true` in the previous line.
+You **really** should use a subdomain to serve proxied files; while we will fix bugs resulting from not doing so, serving arbitrary remote content on your main domain namespace is a significant attack surface.
* Restart nginx and Akkoma
diff --git a/docs/docs/installation/docker_en.md b/docs/docs/installation/docker_en.md
index 52c056173..9551b034a 100644
--- a/docs/docs/installation/docker_en.md
+++ b/docs/docs/installation/docker_en.md
@@ -125,7 +125,26 @@ cp docker-resources/Caddyfile.example docker-resources/Caddyfile
Then edit the TLD in your caddyfile to the domain you're serving on.
-Uncomment the `caddy` section in the docker compose file,
+Copy the commented out `caddy` section in `docker-compose.yml` into a new file called `docker-compose.override.yml` like so:
+```yaml
+version: "3.7"
+
+services:
+ proxy:
+ image: caddy:2-alpine
+ restart: unless-stopped
+ links:
+ - akkoma
+ ports: [
+ "443:443",
+ "80:80"
+ ]
+ volumes:
+ - ./docker-resources/Caddyfile:/etc/caddy/Caddyfile
+ - ./caddy-data:/data
+ - ./caddy-config:/config
+```
+
then run `docker compose up -d` again.
#### Running a reverse proxy on the host
@@ -155,6 +174,12 @@ git pull
docker compose restart akkoma db
```
+### Modifying the Docker services
+If you want to modify the services defined in the docker compose file, you can
+create a new file called `docker-compose.override.yml`. There you can add any
+overrides or additional services without worrying about git conflicts when a
+new release comes out.
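+
+For example, to pin the database image without editing the tracked compose file (the image tag is illustrative; this assumes the service is named `db` as in the update commands above):
+
+```yaml
+version: "3.7"
+
+services:
+  db:
+    image: postgres:14-alpine
+```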
+
#### Further reading
{! installation/further_reading.include !}
diff --git a/docs/docs/installation/generic_dependencies.include b/docs/docs/installation/generic_dependencies.include
index b23736d85..d7701a28f 100644
--- a/docs/docs/installation/generic_dependencies.include
+++ b/docs/docs/installation/generic_dependencies.include
@@ -1,8 +1,8 @@
## Required dependencies
* PostgreSQL 9.6+
-* Elixir 1.14+
-* Erlang OTP 25+
+* Elixir 1.14+ (currently tested up to 1.16)
+* Erlang OTP 25+ (currently tested up to OTP 26)
* git
* file / libmagic
* gcc (clang might also work)
diff --git a/installation/nginx/akkoma.nginx b/installation/nginx/akkoma.nginx
index 18d92f30f..1d91ce22f 100644
--- a/installation/nginx/akkoma.nginx
+++ b/installation/nginx/akkoma.nginx
@@ -75,9 +75,48 @@ server {
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ location ~ ^/(media|proxy) {
+ return 404;
+ }
+
location / {
proxy_pass http://phoenix;
}
+}
+
+# Upload and MediaProxy Subdomain
+# (see main domain setup for more details)
+server {
+ server_name media.example.tld;
+
+ listen 80;
+ listen [::]:80;
+
+ location / {
+ return 301 https://$server_name$request_uri;
+ }
+}
+
+server {
+ server_name media.example.tld;
+
+ listen 443 ssl http2;
+ listen [::]:443 ssl http2;
+
+ ssl_trusted_certificate /etc/letsencrypt/live/media.example.tld/chain.pem;
+ ssl_certificate /etc/letsencrypt/live/media.example.tld/fullchain.pem;
+ ssl_certificate_key /etc/letsencrypt/live/media.example.tld/privkey.pem;
+ # .. copy all other the ssl_* and gzip_* stuff from main domain
+
+ # the nginx default is 1m, not enough for large media uploads
+ client_max_body_size 16m;
+ ignore_invalid_headers off;
+
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_set_header Host $http_host;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
location ~ ^/(media|proxy) {
proxy_cache akkoma_media_cache;
@@ -91,4 +130,8 @@ server {
chunked_transfer_encoding on;
proxy_pass http://phoenix;
}
+
+ location / {
+ return 404;
+ }
}
diff --git a/lib/mix/tasks/pleroma/instance.ex b/lib/mix/tasks/pleroma/instance.ex
index 6f4d201d9..58b7f8ada 100644
--- a/lib/mix/tasks/pleroma/instance.ex
+++ b/lib/mix/tasks/pleroma/instance.ex
@@ -20,6 +20,7 @@ def run(["gen" | rest]) do
output: :string,
output_psql: :string,
domain: :string,
+ media_url: :string,
instance_name: :string,
admin_email: :string,
notify_email: :string,
@@ -67,6 +68,14 @@ def run(["gen" | rest]) do
":"
) ++ [443]
+ media_url =
+ get_option(
+ options,
+ :media_url,
+        "What base url will uploads use? (e.g. https://media.example.com/media)\n" <>
+ " Generally this should NOT use the same domain as the instance "
+ )
+
name =
get_option(
options,
@@ -249,6 +258,7 @@ def run(["gen" | rest]) do
EEx.eval_file(
template_dir <> "/sample_config.eex",
domain: domain,
+ media_url: media_url,
port: port,
email: email,
notify_email: notify_email,
diff --git a/lib/mix/tasks/pleroma/security.ex b/lib/mix/tasks/pleroma/security.ex
new file mode 100644
index 000000000..f039e0980
--- /dev/null
+++ b/lib/mix/tasks/pleroma/security.ex
@@ -0,0 +1,330 @@
+# Akkoma: Magically expressive social media
+# Copyright © 2024 Akkoma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Mix.Tasks.Pleroma.Security do
+ use Mix.Task
+ import Ecto.Query
+ import Mix.Pleroma
+
+ alias Pleroma.Config
+
+ require Logger
+
+ @shortdoc """
+  Security-related tasks, e.g. checking for signs that past exploits were abused.
+ """
+
+ # Constants etc
+ defp local_id_prefix(), do: Pleroma.Web.Endpoint.url() <> "/"
+
+ defp local_id_pattern(), do: local_id_prefix() <> "%"
+
+ @activity_exts ["activity+json", "activity%2Bjson"]
+
+ defp activity_ext_url_patterns() do
+ for e <- @activity_exts do
+ for suf <- ["", "?%"] do
+ # Escape literal % for use in SQL patterns
+ ee = String.replace(e, "%", "\\%")
+ "%.#{ee}#{suf}"
+ end
+ end
+ |> List.flatten()
+ end
+
+ # Search for malicious uploads exploiting the lack of Content-Type sanitisation from before 2024-03
+ def run(["spoof-uploaded"]) do
+ Logger.put_process_level(self(), :notice)
+ start_pleroma()
+
+ IO.puts("""
+ +------------------------+
+ | SPOOF SEARCH UPLOADS |
+ +------------------------+
+ Checking if any uploads are using privileged types.
+ NOTE if attachment deletion is enabled, payloads used
+ in the past may no longer exist.
+ """)
+
+ do_spoof_uploaded()
+ end
+
+ # Fuzzy search for potentially counterfeit activities in the database resulting from the same exploit
+ def run(["spoof-inserted"]) do
+ Logger.put_process_level(self(), :notice)
+ start_pleroma()
+
+ IO.puts("""
+ +----------------------+
+ | SPOOF SEARCH NOTES |
+ +----------------------+
+ Starting fuzzy search for counterfeit activities.
+    NOTE this cannot guarantee detecting all counterfeits
+ and may yield a small percentage of false positives.
+ """)
+
+ do_spoof_inserted()
+ end
+
+ # +-----------------------------+
+ # | S P O O F - U P L O A D E D |
+ # +-----------------------------+
+ defp do_spoof_uploaded() do
+ files =
+ case Config.get!([Pleroma.Upload, :uploader]) do
+ Pleroma.Uploaders.Local ->
+ uploads_search_spoofs_local_dir(Config.get!([Pleroma.Uploaders.Local, :uploads]))
+
+ _ ->
+ IO.puts("""
+ NOTE:
+ Not using local uploader; thus not affected by this exploit.
+ It's impossible to check for files, but in case local uploader was used before
+ or to check if anyone futilely attempted a spoof, notes will still be scanned.
+ """)
+
+ []
+ end
+
+ emoji = uploads_search_spoofs_local_dir(Config.get!([:instance, :static_dir]))
+
+ post_attachs = uploads_search_spoofs_notes()
+
+ not_orphaned_urls =
+ post_attachs
+ |> Enum.map(fn {_u, _a, url} -> url end)
+ |> MapSet.new()
+
+ orphaned_attachs = upload_search_orphaned_attachments(not_orphaned_urls)
+
+ IO.puts("\nSearch concluded; here are the results:")
+ pretty_print_list_with_title(emoji, "Emoji")
+ pretty_print_list_with_title(files, "Uploaded Files")
+ pretty_print_list_with_title(post_attachs, "(Not Deleted) Post Attachments")
+ pretty_print_list_with_title(orphaned_attachs, "Orphaned Uploads")
+
+ IO.puts("""
+ In total found
+ #{length(emoji)} emoji
+ #{length(files)} uploads
+ #{length(post_attachs)} not deleted posts
+ #{length(orphaned_attachs)} orphaned attachments
+ """)
+ end
+
+ defp uploads_search_spoofs_local_dir(dir) do
+ local_dir = String.replace_suffix(dir, "/", "")
+
+ IO.puts("Searching for suspicious files in #{local_dir}...")
+
+ glob_ext = "{" <> Enum.join(@activity_exts, ",") <> "}"
+
+ Path.wildcard(local_dir <> "/**/*." <> glob_ext, match_dot: true)
+ |> Enum.map(fn path ->
+ String.replace_prefix(path, local_dir <> "/", "")
+ end)
+ |> Enum.sort()
+ end
+
+ defp uploads_search_spoofs_notes() do
+ IO.puts("Now querying DB for posts with spoofing attachments. This might take a while...")
+
+ patterns = [local_id_pattern() | activity_ext_url_patterns()]
+
+    # I don't know whether jsonb_array_elements in FROM can be expressed with normal Ecto functions, hence the raw SQL.
+ """
+ SELECT DISTINCT a.data->>'actor', a.id, url->>'href'
+ FROM public.objects AS o JOIN public.activities AS a
+ ON o.data->>'id' = a.data->>'object',
+ jsonb_array_elements(o.data->'attachment') AS attachs,
+ jsonb_array_elements(attachs->'url') AS url
+ WHERE o.data->>'type' = 'Note' AND
+ o.data->>'id' LIKE $1::text AND (
+ url->>'href' LIKE $2::text OR
+ url->>'href' LIKE $3::text OR
+ url->>'href' LIKE $4::text OR
+ url->>'href' LIKE $5::text
+ )
+ ORDER BY a.data->>'actor', a.id, url->>'href';
+ """
+ |> Pleroma.Repo.query!(patterns, timeout: :infinity)
+ |> map_raw_id_apid_tuple()
+ end
+
+ defp upload_search_orphaned_attachments(not_orphaned_urls) do
+ IO.puts("""
+    Now querying DB for orphaned spoofing attachments (i.e. their post was deleted,
+    but if :cleanup_attachments was not enabled, traces remain in the database).
+    This might take a bit...
+ """)
+
+ patterns = activity_ext_url_patterns()
+
+ """
+ SELECT DISTINCT attach.id, url->>'href'
+ FROM public.objects AS attach,
+ jsonb_array_elements(attach.data->'url') AS url
+ WHERE (attach.data->>'type' = 'Image' OR
+ attach.data->>'type' = 'Document')
+ AND (
+ url->>'href' LIKE $1::text OR
+ url->>'href' LIKE $2::text OR
+ url->>'href' LIKE $3::text OR
+ url->>'href' LIKE $4::text
+ )
+ ORDER BY attach.id, url->>'href';
+ """
+ |> Pleroma.Repo.query!(patterns, timeout: :infinity)
+ |> then(fn res -> Enum.map(res.rows, fn [id, url] -> {id, url} end) end)
+ |> Enum.filter(fn {_, url} -> !(url in not_orphaned_urls) end)
+ end
+
+ # +-----------------------------+
+ # | S P O O F - I N S E R T E D |
+ # +-----------------------------+
+ defp do_spoof_inserted() do
+ IO.puts("""
+ Searching for local posts whose Create activity has no ActivityPub id...
+ This is a pretty good indicator, but only for spoofs of local actors
+ and only if the spoofing happened after around late 2021.
+ """)
+
+ idless_create =
+ search_local_notes_without_create_id()
+ |> Enum.sort()
+
+ IO.puts("Done.\n")
+
+ IO.puts("""
+ Now trying to weed out other poorly hidden spoofs.
+ This can't detect all and may have some false positives.
+ """)
+
+ likely_spoofed_posts_set = MapSet.new(idless_create)
+
+ sus_pattern_posts =
+ search_sus_notes_by_id_patterns()
+ |> Enum.filter(fn r -> !(r in likely_spoofed_posts_set) end)
+
+ IO.puts("Done.\n")
+
+ IO.puts("""
+ Finally, searching for spoofed, local user accounts.
+ (It's impossible to detect spoofed remote users)
+ """)
+
+ spoofed_users = search_bogus_local_users()
+
+ pretty_print_list_with_title(sus_pattern_posts, "Maybe Spoofed Posts")
+ pretty_print_list_with_title(idless_create, "Likely Spoofed Posts")
+ pretty_print_list_with_title(spoofed_users, "Spoofed local user accounts")
+
+ IO.puts("""
+ In total found:
+ #{length(spoofed_users)} bogus users
+ #{length(idless_create)} likely spoofed posts
+ #{length(sus_pattern_posts)} maybe spoofed posts
+ """)
+ end
+
+ defp search_local_notes_without_create_id() do
+ Pleroma.Object
+ |> where([o], fragment("?->>'id' LIKE ?", o.data, ^local_id_pattern()))
+ |> join(:inner, [o], a in Pleroma.Activity,
+ on: fragment("?->>'object' = ?->>'id'", a.data, o.data)
+ )
+ |> where([o, a], fragment("NOT (? \\? 'id') OR ?->>'id' IS NULL", a.data, a.data))
+ |> select([o, a], {a.id, fragment("?->>'id'", o.data)})
+ |> order_by([o, a], a.id)
+ |> Pleroma.Repo.all(timeout: :infinity)
+ end
+
+ defp search_sus_notes_by_id_patterns() do
+ [ep1, ep2, ep3, ep4] = activity_ext_url_patterns()
+
+ Pleroma.Object
+ |> where(
+ [o],
+      # for local objects we know exactly what a genuine id looks like
+ # (though a thorough attacker can emulate this)
+ # for remote posts, use some best-effort patterns
+ fragment(
+ """
+ (?->>'id' LIKE ? AND ?->>'id' NOT SIMILAR TO
+ ? || 'objects/[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}')
+ """,
+ o.data,
+ ^local_id_pattern(),
+ o.data,
+ ^local_id_prefix()
+ ) or
+ fragment("?->>'id' LIKE ?", o.data, "%/emoji/%") or
+ fragment("?->>'id' LIKE ?", o.data, "%/media/%") or
+ fragment("?->>'id' LIKE ?", o.data, "%/proxy/%") or
+ fragment("?->>'id' LIKE ?", o.data, ^ep1) or
+ fragment("?->>'id' LIKE ?", o.data, ^ep2) or
+ fragment("?->>'id' LIKE ?", o.data, ^ep3) or
+ fragment("?->>'id' LIKE ?", o.data, ^ep4)
+ )
+ |> join(:inner, [o], a in Pleroma.Activity,
+ on: fragment("?->>'object' = ?->>'id'", a.data, o.data)
+ )
+ |> select([o, a], {a.id, fragment("?->>'id'", o.data)})
+ |> order_by([o, a], a.id)
+ |> Pleroma.Repo.all(timeout: :infinity)
+ end
+
+ defp search_bogus_local_users() do
+ Pleroma.User.Query.build(%{})
+ |> where([u], u.local == false and like(u.ap_id, ^local_id_pattern()))
+ |> order_by([u], u.ap_id)
+ |> select([u], u.ap_id)
+ |> Pleroma.Repo.all(timeout: :infinity)
+ end
+
+ # +-----------------------------------+
+ # | module-specific utility functions |
+ # +-----------------------------------+
+ defp pretty_print_list_with_title(list, title) do
+ title_len = String.length(title)
+ title_underline = String.duplicate("=", title_len)
+ IO.puts(title)
+ IO.puts(title_underline)
+ pretty_print_list(list)
+ end
+
+ defp pretty_print_list([]), do: IO.puts("")
+
+ defp pretty_print_list([{a, o} | rest])
+ when (is_binary(a) or is_number(a)) and is_binary(o) do
+ IO.puts(" {#{a}, #{o}}")
+ pretty_print_list(rest)
+ end
+
+ defp pretty_print_list([{u, a, o} | rest])
+ when is_binary(a) and is_binary(u) and is_binary(o) do
+ IO.puts(" {#{u}, #{a}, #{o}}")
+ pretty_print_list(rest)
+ end
+
+ defp pretty_print_list([e | rest]) when is_binary(e) do
+ IO.puts(" #{e}")
+ pretty_print_list(rest)
+ end
+
+ defp pretty_print_list([e | rest]), do: pretty_print_list([inspect(e) | rest])
+
+ defp map_raw_id_apid_tuple(res) do
+ user_prefix = local_id_prefix() <> "users/"
+
+ Enum.map(res.rows, fn
+ [uid, aid, oid] ->
+ {
+ String.replace_prefix(uid, user_prefix, ""),
+ FlakeId.to_string(aid),
+ oid
+ }
+ end)
+ end
+end
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index 3200175d1..28a86d0aa 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -288,6 +288,7 @@ defp http_children do
|> Config.get([])
|> Pleroma.HTTP.AdapterHelper.add_pool_size(pool_size)
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy_pool(proxy)
+ |> Pleroma.HTTP.AdapterHelper.ensure_ipv6()
|> Keyword.put(:name, MyFinch)
[{Finch, config}]
diff --git a/lib/pleroma/config/deprecation_warnings.ex b/lib/pleroma/config/deprecation_warnings.ex
index beb3f5e7f..c299a4947 100644
--- a/lib/pleroma/config/deprecation_warnings.ex
+++ b/lib/pleroma/config/deprecation_warnings.ex
@@ -182,7 +182,9 @@ def warn do
check_quarantined_instances_tuples(),
check_transparency_exclusions_tuples(),
check_simple_policy_tuples(),
- check_http_adapter()
+ check_http_adapter(),
+ check_uploader_base_url_set(),
+ check_uploader_base_url_is_not_base_domain()
]
|> Enum.reduce(:ok, fn
:ok, :ok -> :ok
@@ -337,4 +339,54 @@ def check_uploders_s3_public_endpoint do
:ok
end
end
+
+ def check_uploader_base_url_set() do
+ uses_local_uploader? = Config.get([Pleroma.Upload, :uploader]) == Pleroma.Uploaders.Local
+ base_url = Pleroma.Config.get([Pleroma.Upload, :base_url])
+
+ if base_url || !uses_local_uploader? do
+ :ok
+ else
+ Logger.error("""
+ !!!WARNING!!!
+ Your config does not specify a base_url for uploads!
+ Please make the following change:\n
+      \n* `config :pleroma, Pleroma.Upload, base_url: "https://example.com/media/"`
+ \n
+ \nPlease note that it is HEAVILY recommended to use a subdomain to host user-uploaded media!
+ """)
+
+ # This is a hard exit - the uploader will not work without a base_url
+ raise ArgumentError, message: "No base_url set for uploads - please set one in your config!"
+ end
+ end
+
+ def check_uploader_base_url_is_not_base_domain() do
+ uses_local_uploader? = Config.get([Pleroma.Upload, :uploader]) == Pleroma.Uploaders.Local
+
+ uploader_host =
+ [Pleroma.Upload, :base_url]
+ |> Pleroma.Config.get()
+ |> URI.parse()
+ |> Map.get(:host)
+
+ akkoma_host =
+ [Pleroma.Web.Endpoint, :url]
+ |> Pleroma.Config.get()
+ |> Keyword.get(:host)
+
+ if uploader_host == akkoma_host && uses_local_uploader? do
+ Logger.error("""
+ !!!WARNING!!!
+ Your Akkoma Host and your Upload base_url's host are the same!
+ This can potentially be insecure!
+
+ It is HIGHLY recommended that you migrate your media uploads
+ to a subdomain at your earliest convenience
+ """)
+ end
+
+ # This isn't actually an error condition, just a warning
+ :ok
+ end
end
diff --git a/lib/pleroma/emoji/pack.ex b/lib/pleroma/emoji/pack.ex
index 9049d9097..142208854 100644
--- a/lib/pleroma/emoji/pack.ex
+++ b/lib/pleroma/emoji/pack.ex
@@ -26,12 +26,37 @@ defmodule Pleroma.Emoji.Pack do
alias Pleroma.Emoji.Pack
alias Pleroma.Utils
+  # Invalid/malicious names are supposed to be filtered out before path joining,
+  # but there are many entry points to the affected functions, so as the code changes
+  # we might accidentally let an unsanitised name slip through.
+  # To make sure, use the helpers below, which crash the process otherwise.
+
+  # ALWAYS use this when constructing paths from an external name!
+  # ("name" meaning it must be only a single path component)
+ defp path_join_name_safe(dir, name) do
+ if to_string(name) != Path.basename(name) or name in ["..", ".", ""] do
+ raise "Invalid or malicious pack name: #{name}"
+ else
+ Path.join(dir, name)
+ end
+ end
+
+ # ALWAYS use this to join external paths
+ # (which are allowed to have several components)
+ defp path_join_safe(dir, path) do
+ {:ok, safe_path} = Path.safe_relative(path)
+ Path.join(dir, safe_path)
+ end
+
@spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
def create(name) do
with :ok <- validate_not_empty([name]),
- dir <- Path.join(emoji_path(), name),
+ dir <- path_join_name_safe(emoji_path(), name),
:ok <- File.mkdir(dir) do
- save_pack(%__MODULE__{pack_file: Path.join(dir, "pack.json")})
+ save_pack(%__MODULE__{
+ path: dir,
+ pack_file: Path.join(dir, "pack.json")
+ })
end
end
@@ -65,7 +90,7 @@ def show(opts) do
{:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values}
def delete(name) do
with :ok <- validate_not_empty([name]),
- pack_path <- Path.join(emoji_path(), name) do
+ pack_path <- path_join_name_safe(emoji_path(), name) do
File.rm_rf(pack_path)
end
end
@@ -89,7 +114,7 @@ defp unpack_zip_emojies(zip_files) do
end)
end
- @spec add_file(t(), String.t(), Path.t(), Plug.Upload.t()) ::
+ @spec add_file(t(), String.t(), Path.t(), Plug.Upload.t() | binary()) ::
{:ok, t()}
| {:error, File.posix() | atom()}
def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"} = file) do
@@ -107,7 +132,7 @@ def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"}
Enum.map_reduce(emojies, pack, fn item, emoji_pack ->
emoji_file = %Plug.Upload{
filename: item[:filename],
- path: Path.join(tmp_dir, item[:path])
+ path: path_join_safe(tmp_dir, item[:path])
}
{:ok, updated_pack} =
@@ -137,6 +162,14 @@ def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"}
end
def add_file(%Pack{} = pack, shortcode, filename, %Plug.Upload{} = file) do
+ try_add_file(pack, shortcode, filename, file)
+ end
+
+ def add_file(%Pack{} = pack, shortcode, filename, filedata) when is_binary(filedata) do
+ try_add_file(pack, shortcode, filename, filedata)
+ end
+
+ defp try_add_file(%Pack{} = pack, shortcode, filename, file) do
with :ok <- validate_not_empty([shortcode, filename]),
:ok <- validate_emoji_not_exists(shortcode),
{:ok, updated_pack} <- do_add_file(pack, shortcode, filename, file) do
@@ -189,6 +222,7 @@ def import_from_filesystem do
{:ok, results} <- File.ls(emoji_path) do
names =
results
+ # items come from File.ls, thus safe
|> Enum.map(&Path.join(emoji_path, &1))
|> Enum.reject(fn path ->
File.dir?(path) and File.exists?(Path.join(path, "pack.json"))
@@ -287,8 +321,8 @@ def update_metadata(name, data) do
@spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
def load_pack(name) do
- name = Path.basename(name)
- pack_file = Path.join([emoji_path(), name, "pack.json"])
+ pack_dir = path_join_name_safe(emoji_path(), name)
+ pack_file = Path.join(pack_dir, "pack.json")
with {:ok, _} <- File.stat(pack_file),
{:ok, pack_data} <- File.read(pack_file) do
@@ -412,7 +446,13 @@ defp downloadable?(pack) do
end
defp create_archive_and_cache(pack, hash) do
- files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
+ files = [
+ ~c"pack.json"
+ | Enum.map(pack.files, fn {_, file} ->
+ {:ok, file} = Path.safe_relative(file)
+ to_charlist(file)
+ end)
+ ]
{:ok, {_, result}} =
:zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])
@@ -474,7 +514,7 @@ defp validate_not_empty(list) do
end
defp save_file(%Plug.Upload{path: upload_path}, pack, filename) do
- file_path = Path.join(pack.path, filename)
+ file_path = path_join_safe(pack.path, filename)
create_subdirs(file_path)
with {:ok, _} <- File.copy(upload_path, file_path) do
@@ -482,6 +522,12 @@ defp save_file(%Plug.Upload{path: upload_path}, pack, filename) do
end
end
+ defp save_file(file_data, pack, filename) when is_binary(file_data) do
+ file_path = path_join_safe(pack.path, filename)
+ create_subdirs(file_path)
+ File.write(file_path, file_data, [:binary])
+ end
+
defp put_emoji(pack, shortcode, filename) do
files = Map.put(pack.files, shortcode, filename)
%{pack | files: files, files_count: length(Map.keys(files))}
@@ -493,8 +539,8 @@ defp delete_emoji(pack, shortcode) do
end
defp rename_file(pack, filename, new_filename) do
- old_path = Path.join(pack.path, filename)
- new_path = Path.join(pack.path, new_filename)
+ old_path = path_join_safe(pack.path, filename)
+ new_path = path_join_safe(pack.path, new_filename)
create_subdirs(new_path)
with :ok <- File.rename(old_path, new_path) do
@@ -512,7 +558,7 @@ defp create_subdirs(file_path) do
defp remove_file(pack, shortcode) do
with {:ok, filename} <- get_filename(pack, shortcode),
- emoji <- Path.join(pack.path, filename),
+ emoji <- path_join_safe(pack.path, filename),
:ok <- File.rm(emoji) do
remove_dir_if_empty(emoji, filename)
end
@@ -530,7 +576,7 @@ defp remove_dir_if_empty(emoji, filename) do
defp get_filename(pack, shortcode) do
with %{^shortcode => filename} when is_binary(filename) <- pack.files,
- file_path <- Path.join(pack.path, filename),
+ file_path <- path_join_safe(pack.path, filename),
{:ok, _} <- File.stat(file_path) do
{:ok, filename}
else
@@ -568,7 +614,7 @@ defp validate_downloadable(pack) do
end
defp copy_as(remote_pack, local_name) do
- path = Path.join(emoji_path(), local_name)
+ path = path_join_name_safe(emoji_path(), local_name)
%__MODULE__{
name: local_name,
diff --git a/lib/pleroma/http/adapter_helper.ex b/lib/pleroma/http/adapter_helper.ex
index 303ccdf2a..c1d95b3a4 100644
--- a/lib/pleroma/http/adapter_helper.ex
+++ b/lib/pleroma/http/adapter_helper.ex
@@ -65,6 +65,15 @@ def add_pool_size(opts, pool_size) do
|> put_in([:pools, :default, :size], pool_size)
end
+ def ensure_ipv6(opts) do
+ # Default transport opts already enable IPv6, so just ensure they're loaded
+ opts
+ |> maybe_add_pools()
+ |> maybe_add_default_pool()
+ |> maybe_add_conn_opts()
+ |> maybe_add_transport_opts()
+ end
+
defp maybe_add_pools(opts) do
if Keyword.has_key?(opts, :pools) do
opts
@@ -96,11 +105,15 @@ defp maybe_add_conn_opts(opts) do
defp maybe_add_transport_opts(opts) do
transport_opts = get_in(opts, [:pools, :default, :conn_opts, :transport_opts])
- unless is_nil(transport_opts) do
- opts
- else
- put_in(opts, [:pools, :default, :conn_opts, :transport_opts], [])
- end
+ opts =
+ unless is_nil(transport_opts) do
+ opts
+ else
+ put_in(opts, [:pools, :default, :conn_opts, :transport_opts], [])
+ end
+
+ # IPv6 is disabled and IPv4 enabled by default; ensure we can use both
+ put_in(opts, [:pools, :default, :conn_opts, :transport_opts, :inet6], true)
end
@doc """
diff --git a/lib/pleroma/object/containment.ex b/lib/pleroma/object/containment.ex
index 040537acf..37bc20e4d 100644
--- a/lib/pleroma/object/containment.ex
+++ b/lib/pleroma/object/containment.ex
@@ -11,6 +11,9 @@ defmodule Pleroma.Object.Containment do
Object containment is an important step in validating remote objects to prevent
spoofing, therefore removal of object containment functions is NOT recommended.
"""
+
+ alias Pleroma.Web.ActivityPub.Transmogrifier
+
def get_actor(%{"actor" => actor}) when is_binary(actor) do
actor
end
@@ -47,6 +50,31 @@ def get_object(_) do
defp compare_uris(%URI{host: host} = _id_uri, %URI{host: host} = _other_uri), do: :ok
defp compare_uris(_id_uri, _other_uri), do: :error
+ defp compare_uris_exact(uri, uri), do: :ok
+
+ defp compare_uris_exact(%URI{} = id, %URI{} = other),
+ do: compare_uris_exact(URI.to_string(id), URI.to_string(other))
+
+ defp compare_uris_exact(id_uri, other_uri)
+ when is_binary(id_uri) and is_binary(other_uri) do
+ norm_id = String.replace_suffix(id_uri, "/", "")
+ norm_other = String.replace_suffix(other_uri, "/", "")
+ if norm_id == norm_other, do: :ok, else: :error
+ end
+
+ @doc """
+ Checks whether an URL to fetch from is from the local server.
+  Checks whether a URL to fetch from is from the local server.
+ We never want to fetch from ourselves; if it’s not in the database
+ it can’t be authentic and must be a counterfeit.
+ """
+ def contain_local_fetch(id) do
+ case compare_uris(URI.parse(id), Pleroma.Web.Endpoint.struct_url()) do
+ :ok -> :error
+ _ -> :ok
+ end
+ end
+
@doc """
Checks that an imported AP object's actor matches the host it came from.
"""
@@ -62,8 +90,31 @@ def contain_origin(id, %{"actor" => _actor} = params) do
def contain_origin(id, %{"attributedTo" => actor} = params),
do: contain_origin(id, Map.put(params, "actor", actor))
- def contain_origin(_id, _data), do: :error
+ def contain_origin(_id, _data), do: :ok
+ @doc """
+  Check whether the fetch URL (after redirects) exactly (sans trailing slash) matches either
+  the canonical ActivityPub id or the object's url field (for display URLs from *key and Mastodon).
+
+ Since this is meant to be used for fetches, anonymous or transient objects are not accepted here.
+ """
+ def contain_id_to_fetch(url, %{"id" => id} = data) when is_binary(id) do
+ with {:id, :error} <- {:id, compare_uris_exact(id, url)},
+ # "url" can be a "Link" object and this is checked before full normalisation
+ display_url <- Transmogrifier.fix_url(data)["url"],
+ true <- display_url != nil do
+ compare_uris_exact(display_url, url)
+ else
+ {:id, :ok} -> :ok
+ _ -> :error
+ end
+ end
+
+ def contain_id_to_fetch(_url, _data), do: :error
+
+ @doc """
+ Check whether the object id is from the same host as another id
+ """
def contain_origin_from_id(id, %{"id" => other_id} = _params) when is_binary(other_id) do
id_uri = URI.parse(id)
other_uri = URI.parse(other_id)
@@ -85,4 +136,12 @@ def contain_child(%{"object" => %{"id" => id, "attributedTo" => _} = object}),
do: contain_origin(id, object)
def contain_child(_), do: :ok
+
+ @doc "Checks whether two URIs belong to the same domain"
+ def same_origin(id1, id2) do
+ uri1 = URI.parse(id1)
+ uri2 = URI.parse(id2)
+
+ compare_uris(uri1, uri2)
+ end
end
diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex
index 9e62ca69f..267a82b27 100644
--- a/lib/pleroma/object/fetcher.ex
+++ b/lib/pleroma/object/fetcher.ex
@@ -18,6 +18,16 @@ defmodule Pleroma.Object.Fetcher do
require Logger
require Pleroma.Constants
+ @moduledoc """
+  This module deals with correctly fetching ActivityPub objects in a safe way.
+
+  The core function is `fetch_and_contain_remote_object_from_id/1` which performs
+  the actual fetch and common safety and authenticity checks. Other `fetch_*`
+  functions use the former and perform some additional tasks.
+ """
+
+ @mix_env Mix.env()
+
defp touch_changeset(changeset) do
updated_at =
NaiveDateTime.utc_now()
@@ -103,18 +113,26 @@ defp reinject_object(%Object{} = object, new_data) do
end
end
+  @doc "Assumes the object is already in our database and refetches it from the remote to update (e.g. for polls)"
def refetch_object(%Object{data: %{"id" => id}} = object) do
with {:local, false} <- {:local, Object.local?(object)},
{:ok, new_data} <- fetch_and_contain_remote_object_from_id(id),
+ {:id, true} <- {:id, new_data["id"] == id},
{:ok, object} <- reinject_object(object, new_data) do
{:ok, object}
else
{:local, true} -> {:ok, object}
+ {:id, false} -> {:error, "Object id changed on refetch"}
e -> {:error, e}
end
end
- # Note: will create a Create activity, which we need internally at the moment.
+ @doc """
+ Fetches a new object and puts it through the processing pipeline for inbound objects
+
+  Note: will also insert a fake Create activity, since at the moment we internally
+ need everything to be traced back to a Create activity.
+ """
def fetch_object_from_id(id, options \\ []) do
with %URI{} = uri <- URI.parse(id),
# let's check the URI is even vaguely valid first
@@ -127,7 +145,6 @@ def fetch_object_from_id(id, options \\ []) do
{_, {:ok, data}} <- {:fetch, fetch_and_contain_remote_object_from_id(id)},
{_, nil} <- {:normalize, Object.normalize(data, fetch: false)},
params <- prepare_activity_params(data),
- {_, :ok} <- {:containment, Containment.contain_origin(id, params)},
{_, {:ok, activity}} <-
{:transmogrifier, Transmogrifier.handle_incoming(params, options)},
{_, _data, %Object{} = object} <-
@@ -140,9 +157,6 @@ def fetch_object_from_id(id, options \\ []) do
{:scheme, false} ->
{:error, "URI Scheme Invalid"}
- {:containment, _} ->
- {:error, "Object containment failed."}
-
{:transmogrifier, {:error, {:reject, e}}} ->
{:reject, e}
@@ -185,6 +199,7 @@ defp prepare_activity_params(data) do
|> Maps.put_if_present("bcc", data["bcc"])
end
+  @doc "Identical to `fetch_object_from_id/2` but directly returns the object, or `nil` on error"
def fetch_object_from_id!(id, options \\ []) do
with {:ok, object} <- fetch_object_from_id(id, options) do
object
@@ -235,6 +250,7 @@ defp maybe_date_fetch(headers, date) do
end
end
+  @doc "Fetches an arbitrary remote object and performs basic safety and authenticity checks"
def fetch_and_contain_remote_object_from_id(id)
def fetch_and_contain_remote_object_from_id(%{"id" => id}),
@@ -244,18 +260,29 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
Logger.debug("Fetching object #{id} via AP")
with {:scheme, true} <- {:scheme, String.starts_with?(id, "http")},
- {:ok, body} <- get_object(id),
+ {_, :ok} <- {:local_fetch, Containment.contain_local_fetch(id)},
+ {:ok, final_id, body} <- get_object(id),
{:ok, data} <- safe_json_decode(body),
- :ok <- Containment.contain_origin_from_id(id, data) do
- unless Instances.reachable?(id) do
- Instances.set_reachable(id)
+ {_, :ok} <- {:strict_id, Containment.contain_id_to_fetch(final_id, data)},
+ {_, :ok} <- {:containment, Containment.contain_origin(final_id, data)} do
+ unless Instances.reachable?(final_id) do
+ Instances.set_reachable(final_id)
end
{:ok, data}
else
+ {:strict_id, _} ->
+ {:error, "Object's ActivityPub id/url does not match final fetch URL"}
+
{:scheme, _} ->
{:error, "Unsupported URI scheme"}
+ {:local_fetch, _} ->
+ {:error, "Trying to fetch local resource"}
+
+ {:containment, _} ->
+ {:error, "Object containment failed."}
+
{:error, e} ->
{:error, e}
@@ -267,45 +294,80 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
def fetch_and_contain_remote_object_from_id(_id),
do: {:error, "id must be a string"}
+ defp check_crossdomain_redirect(final_host, original_url)
+
+ # HOPEFULLY TEMPORARY
+  # Basically none of our Tesla mocks in tests set the url parameter
+  # (which exists in Tesla proper) on their responses,
+  # causing almost every fetch in tests to fail otherwise
+ if @mix_env == :test do
+ defp check_crossdomain_redirect(nil, _) do
+ {:cross_domain_redirect, false}
+ end
+ end
+
+ defp check_crossdomain_redirect(final_host, original_url) do
+ {:cross_domain_redirect, final_host != URI.parse(original_url).host}
+ end
+
+ if @mix_env == :test do
+ defp get_final_id(nil, initial_url), do: initial_url
+ defp get_final_id("", initial_url), do: initial_url
+ end
+
+  defp get_final_id(final_url, _initial_url) do
+ final_url
+ end
+
+ @doc "Do NOT use; only public for use in tests"
def get_object(id) do
date = Pleroma.Signature.signed_date()
headers =
- [{"accept", "application/activity+json"}]
+ [
+ # The first is required by spec, the second provided as a fallback for buggy implementations
+ {"accept", "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""},
+ {"accept", "application/activity+json"}
+ ]
|> maybe_date_fetch(date)
|> sign_fetch(id, date)
- case HTTP.get(id, headers) do
- {:ok, %{body: body, status: code, headers: headers}} when code in 200..299 ->
- case List.keyfind(headers, "content-type", 0) do
- {_, content_type} ->
- case Plug.Conn.Utils.media_type(content_type) do
- {:ok, "application", "activity+json", _} ->
- {:ok, body}
+ with {:ok, %{body: body, status: code, headers: headers, url: final_url}}
+ when code in 200..299 <-
+ HTTP.get(id, headers),
+ remote_host <-
+ URI.parse(final_url).host,
+ {:cross_domain_redirect, false} <-
+ check_crossdomain_redirect(remote_host, id),
+ {:has_content_type, {_, content_type}} <-
+ {:has_content_type, List.keyfind(headers, "content-type", 0)},
+ {:parse_content_type, {:ok, "application", subtype, type_params}} <-
+ {:parse_content_type, Plug.Conn.Utils.media_type(content_type)} do
+ final_id = get_final_id(final_url, id)
- {:ok, "application", "ld+json",
- %{"profile" => "https://www.w3.org/ns/activitystreams"}} ->
- {:ok, body}
+ case {subtype, type_params} do
+ {"activity+json", _} ->
+ {:ok, final_id, body}
- # pixelfed sometimes (and only sometimes) responds with http instead of https
- {:ok, "application", "ld+json",
- %{"profile" => "http://www.w3.org/ns/activitystreams"}} ->
- {:ok, body}
-
- _ ->
- {:error, {:content_type, content_type}}
- end
-
- _ ->
- {:error, {:content_type, nil}}
- end
+ {"ld+json", %{"profile" => "https://www.w3.org/ns/activitystreams"}} ->
+ {:ok, final_id, body}
+ _ ->
+ {:error, {:content_type, content_type}}
+ end
+ else
{:ok, %{status: code}} when code in [404, 410] ->
{:error, {"Object has been deleted", id, code}}
{:error, e} ->
{:error, e}
+ {:has_content_type, _} ->
+ {:error, {:content_type, nil}}
+
+ {:parse_content_type, e} ->
+ {:error, {:content_type, e}}
+
e ->
{:error, e}
end
diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex
index bb4f4def3..f017bf51b 100644
--- a/lib/pleroma/reverse_proxy.ex
+++ b/lib/pleroma/reverse_proxy.ex
@@ -17,6 +17,8 @@ defmodule Pleroma.ReverseProxy do
@failed_request_ttl :timer.seconds(60)
@methods ~w(GET HEAD)
+ @allowed_mime_types Pleroma.Config.get([Pleroma.Upload, :allowed_mime_types], [])
+
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
def max_read_duration_default, do: @max_read_duration
@@ -253,6 +255,7 @@ defp build_resp_headers(headers, opts) do
headers
|> Enum.filter(fn {k, _} -> k in @keep_resp_headers end)
|> build_resp_cache_headers(opts)
+ |> sanitise_content_type()
|> build_resp_content_disposition_header(opts)
|> build_csp_headers()
|> Keyword.merge(Keyword.get(opts, :resp_headers, []))
@@ -282,6 +285,21 @@ defp build_resp_cache_headers(headers, _opts) do
end
end
+ defp sanitise_content_type(headers) do
+ original_ct = get_content_type(headers)
+
+ safe_ct =
+ Pleroma.Web.Plugs.Utils.get_safe_mime_type(
+ %{allowed_mime_types: @allowed_mime_types},
+ original_ct
+ )
+
+ [
+ {"content-type", safe_ct}
+ | Enum.filter(headers, fn {k, _v} -> k != "content-type" end)
+ ]
+ end
+
defp build_resp_content_disposition_header(headers, opts) do
opt = Keyword.get(opts, :inline_content_types, @inline_content_types)
diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex
index 99b6b5215..b6a32d87a 100644
--- a/lib/pleroma/upload.ex
+++ b/lib/pleroma/upload.ex
@@ -64,7 +64,7 @@ defmodule Pleroma.Upload do
path: String.t()
}
- @always_enabled_filters [Pleroma.Upload.Filter.AnonymizeFilename]
+ @always_enabled_filters [Pleroma.Upload.Filter.Dedupe]
defstruct [:id, :name, :tempfile, :content_type, :width, :height, :blurhash, :path]
@@ -235,7 +235,7 @@ def base_url do
case uploader do
Pleroma.Uploaders.Local ->
- upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
+ upload_base_url
Pleroma.Uploaders.S3 ->
bucket = Config.get([Pleroma.Uploaders.S3, :bucket])
@@ -261,7 +261,7 @@ def base_url do
end
_ ->
- public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
+ public_endpoint || upload_base_url
end
end
end
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index 8449af620..14414adc4 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -969,15 +969,16 @@ defp maybe_send_registration_email(%User{email: email} = user) when is_binary(em
defp maybe_send_registration_email(_), do: {:ok, :noop}
- def needs_update?(%User{local: true}), do: false
+ def needs_update?(user, options \\ [])
+ def needs_update?(%User{local: true}, _options), do: false
+ def needs_update?(%User{local: false, last_refreshed_at: nil}, _options), do: true
- def needs_update?(%User{local: false, last_refreshed_at: nil}), do: true
-
- def needs_update?(%User{local: false} = user) do
- NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86_400
+ def needs_update?(%User{local: false} = user, options) do
+ NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >=
+ Keyword.get(options, :maximum_age, 86_400)
end
- def needs_update?(_), do: true
+ def needs_update?(_, _options), do: true
# "Locked" (self-locked) users demand explicit authorization of follow requests
@spec can_direct_follow_local(User.t(), User.t()) :: true | false
@@ -1980,10 +1981,10 @@ def html_filter_policy(_), do: Config.get([:markup, :scrub_policy])
def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
- def get_or_fetch_by_ap_id(ap_id) do
+ def get_or_fetch_by_ap_id(ap_id, options \\ []) do
cached_user = get_cached_by_ap_id(ap_id)
- maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)
+ maybe_fetched_user = needs_update?(cached_user, options) && fetch_by_ap_id(ap_id)
case {cached_user, maybe_fetched_user} do
{_, {:ok, %User{} = user}} ->
diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex
index 4a8ce2d3d..1e06bc809 100644
--- a/lib/pleroma/web/activity_pub/activity_pub.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub.ex
@@ -22,6 +22,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.Upload
alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF
+ alias Pleroma.Web.ActivityPub.ObjectValidators.UserValidator
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Streamer
alias Pleroma.Web.WebFinger
@@ -1722,6 +1723,7 @@ def user_data_from_user_object(data, additional \\ []) do
def fetch_and_prepare_user_from_ap_id(ap_id, additional \\ []) do
with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
+ {:valid, {:ok, _, _}} <- {:valid, UserValidator.validate(data, [])},
{:ok, data} <- user_data_from_user_object(data, additional) do
{:ok, maybe_update_follow_information(data)}
else
@@ -1734,6 +1736,10 @@ def fetch_and_prepare_user_from_ap_id(ap_id, additional \\ []) do
Logger.debug("Rejected user #{ap_id}: #{inspect(reason)}")
{:error, e}
+ {:valid, reason} ->
+ Logger.debug("Data is not a valid user #{ap_id}: #{inspect(reason)}")
+ {:error, "Not a user"}
+
{:error, e} ->
Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
{:error, e}
@@ -1834,6 +1840,13 @@ def make_user_from_ap_id(ap_id, additional \\ []) do
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
+ user =
+ if data.ap_id != ap_id do
+ User.get_cached_by_ap_id(data.ap_id)
+ else
+ user
+ end
+
if user do
user
|> User.remote_user_changeset(data)
diff --git a/lib/pleroma/web/activity_pub/mrf/inline_quote_policy.ex b/lib/pleroma/web/activity_pub/mrf/inline_quote_policy.ex
index 20432410b..35682f994 100644
--- a/lib/pleroma/web/activity_pub/mrf/inline_quote_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/inline_quote_policy.ex
@@ -6,14 +6,29 @@ defmodule Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy do
@moduledoc "Force a quote line into the message content."
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
+ alias Pleroma.Object
+
defp build_inline_quote(prefix, url) do
"
#{prefix}: #{url}"
end
- defp has_inline_quote?(content, quote_url) do
+ defp resolve_urls(quote_url) do
+ # Fetching here can cause infinite recursion as we run this logic on inbound objects too
+ # This is probably not a problem - it's an exceptional corner case for a local user to quote
+ # a post which doesn't exist
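+ # Returns {canonical_id, preferred_display_url, all_urls}, where all_urls lists
+ # every form under which the quote might already appear in the content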
+ with %Object{} = obj <- Object.normalize(quote_url, fetch: false) do
+ id = obj.data["id"]
+ url = Map.get(obj.data, "url", id)
+ {id, url, [id, url, quote_url]}
+ else
+ _ -> {quote_url, quote_url, [quote_url]}
+ end
+ end
+
+ defp has_inline_quote?(content, urls) do
cond do
# Does the quote URL exist in the content?
- content =~ quote_url -> true
+ Enum.any?(urls, fn url -> content =~ url end) -> true
# Does the content already have a .quote-inline span?
content =~ "" -> true
# No inline quote found
@@ -22,18 +37,22 @@ defp has_inline_quote?(content, quote_url) do
end
defp filter_object(%{"quoteUri" => quote_url} = object) do
+ {id, preferred_url, all_urls} = resolve_urls(quote_url)
+ object = Map.put(object, "quoteUri", id)
+
content = object["content"] || ""
- if has_inline_quote?(content, quote_url) do
+ if has_inline_quote?(content, all_urls) do
object
else
prefix = Pleroma.Config.get([:mrf_inline_quote, :prefix])
content =
if String.ends_with?(content, "</p>") do
- String.trim_trailing(content, "</p>") <> build_inline_quote(prefix, quote_url) <> "</p>"
+ String.trim_trailing(content, "</p>") <>
+ build_inline_quote(prefix, preferred_url) <> "</p>"
else
- content <> build_inline_quote(prefix, quote_url)
+ content <> build_inline_quote(prefix, preferred_url)
end
Map.put(object, "content", content)
diff --git a/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex b/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex
index 02a107c27..26d3dc592 100644
--- a/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex
@@ -6,10 +6,54 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
require Logger
alias Pleroma.Config
+ alias Pleroma.Emoji.Pack
@moduledoc "Detect new emojis by their shortcode and steals them"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
+ @pack_name "stolen"
+
+ # Config defaults
+ @size_limit 50_000
+ @download_unknown_size false
+
+ defp create_pack() do
+ with {:ok, pack} = Pack.create(@pack_name) do
+ Pack.save_metadata(
+ %{
+ "description" => "Collection of emoji auto-stolen from other instances",
+ "homepage" => Pleroma.Web.Endpoint.url(),
+ "can-download" => false,
+ "share-files" => false
+ },
+ pack
+ )
+ end
+ end
+
+ defp load_or_create_pack() do
+ case Pack.load_pack(@pack_name) do
+ {:ok, pack} -> {:ok, pack}
+ {:error, :enoent} -> create_pack()
+ e -> e
+ end
+ end
+
+ defp add_emoji(shortcode, extension, filedata) do
+ {:ok, pack} = load_or_create_pack()
+ # Make final path infeasible to predict to thwart certain kinds of attacks
+ # (48 bits is slightly more than 8 base62 chars, thus 9 chars)
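+ # (62^8 ≈ 2.2e14 < 2^48 ≈ 2.8e14 < 62^9, hence up to 9 chars, zero-padded)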
+ salt =
+ :crypto.strong_rand_bytes(6)
+ |> :crypto.bytes_to_integer()
+ |> Base62.encode()
+ |> String.pad_leading(9, "0")
+
+ filename = shortcode <> "-" <> salt <> "." <> extension
+
+ Pack.add_file(pack, shortcode, filename, filedata)
+ end
+
defp accept_host?(host), do: host in Config.get([:mrf_steal_emoji, :hosts], [])
defp shortcode_matches?(shortcode, pattern) when is_binary(pattern) do
@@ -20,31 +64,69 @@ defp shortcode_matches?(shortcode, pattern) do
String.match?(shortcode, pattern)
end
- defp steal_emoji({shortcode, url}, emoji_dir_path) do
+ defp reject_emoji?({shortcode, _url}, installed_emoji) do
+ valid_shortcode? = String.match?(shortcode, ~r/^[a-zA-Z0-9_-]+$/)
+
+ rejected_shortcode? =
+ [:mrf_steal_emoji, :rejected_shortcodes]
+ |> Config.get([])
+ |> Enum.any?(fn pattern -> shortcode_matches?(shortcode, pattern) end)
+
+ emoji_installed? = Enum.member?(installed_emoji, shortcode)
+
+ !valid_shortcode? or rejected_shortcode? or emoji_installed?
+ end
+
+ defp steal_emoji(%{} = response, {shortcode, extension}) do
+ case add_emoji(shortcode, extension, response.body) do
+ {:ok, _} ->
+ shortcode
+
+ e ->
+ Logger.warning(
+ "MRF.StealEmojiPolicy: Failed to add #{shortcode} as #{extension}: #{inspect(e)}"
+ )
+
+ nil
+ end
+ end
+
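+ # Derive the extension from the response's Content-Type header rather than the URL;
+ # only image/* types are accepted, anything else yields nil and the emoji is skipped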
+ defp get_extension_if_safe(response) do
+ content_type =
+ :proplists.get_value("content-type", response.headers, MIME.from_path(response.url))
+
+ case content_type do
+ "image/" <> _ -> List.first(MIME.extensions(content_type))
+ _ -> nil
+ end
+ end
+
+ defp is_remote_size_within_limit?(url) do
+ with {:ok, %{status: status, headers: headers} = _response} when status in 200..299 <-
+ Pleroma.HTTP.request(:head, url, nil, [], []) do
+ content_length = :proplists.get_value("content-length", headers, nil)
+ size_limit = Config.get([:mrf_steal_emoji, :size_limit], @size_limit)
+
+ accept_unknown =
+ Config.get([:mrf_steal_emoji, :download_unknown_size], @download_unknown_size)
+
+ # header values are strings; parse before comparing against the numeric limit
+ case content_length && Integer.parse(content_length) do
+ {length, _} -> length <= size_limit
+ _ -> accept_unknown
+ end
+ else
+ _ -> false
+ end
+ end
+
+ defp maybe_steal_emoji({shortcode, url}) do
url = Pleroma.Web.MediaProxy.url(url)
- with {:ok, %{status: status} = response} when status in 200..299 <- Pleroma.HTTP.get(url) do
- size_limit = Config.get([:mrf_steal_emoji, :size_limit], 50_000)
+ with {:remote_size, true} <- {:remote_size, is_remote_size_within_limit?(url)},
+ {:ok, %{status: status} = response} when status in 200..299 <- Pleroma.HTTP.get(url) do
+ size_limit = Config.get([:mrf_steal_emoji, :size_limit], @size_limit)
+ extension = get_extension_if_safe(response)
- if byte_size(response.body) <= size_limit do
- extension =
- url
- |> URI.parse()
- |> Map.get(:path)
- |> Path.basename()
- |> Path.extname()
-
- shortcode = Path.basename(shortcode)
- file_path = Path.join(emoji_dir_path, shortcode <> (extension || ".png"))
-
- case File.write(file_path, response.body) do
- :ok ->
- shortcode
-
- e ->
- Logger.warning("MRF.StealEmojiPolicy: Failed to write to #{file_path}: #{inspect(e)}")
- nil
- end
+ if byte_size(response.body) <= size_limit and extension do
+ steal_emoji(response, {shortcode, extension})
else
Logger.debug(
"MRF.StealEmojiPolicy: :#{shortcode}: at #{url} (#{byte_size(response.body)} B) over size limit (#{size_limit} B)"
@@ -66,29 +148,10 @@ def filter(%{"object" => %{"emoji" => foreign_emojis, "actor" => actor}} = messa
if host != Pleroma.Web.Endpoint.host() and accept_host?(host) do
installed_emoji = Pleroma.Emoji.get_all() |> Enum.map(fn {k, _} -> k end)
- emoji_dir_path =
- Config.get(
- [:mrf_steal_emoji, :path],
- Path.join(Config.get([:instance, :static_dir]), "emoji/stolen")
- )
-
- File.mkdir_p(emoji_dir_path)
-
new_emojis =
foreign_emojis
- |> Enum.reject(fn {shortcode, _url} -> shortcode in installed_emoji end)
- |> Enum.reject(fn {shortcode, _url} ->
- String.contains?(shortcode, ["/", "\\", ".", ":"])
- end)
- |> Enum.filter(fn {shortcode, _url} ->
- reject_emoji? =
- [:mrf_steal_emoji, :rejected_shortcodes]
- |> Config.get([])
- |> Enum.find(false, fn pattern -> shortcode_matches?(shortcode, pattern) end)
-
- !reject_emoji?
- end)
- |> Enum.map(&steal_emoji(&1, emoji_dir_path))
+ |> Enum.reject(&reject_emoji?(&1, installed_emoji))
+ |> Enum.map(&maybe_steal_emoji(&1))
|> Enum.filter(& &1)
if !Enum.empty?(new_emojis) do
diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
index 4d1e80868..d1cd496db 100644
--- a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
@@ -53,6 +53,13 @@ def cast_data(data) do
defp fix_url(%{"url" => url} = data) when is_bitstring(url), do: data
defp fix_url(%{"url" => url} = data) when is_map(url), do: Map.put(data, "url", url["href"])
+
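+ # Some software sends "url" as a list (of strings or link maps); use the first entry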
+ defp fix_url(%{"url" => url} = data) when is_list(url) do
+ data
+ |> Map.put("url", List.first(url))
+ |> fix_url()
+ end
+
defp fix_url(data), do: data
defp fix_tag(%{"tag" => tag} = data) when is_list(tag) do
diff --git a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex
index 80ec65cd7..bda67feee 100644
--- a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
alias Pleroma.Emoji
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
+ alias Pleroma.Web.ActivityPub.Transmogrifier
import Ecto.Changeset
import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -52,6 +53,7 @@ def changeset(struct, data) do
defp fix(data) do
data =
data
+ |> Transmogrifier.fix_tag()
|> fix_emoji_qualification()
|> CommonFixes.fix_actor()
|> CommonFixes.fix_activity_addressing()
diff --git a/lib/pleroma/web/activity_pub/object_validators/user_validator.ex b/lib/pleroma/web/activity_pub/object_validators/user_validator.ex
new file mode 100644
index 000000000..90b5404f3
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/object_validators/user_validator.ex
@@ -0,0 +1,92 @@
+# Akkoma: Magically expressive social media
+# Copyright © 2024 Akkoma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.ObjectValidators.UserValidator do
+ @moduledoc """
+ Checks whether ActivityPub data represents a valid user
+
+ Users don't go through the same ingest pipeline as activities or other objects.
+ To ensure this can only match a user and no user matches in the other pipeline,
+ it is kept separate from the generic ObjectValidator.
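+
+ On success `validate/2` returns `{:ok, data, meta}`, otherwise `{:error, reason}`
+ (e.g. when the inbox or a collection sits on a different domain than the actor id,
+ or the key id does not belong to the actor).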
+ """
+
+ @behaviour Pleroma.Web.ActivityPub.ObjectValidator.Validating
+
+ alias Pleroma.Object.Containment
+ alias Pleroma.Signature
+
+ @impl true
+ def validate(object, meta)
+
+ def validate(%{"type" => type, "id" => _id} = data, meta)
+ when type in ["Person", "Organization", "Group", "Application"] do
+ with :ok <- validate_pubkey(data),
+ :ok <- validate_inbox(data),
+ :ok <- contain_collection_origin(data) do
+ {:ok, data, meta}
+ else
+ {:error, e} -> {:error, e}
+ e -> {:error, e}
+ end
+ end
+
+ def validate(_, _), do: {:error, "Not a user object"}
+
+ defp maybe_validate_owner(nil, _actor), do: :ok
+ defp maybe_validate_owner(actor, actor), do: :ok
+ defp maybe_validate_owner(_owner, _actor), do: :error
+
+ defp validate_pubkey(
+ %{"id" => id, "publicKey" => %{"id" => pk_id, "publicKeyPem" => _key}} = data
+ )
+ when id != nil do
+ with {_, {:ok, kactor}} <- {:key, Signature.key_id_to_actor_id(pk_id)},
+ true <- id == kactor,
+ :ok <- maybe_validate_owner(Map.get(data, "owner"), id) do
+ :ok
+ else
+ {:key, _} ->
+ {:error, "Unable to determine actor id from key id"}
+
+ false ->
+ {:error, "Key id does not relate to user id"}
+
+ _ ->
+ {:error, "Actor does not own its public key"}
+ end
+ end
+
+ # pubkey is optional atm
+ defp validate_pubkey(_data), do: :ok
+
+ defp validate_inbox(%{"id" => id, "inbox" => inbox}) do
+ case Containment.same_origin(id, inbox) do
+ :ok -> :ok
+ :error -> {:error, "Inbox on different doamin"}
+ end
+ end
+
+ defp validate_inbox(_), do: {:error, "No inbox"}
+
+ defp check_field_value(%{"id" => id} = _data, value) do
+ Containment.same_origin(id, value)
+ end
+
+ defp maybe_check_field(data, field) do
+ with val when val != nil <- data[field],
+ :ok <- check_field_value(data, val) do
+ :ok
+ else
+ nil -> :ok
+ _ -> {:error, "#{field} on different domain"}
+ end
+ end
+
+ defp contain_collection_origin(data) do
+ Enum.reduce(["followers", "following", "featured"], :ok, fn
+ field, :ok -> maybe_check_field(data, field)
+ _, error -> error
+ end)
+ end
+end
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index 435343bf0..a72a431b2 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -576,7 +576,12 @@ def handle_incoming(
_options
) do
with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
- {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
+ # Use a dramatically shortened maximum age before refresh here because it is reasonable
+ # for a user to
+ # 1. Add the alias to their new account and then
+ # 2. Press the button on their new account
+ # within a very short period of time and expect it to work
+ {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor, maximum_age: 5),
true <- origin_actor in target_user.also_known_as do
ActivityPub.move(origin_user, target_user, false)
else
diff --git a/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex b/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex
index 456ab14db..0f202201d 100644
--- a/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex
@@ -150,7 +150,7 @@ def update_notificaton_settings_operation do
"removes the contents of a message from the push notification"
)
],
- requestBody: nil,
+ requestBody: request_body("Parameters", update_notification_settings_request()),
responses: %{
200 =>
Operation.response("Success", "application/json", %Schema{
@@ -432,4 +432,22 @@ defp delete_account_request do
}
}
end
+
+ defp update_notification_settings_request do
+ %Schema{
+ title: "UpdateNotificationSettings",
+ description: "PUT paramenters (query, form or JSON) for updating notification settings",
+ type: :object,
+ properties: %{
+ block_from_strangers: %Schema{
+ type: :boolean,
+ description: "blocks notifications from accounts you do not follow"
+ },
+ hide_notification_contents: %Schema{
+ type: :boolean,
+ description: "removes the contents of a message from the push notification"
+ }
+ }
+ }
+ end
end
diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex
index 64593767d..6628fcaf3 100644
--- a/lib/pleroma/web/endpoint.ex
+++ b/lib/pleroma/web/endpoint.ex
@@ -98,6 +98,10 @@ defmodule Pleroma.Web.Endpoint do
at: "/",
from: :pleroma,
only: Pleroma.Web.static_paths(),
+ # JSON-LD is accepted by some servers for AP objects and activities,
+ # thus only enable it here instead of a global extension mapping
+ # (it's our only *.jsonld file anyway)
+ content_types: %{"litepub-0.1.jsonld" => "application/ld+json"},
# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
gzip: true,
cache_control_for_etags: @static_cache_control,
diff --git a/lib/pleroma/web/media_proxy.ex b/lib/pleroma/web/media_proxy.ex
index c5087c42c..61b6f2a62 100644
--- a/lib/pleroma/web/media_proxy.ex
+++ b/lib/pleroma/web/media_proxy.ex
@@ -145,7 +145,7 @@ def filename(url_or_path) do
end
def base_url do
- Config.get([:media_proxy, :base_url], Endpoint.url())
+ Config.get!([:media_proxy, :base_url])
end
defp proxy_url(path, sig_base64, url_base64, filename) do
diff --git a/lib/pleroma/web/metadata/providers/open_graph.ex b/lib/pleroma/web/metadata/providers/open_graph.ex
index df0cca74a..27e761bc2 100644
--- a/lib/pleroma/web/metadata/providers/open_graph.ex
+++ b/lib/pleroma/web/metadata/providers/open_graph.ex
@@ -12,14 +12,38 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do
@behaviour Provider
@media_types ["image", "audio", "video"]
+ defp user_avatar_tags(user) do
+ if Utils.visible?(user) do
+ [
+ {:meta, [property: "og:image", content: MediaProxy.preview_url(User.avatar_url(user))],
+ []},
+ {:meta, [property: "og:image:width", content: 150], []},
+ {:meta, [property: "og:image:height", content: 150], []}
+ ]
+ else
+ []
+ end
+ end
+
@impl Provider
def build_tags(%{
object: object,
url: url,
user: user
}) do
- attachments = build_attachments(object)
- scrubbed_content = Utils.scrub_html_and_truncate(object)
+ attachments =
+ if Utils.visible?(object) do
+ build_attachments(object)
+ else
+ []
+ end
+
+ scrubbed_content =
+ if Utils.visible?(object) do
+ Utils.scrub_html_and_truncate(object)
+ else
+ "Content cannot be displayed."
+ end
[
{:meta,
@@ -36,12 +60,7 @@ def build_tags(%{
{:meta, [property: "og:type", content: "article"], []}
] ++
if attachments == [] or Metadata.activity_nsfw?(object) do
- [
- {:meta, [property: "og:image", content: MediaProxy.preview_url(User.avatar_url(user))],
- []},
- {:meta, [property: "og:image:width", content: 150], []},
- {:meta, [property: "og:image:height", content: 150], []}
- ]
+ user_avatar_tags(user)
else
attachments
end
@@ -49,7 +68,9 @@ def build_tags(%{
@impl Provider
def build_tags(%{user: user}) do
- with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
+ if Utils.visible?(user) do
+ truncated_bio = Utils.scrub_html_and_truncate(user.bio)
+
[
{:meta,
[
@@ -58,12 +79,10 @@ def build_tags(%{user: user}) do
], []},
{:meta, [property: "og:url", content: user.uri || user.ap_id], []},
{:meta, [property: "og:description", content: truncated_bio], []},
- {:meta, [property: "og:type", content: "article"], []},
- {:meta, [property: "og:image", content: MediaProxy.preview_url(User.avatar_url(user))],
- []},
- {:meta, [property: "og:image:width", content: 150], []},
- {:meta, [property: "og:image:height", content: 150], []}
- ]
+ {:meta, [property: "og:type", content: "article"], []}
+ ] ++ user_avatar_tags(user)
+ else
+ []
end
end
diff --git a/lib/pleroma/web/metadata/providers/twitter_card.ex b/lib/pleroma/web/metadata/providers/twitter_card.ex
index ab48ea272..c6d8464e7 100644
--- a/lib/pleroma/web/metadata/providers/twitter_card.ex
+++ b/lib/pleroma/web/metadata/providers/twitter_card.ex
@@ -17,8 +17,19 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do
@impl Provider
def build_tags(%{activity_id: id, object: object, user: user}) do
- attachments = build_attachments(id, object)
- scrubbed_content = Utils.scrub_html_and_truncate(object)
+ attachments =
+ if Utils.visible?(object) do
+ build_attachments(id, object)
+ else
+ []
+ end
+
+ scrubbed_content =
+ if Utils.visible?(object) do
+ Utils.scrub_html_and_truncate(object)
+ else
+ "Content cannot be displayed."
+ end
[
title_tag(user),
@@ -36,13 +47,17 @@ def build_tags(%{activity_id: id, object: object, user: user}) do
@impl Provider
def build_tags(%{user: user}) do
- with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
- [
- title_tag(user),
- {:meta, [name: "twitter:description", content: truncated_bio], []},
- image_tag(user),
- {:meta, [name: "twitter:card", content: "summary"], []}
- ]
+ if Utils.visible?(user) do
+ with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
+ [
+ title_tag(user),
+ {:meta, [name: "twitter:description", content: truncated_bio], []},
+ image_tag(user),
+ {:meta, [name: "twitter:card", content: "summary"], []}
+ ]
+ end
+ else
+ []
end
end
@@ -51,7 +66,11 @@ defp title_tag(user) do
end
def image_tag(user) do
- {:meta, [name: "twitter:image", content: MediaProxy.preview_url(User.avatar_url(user))], []}
+ if Utils.visible?(user) do
+ {:meta, [name: "twitter:image", content: MediaProxy.preview_url(User.avatar_url(user))], []}
+ else
+ {:meta, [name: "twitter:image", content: ""], []}
+ end
end
defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
diff --git a/lib/pleroma/web/metadata/utils.ex b/lib/pleroma/web/metadata/utils.ex
index 92622ef15..55855139b 100644
--- a/lib/pleroma/web/metadata/utils.ex
+++ b/lib/pleroma/web/metadata/utils.ex
@@ -7,6 +7,15 @@ defmodule Pleroma.Web.Metadata.Utils do
alias Pleroma.Emoji
alias Pleroma.Formatter
alias Pleroma.HTML
+ alias Pleroma.Web.ActivityPub.Visibility
+
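+ # Whether a user or object may be shown to unauthenticated viewers;
+ # the metadata providers use this to decide if previews may embed real content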
+ def visible?(%Pleroma.User{} = object) do
+ Visibility.restrict_unauthenticated_access?(object) == :visible
+ end
+
+ def visible?(object) do
+ Visibility.visible_for_user?(object, nil)
+ end
defp scrub_html_and_truncate_object_field(field, object) do
field
diff --git a/lib/pleroma/web/plugs/instance_static.ex b/lib/pleroma/web/plugs/instance_static.ex
index 5f9a6ee83..b72b604a1 100644
--- a/lib/pleroma/web/plugs/instance_static.ex
+++ b/lib/pleroma/web/plugs/instance_static.ex
@@ -3,8 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.InstanceStatic do
+ import Plug.Conn
+
require Pleroma.Constants
+ alias Pleroma.Web.Plugs.Utils
+
@moduledoc """
This is a shim to call `Plug.Static` but with runtime `from` configuration.
@@ -43,11 +47,25 @@ def call(conn, _) do
conn
end
- defp call_static(conn, opts, from) do
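+ # Emoji files may originate from remote packs (e.g. StealEmoji), so only serve
+ # them with image/* content types; anything else is downgraded to
+ # application/octet-stream via Plugs.Utils.get_safe_mime_type/2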
+ defp set_static_content_type(conn, "/emoji/" <> _ = request_path) do
+ real_mime = MIME.from_path(request_path)
+ safe_mime = Utils.get_safe_mime_type(%{allowed_mime_types: ["image"]}, real_mime)
+
+ put_resp_header(conn, "content-type", safe_mime)
+ end
+
+ defp set_static_content_type(conn, request_path) do
+ put_resp_header(conn, "content-type", MIME.from_path(request_path))
+ end
+
+ defp call_static(%{request_path: request_path} = conn, opts, from) do
opts =
opts
|> Map.put(:from, from)
+ |> Map.put(:set_content_type, false)
- Plug.Static.call(conn, opts)
+ conn
+ |> set_static_content_type(request_path)
+ |> Pleroma.Web.Plugs.StaticNoCT.call(opts)
end
end
diff --git a/lib/pleroma/web/plugs/static_no_content_type.ex b/lib/pleroma/web/plugs/static_no_content_type.ex
new file mode 100644
index 000000000..ea00a2d5d
--- /dev/null
+++ b/lib/pleroma/web/plugs/static_no_content_type.ex
@@ -0,0 +1,469 @@
+# This is almost identical to Plug.Static from Plug 1.15.3 (2024-01-16)
+# Copying it is a temporary measure to fix an urgent bug without needing
+# to wait for a suitable patch to be merged upstream
+# The differences are:
+# - this leading comment
+# - renaming of the module from 'Plug.Static' to 'Pleroma.Web.Plugs.StaticNoCT'
+# - addition of the set_content_type option
+
+defmodule Pleroma.Web.Plugs.StaticNoCT do
+ @moduledoc """
+ A plug for serving static assets.
+
+ It requires two options:
+
+ * `:at` - the request path to reach for static assets.
+ It must be a string.
+
+ * `:from` - the file system path to read static assets from.
+ It can be either: a string containing a file system path, an
+ atom representing the application name (where assets will
+ be served from `priv/static`), a tuple containing the
+ application name and the directory to serve assets from (besides
+ `priv/static`), or an MFA tuple.
+
+ The preferred form is to use `:from` with an atom or tuple, since
+ it will make your application independent from the starting directory.
+ For example, if you pass:
+
+ plug Plug.Static, from: "priv/app/path"
+
+ Plug.Static will be unable to serve assets if you build releases
+ or if you change the current directory. Instead do:
+
+ plug Plug.Static, from: {:app_name, "priv/app/path"}
+
+ If a static asset cannot be found, `Plug.Static` simply forwards
+ the connection to the rest of the pipeline.
+
+ ## Cache mechanisms
+
+ `Plug.Static` uses etags for HTTP caching. This means browsers/clients
+ should cache assets on the first request and validate the cache on
+ following requests, not downloading the static asset once again if it
+ has not changed. The cache-control for etags is specified by the
+ `cache_control_for_etags` option and defaults to `"public"`.
+
+ However, `Plug.Static` also supports direct cache control by using
+ versioned query strings. If the request query string starts with
+ "?vsn=", `Plug.Static` assumes the application is versioning assets
+ and does not set the `ETag` header, meaning the cache behaviour will
+ be specified solely by the `cache_control_for_vsn_requests` config,
+ which defaults to `"public, max-age=31536000"`.
+
+ ## Options
+
+ * `:encodings` - list of 2-ary tuples where first value is value of
+ the `Accept-Encoding` header and second is extension of the file to
+ be served if given encoding is accepted by client. Entries will be tested
+ in order in list, so entries higher in list will be preferred. Defaults
+ to: `[]`.
+
+ In addition to setting this value directly it supports 2 additional
+ options for compatibility reasons:
+
+ + `:brotli` - will append `{"br", ".br"}` to the encodings list.
+ + `:gzip` - will append `{"gzip", ".gz"}` to the encodings list.
+
+ Additional options will be added in the above order (Brotli takes
+ preference over Gzip) to reflect older behaviour which was set due
+ to the fact that Brotli in general provides a better compression ratio than
+ Gzip.
+
+ * `:cache_control_for_etags` - sets the cache header for requests
+ that use etags. Defaults to `"public"`.
+
+ * `:etag_generation` - specify a `{module, function, args}` to be used
+ to generate an etag. The `path` of the resource will be passed to
+ the function, as well as the `args`. If this option is not supplied,
+ etags will be generated based off of file size and modification time.
+ Note it is [recommended for the etag value to be quoted](https://tools.ietf.org/html/rfc7232#section-2.3),
+ which Plug won't do automatically.
+
+ * `:cache_control_for_vsn_requests` - sets the cache header for
+ requests starting with "?vsn=" in the query string. Defaults to
+ `"public, max-age=31536000"`.
+
+ * `:only` - filters which requests to serve. This is useful to avoid
+ file system access on every request when this plug is mounted
+ at `"/"`. For example, if `only: ["images", "favicon.ico"]` is
+ specified, only files in the "images" directory and the
+ "favicon.ico" file will be served by `Plug.Static`.
+ Note that `Plug.Static` matches these filters against request
+ uri and not against the filesystem. When requesting
+ a file with name containing non-ascii or special characters,
+ you should use urlencoded form. For example, you should write
+ `only: ["file%20name"]` instead of `only: ["file name"]`.
+ Defaults to `nil` (no filtering).
+
+ * `:only_matching` - a relaxed version of `:only` that will
+ serve any request as long as one of the given values matches the
+ given path. For example, `only_matching: ["images", "favicon"]`
+ will match any request that starts at "images" or "favicon",
+ be it "/images/foo.png", "/images-high/foo.png", "/favicon.ico"
+ or "/favicon-high.ico". Such matches are useful when serving
+ digested files at the root. Defaults to `nil` (no filtering).
+
+ * `:headers` - other headers to be set when serving static assets. Specify either
+ an enum of key-value pairs or a `{module, function, args}` to return an enum. The
+ `conn` will be passed to the function, as well as the `args`.
+
+ * `:content_types` - custom MIME type mapping. As a map with filename as key
+ and content type as value. For example:
+ `content_types: %{"apple-app-site-association" => "application/json"}`.
+
+ * `:set_content_type` - by default Plug.Static (re)sets the content type header
+ using auto-detection and the `:content_types` map. When set to `false`,
+ no content-type header will be inserted; instead the original value (or lack
+ thereof) is retained. This can be useful when custom logic for appropriate
+ content types is needed which cannot reasonably be expressed as a static
+ filename map.
+
+ ## Examples
+
+ This plug can be mounted in a `Plug.Builder` pipeline as follows:
+
+ defmodule MyPlug do
+ use Plug.Builder
+
+ plug Plug.Static,
+ at: "/public",
+ from: :my_app,
+ only: ~w(images robots.txt)
+ plug :not_found
+
+ def not_found(conn, _) do
+ send_resp(conn, 404, "not found")
+ end
+ end
+
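+ To keep a content-type header that was set earlier in the pipeline (the reason
+ this copy exists), pass `set_content_type: false`; the paths below are purely
+ illustrative:
+
+     plug Pleroma.Web.Plugs.StaticNoCT,
+       at: "/media",
+       from: "/var/lib/akkoma/uploads",
+       set_content_type: false
+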
+ """
+
+ @behaviour Plug
+ @allowed_methods ~w(GET HEAD)
+
+ import Plug.Conn
+ alias Plug.Conn
+
+ # In this module, the `:prim_file` Erlang module along with the `:file_info`
+ # record are used instead of the more common and Elixir-y `File` module and
+ # `File.Stat` struct, respectively. The reason behind this is performance: all
+ # the `File` operations pass through a single process in order to support node
+ # operations that we simply don't need when serving assets.
+
+ require Record
+ Record.defrecordp(:file_info, Record.extract(:file_info, from_lib: "kernel/include/file.hrl"))
+
+ defmodule InvalidPathError do
+ defexception message: "invalid path for static asset", plug_status: 400
+ end
+
+ @impl true
+ def init(opts) do
+ from =
+ case Keyword.fetch!(opts, :from) do
+ {_, _} = from -> from
+ {_, _, _} = from -> from
+ from when is_atom(from) -> {from, "priv/static"}
+ from when is_binary(from) -> from
+ _ -> raise ArgumentError, ":from must be an atom, a binary or a tuple"
+ end
+
+ encodings =
+ opts
+ |> Keyword.get(:encodings, [])
+ |> maybe_add("br", ".br", Keyword.get(opts, :brotli, false))
+ |> maybe_add("gzip", ".gz", Keyword.get(opts, :gzip, false))
+
+ %{
+ encodings: encodings,
+ only_rules: {Keyword.get(opts, :only, []), Keyword.get(opts, :only_matching, [])},
+ qs_cache: Keyword.get(opts, :cache_control_for_vsn_requests, "public, max-age=31536000"),
+ et_cache: Keyword.get(opts, :cache_control_for_etags, "public"),
+ et_generation: Keyword.get(opts, :etag_generation, nil),
+ headers: Keyword.get(opts, :headers, %{}),
+ content_types: Keyword.get(opts, :content_types, %{}),
+ set_content_type: Keyword.get(opts, :set_content_type, true),
+ from: from,
+ at: opts |> Keyword.fetch!(:at) |> Plug.Router.Utils.split()
+ }
+ end
+
+ @impl true
+ def call(
+ conn = %Conn{method: meth},
+ %{at: at, only_rules: only_rules, from: from, encodings: encodings} = options
+ )
+ when meth in @allowed_methods do
+ segments = subset(at, conn.path_info)
+
+ if allowed?(only_rules, segments) do
+ segments = Enum.map(segments, &uri_decode/1)
+
+ if invalid_path?(segments) do
+ raise InvalidPathError, "invalid path for static asset: #{conn.request_path}"
+ end
+
+ path = path(from, segments)
+ range = get_req_header(conn, "range")
+ encoding = file_encoding(conn, path, range, encodings)
+ serve_static(encoding, conn, segments, range, options)
+ else
+ conn
+ end
+ end
+
+ def call(conn, _options) do
+ conn
+ end
+
+ defp uri_decode(path) do
+ # TODO: Remove rescue as this can't fail from Elixir v1.13
+ try do
+ URI.decode(path)
+ rescue
+ ArgumentError ->
+ raise InvalidPathError
+ end
+ end
+
+ defp allowed?(_only_rules, []), do: false
+ defp allowed?({[], []}, _list), do: true
+
+ defp allowed?({full, prefix}, [h | _]) do
+ h in full or (prefix != [] and match?({0, _}, :binary.match(h, prefix)))
+ end
+
+ defp maybe_put_content_type(conn, false, _, _), do: conn
+
+ defp maybe_put_content_type(conn, _, types, filename) do
+ content_type = Map.get(types, filename) || MIME.from_path(filename)
+
+ conn
+ |> put_resp_header("content-type", content_type)
+ end
+
+ defp serve_static({content_encoding, file_info, path}, conn, segments, range, options) do
+ %{
+ qs_cache: qs_cache,
+ et_cache: et_cache,
+ et_generation: et_generation,
+ headers: headers,
+ content_types: types,
+ set_content_type: set_content_type
+ } = options
+
+ case put_cache_header(conn, qs_cache, et_cache, et_generation, file_info, path) do
+ {:stale, conn} ->
+ filename = List.last(segments)
+
+ conn
+ |> maybe_put_content_type(set_content_type, types, filename)
+ |> put_resp_header("accept-ranges", "bytes")
+ |> maybe_add_encoding(content_encoding)
+ |> merge_headers(headers)
+ |> serve_range(file_info, path, range, options)
+
+ {:fresh, conn} ->
+ conn
+ |> maybe_add_vary(options)
+ |> send_resp(304, "")
+ |> halt()
+ end
+ end
+
+ defp serve_static(:error, conn, _segments, _range, _options) do
+ conn
+ end
+
+ defp serve_range(conn, file_info, path, [range], options) do
+ file_info(size: file_size) = file_info
+
+ with %{"bytes" => bytes} <- Plug.Conn.Utils.params(range),
+ {range_start, range_end} <- start_and_end(bytes, file_size) do
+ send_range(conn, path, range_start, range_end, file_size, options)
+ else
+ _ -> send_entire_file(conn, path, options)
+ end
+ end
+
+ defp serve_range(conn, _file_info, path, _range, options) do
+ send_entire_file(conn, path, options)
+ end
+
+ defp start_and_end("-" <> rest, file_size) do
+ case Integer.parse(rest) do
+ {last, ""} when last > 0 and last <= file_size -> {file_size - last, file_size - 1}
+ _ -> :error
+ end
+ end
+
+ defp start_and_end(range, file_size) do
+ case Integer.parse(range) do
+ {first, "-"} when first >= 0 ->
+ {first, file_size - 1}
+
+ {first, "-" <> rest} when first >= 0 ->
+ case Integer.parse(rest) do
+ {last, ""} when last >= first -> {first, min(last, file_size - 1)}
+ _ -> :error
+ end
+
+ _ ->
+ :error
+ end
+ end
+
+ defp send_range(conn, path, 0, range_end, file_size, options) when range_end == file_size - 1 do
+ send_entire_file(conn, path, options)
+ end
+
+ defp send_range(conn, path, range_start, range_end, file_size, _options) do
+ length = range_end - range_start + 1
+
+ conn
+ |> put_resp_header("content-range", "bytes #{range_start}-#{range_end}/#{file_size}")
+ |> send_file(206, path, range_start, length)
+ |> halt()
+ end
+
+ defp send_entire_file(conn, path, options) do
+ conn
+ |> maybe_add_vary(options)
+ |> send_file(200, path)
+ |> halt()
+ end
+
+ defp maybe_add_encoding(conn, nil), do: conn
+ defp maybe_add_encoding(conn, ce), do: put_resp_header(conn, "content-encoding", ce)
+
+ defp maybe_add_vary(conn, %{encodings: encodings}) do
+ # If we serve gzip or brotli at any moment, we need to set the proper vary
+ # header regardless of whether we are serving gzip content right now.
+ # See: http://www.fastly.com/blog/best-practices-for-using-the-vary-header/
+ if encodings != [] do
+ update_in(conn.resp_headers, &[{"vary", "Accept-Encoding"} | &1])
+ else
+ conn
+ end
+ end
+
+ defp put_cache_header(
+ %Conn{query_string: "vsn=" <> _} = conn,
+ qs_cache,
+ _et_cache,
+ _et_generation,
+ _file_info,
+ _path
+ )
+ when is_binary(qs_cache) do
+ {:stale, put_resp_header(conn, "cache-control", qs_cache)}
+ end
+
+ defp put_cache_header(conn, _qs_cache, et_cache, et_generation, file_info, path)
+ when is_binary(et_cache) do
+ etag = etag_for_path(file_info, et_generation, path)
+
+ conn =
+ conn
+ |> put_resp_header("cache-control", et_cache)
+ |> put_resp_header("etag", etag)
+
+ if etag in get_req_header(conn, "if-none-match") do
+ {:fresh, conn}
+ else
+ {:stale, conn}
+ end
+ end
+
+ defp put_cache_header(conn, _, _, _, _, _) do
+ {:stale, conn}
+ end
+
+ defp etag_for_path(file_info, et_generation, path) do
+ case et_generation do
+ {module, function, args} ->
+ apply(module, function, [path | args])
+
+ nil ->
+ file_info(size: size, mtime: mtime) = file_info
+ <", {size, mtime} |> :erlang.phash2() |> Integer.to_string(16)::binary, ?">>
+ end
+ end
+
+ defp file_encoding(conn, path, [_range], _encodings) do
+ # We do not support compression for range queries.
+ file_encoding(conn, path, nil, [])
+ end
+
+ defp file_encoding(conn, path, _range, encodings) do
+ encoded =
+ Enum.find_value(encodings, fn {encoding, ext} ->
+ if file_info = accept_encoding?(conn, encoding) && regular_file_info(path <> ext) do
+ {encoding, file_info, path <> ext}
+ end
+ end)
+
+ cond do
+ not is_nil(encoded) ->
+ encoded
+
+ file_info = regular_file_info(path) ->
+ {nil, file_info, path}
+
+ true ->
+ :error
+ end
+ end
+
+ defp regular_file_info(path) do
+ case :prim_file.read_file_info(path) do
+ {:ok, file_info(type: :regular) = file_info} ->
+ file_info
+
+ _ ->
+ nil
+ end
+ end
+
+ defp accept_encoding?(conn, encoding) do
+ encoding? = &String.contains?(&1, [encoding, "*"])
+
+ Enum.any?(get_req_header(conn, "accept-encoding"), fn accept ->
+ accept |> Plug.Conn.Utils.list() |> Enum.any?(encoding?)
+ end)
+ end
+
+ defp maybe_add(list, key, value, true), do: list ++ [{key, value}]
+ defp maybe_add(list, _key, _value, false), do: list
+
+ defp path({module, function, arguments}, segments)
+ when is_atom(module) and is_atom(function) and is_list(arguments),
+ do: Enum.join([apply(module, function, arguments) | segments], "/")
+
+ defp path({app, from}, segments) when is_atom(app) and is_binary(from),
+ do: Enum.join([Application.app_dir(app), from | segments], "/")
+
+ defp path(from, segments),
+ do: Enum.join([from | segments], "/")
+
+ defp subset([h | expected], [h | actual]), do: subset(expected, actual)
+ defp subset([], actual), do: actual
+ defp subset(_, _), do: []
+
+ defp invalid_path?(list) do
+ invalid_path?(list, :binary.compile_pattern(["/", "\\", ":", "\0"]))
+ end
+
+ defp invalid_path?([h | _], _match) when h in [".", "..", ""], do: true
+ defp invalid_path?([h | t], match), do: String.contains?(h, match) or invalid_path?(t)
+ defp invalid_path?([], _match), do: false
+
+ defp merge_headers(conn, {module, function, args}) do
+ merge_headers(conn, apply(module, function, [conn | args]))
+ end
+
+ defp merge_headers(conn, headers) do
+ merge_resp_headers(conn, headers)
+ end
+end
diff --git a/lib/pleroma/web/plugs/uploaded_media.ex b/lib/pleroma/web/plugs/uploaded_media.ex
index 300c33068..746203087 100644
--- a/lib/pleroma/web/plugs/uploaded_media.ex
+++ b/lib/pleroma/web/plugs/uploaded_media.ex
@@ -11,6 +11,7 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do
require Logger
alias Pleroma.Web.MediaProxy
+ alias Pleroma.Web.Plugs.Utils
@behaviour Plug
# no slashes
@@ -28,10 +29,21 @@ def init(_opts) do
|> Keyword.put(:at, "/__unconfigured_media_plug")
|> Plug.Static.init()
- %{static_plug_opts: static_plug_opts}
+ config = Pleroma.Config.get(Pleroma.Upload)
+ allowed_mime_types = Keyword.fetch!(config, :allowed_mime_types)
+ uploader = Keyword.fetch!(config, :uploader)
+
+ %{
+ static_plug_opts: static_plug_opts,
+ allowed_mime_types: allowed_mime_types,
+ uploader: uploader
+ }
end
- def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
+ def call(
+ %{request_path: <<"/", @path, "/", file::binary>>} = conn,
+ %{uploader: uploader} = opts
+ ) do
conn =
case fetch_query_params(conn) do
%{query_params: %{"name" => name}} = conn ->
@@ -44,10 +56,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
end
|> merge_resp_headers([{"content-security-policy", "sandbox"}])
- config = Pleroma.Config.get(Pleroma.Upload)
-
- with uploader <- Keyword.fetch!(config, :uploader),
- {:ok, get_method} <- uploader.get_file(file),
+ with {:ok, get_method} <- uploader.get_file(file),
false <- media_is_banned(conn, get_method) do
get_media(conn, get_method, opts)
else
@@ -68,13 +77,23 @@ defp media_is_banned(_, {:url, url}), do: MediaProxy.in_banned_urls(url)
defp media_is_banned(_, _), do: false
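+ # Serve uploads with a content-type restricted to the configured allowed main
+ # types; anything else is downgraded to application/octet-stream (see Plugs.Utils)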
+ defp set_content_type(conn, opts, filepath) do
+ real_mime = MIME.from_path(filepath)
+ clean_mime = Utils.get_safe_mime_type(opts, real_mime)
+ put_resp_header(conn, "content-type", clean_mime)
+ end
+
defp get_media(conn, {:static_dir, directory}, opts) do
static_opts =
Map.get(opts, :static_plug_opts)
|> Map.put(:at, [@path])
|> Map.put(:from, directory)
+ |> Map.put(:set_content_type, false)
- conn = Plug.Static.call(conn, static_opts)
+ conn =
+ conn
+ |> set_content_type(opts, conn.request_path)
+ |> Pleroma.Web.Plugs.StaticNoCT.call(static_opts)
if conn.halted do
conn
diff --git a/lib/pleroma/web/plugs/utils.ex b/lib/pleroma/web/plugs/utils.ex
new file mode 100644
index 000000000..770a3eeb2
--- /dev/null
+++ b/lib/pleroma/web/plugs/utils.ex
@@ -0,0 +1,14 @@
+# Akkoma: Magically expressive social media
+# Copyright © 2024 Akkoma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Plugs.Utils do
+ @moduledoc """
+ Some helper functions shared across several plugs
+ """
+
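+ # e.g. get_safe_mime_type(%{allowed_mime_types: ["image"]}, "image/png") #=> "image/png"
+ #      get_safe_mime_type(%{allowed_mime_types: ["image"]}, "text/html") #=> "application/octet-stream"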
+ def get_safe_mime_type(%{allowed_mime_types: allowed_mime_types} = _opts, mime) do
+ [maintype | _] = String.split(mime, "/", parts: 2)
+ if maintype in allowed_mime_types, do: mime, else: "application/octet-stream"
+ end
+end
diff --git a/lib/pleroma/web/twitter_api/controllers/util_controller.ex b/lib/pleroma/web/twitter_api/controllers/util_controller.ex
index f2b571fff..5f1a3518d 100644
--- a/lib/pleroma/web/twitter_api/controllers/util_controller.ex
+++ b/lib/pleroma/web/twitter_api/controllers/util_controller.ex
@@ -184,7 +184,13 @@ def emoji(conn, _params) do
json(conn, emoji)
end
- def update_notificaton_settings(%{assigns: %{user: user}} = conn, params) do
+ def update_notificaton_settings(
+ %{assigns: %{user: user}, body_params: body_params} = conn,
+ params
+ ) do
+ # OpenApiSpex 3.x prevents Plug's usual parameter premerging
+ params = Map.merge(params, body_params)
+
with {:ok, _} <- User.update_notification_settings(user, params) do
json(conn, %{status: "success"})
end
diff --git a/lib/pleroma/web/web_finger.ex b/lib/pleroma/web/web_finger.ex
index 9be8ef50e..9d5efbb3e 100644
--- a/lib/pleroma/web/web_finger.ex
+++ b/lib/pleroma/web/web_finger.ex
@@ -65,7 +65,7 @@ defp gather_links(%User{} = user) do
end
defp gather_aliases(%User{} = user) do
- [user.ap_id | user.also_known_as]
+ [user.ap_id]
end
def represent_user(user, "JSON") do
diff --git a/mix.exs b/mix.exs
index 9b718da9f..fd955ace9 100644
--- a/mix.exs
+++ b/mix.exs
@@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
def project do
[
app: :pleroma,
- version: version("3.11.0"),
+ version: version("3.12.2"),
elixir: "~> 1.14",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: Mix.compilers(),
@@ -22,13 +22,13 @@ def project do
docs: [
source_url_pattern:
"https://git.itepechi.me/itepechi/akkoma/src/branch/itepechi/%{path}#L%{line}",
- logo: "priv/static/images/logo.png",
+ logo: "priv/static/logo-512.png",
extras: ["README.md", "CHANGELOG.md"] ++ Path.wildcard("docs/**/*.md"),
groups_for_extras: [
- "Installation manuals": Path.wildcard("docs/installation/*.md"),
- Configuration: Path.wildcard("docs/config/*.md"),
- Administration: Path.wildcard("docs/admin/*.md"),
- "Pleroma's APIs and Mastodon API extensions": Path.wildcard("docs/api/*.md")
+ "Installation manuals": Path.wildcard("docs/docs/installation/*.md"),
+ Configuration: Path.wildcard("docs/docs/config/*.md"),
+ Administration: Path.wildcard("docs/docs/admin/*.md"),
+ "Pleroma's APIs and Mastodon API extensions": Path.wildcard("docs/docs/api/*.md")
],
main: "readme",
output: "priv/static/doc"
diff --git a/mix.lock b/mix.lock
index 810356345..c62df06f8 100644
--- a/mix.lock
+++ b/mix.lock
@@ -3,55 +3,55 @@
"base62": {:hex, :base62, "1.2.2", "85c6627eb609317b70f555294045895ffaaeb1758666ab9ef9ca38865b11e629", [:mix], [{:custom_base, "~> 0.2.1", [hex: :custom_base, repo: "hexpm", optional: false]}], "hexpm", "d41336bda8eaa5be197f1e4592400513ee60518e5b9f4dcf38f4b4dae6f377bb"},
"bbcode_pleroma": {:hex, :bbcode_pleroma, "0.2.0", "d36f5bca6e2f62261c45be30fa9b92725c0655ad45c99025cb1c3e28e25803ef", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "19851074419a5fedb4ef49e1f01b30df504bb5dbb6d6adfc135238063bebd1c3"},
"bcrypt_elixir": {:hex, :bcrypt_elixir, "3.0.1", "9be815469e6bfefec40fa74658ecbbe6897acfb57614df1416eeccd4903f602c", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "486bb95efb645d1efc6794c1ddd776a186a9a713abf06f45708a6ce324fb96cf"},
- "benchee": {:hex, :benchee, "1.2.0", "afd2f0caec06ce3a70d9c91c514c0b58114636db9d83c2dc6bfd416656618353", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "ee729e53217898b8fd30aaad3cce61973dab61574ae6f48229fe7ff42d5e4457"},
- "bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
+ "benchee": {:hex, :benchee, "1.3.0", "f64e3b64ad3563fa9838146ddefb2d2f94cf5b473bdfd63f5ca4d0657bf96694", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "34f4294068c11b2bd2ebf2c59aac9c7da26ffa0068afdf3419f1b176e16c5f81"},
+ "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
"cachex": {:hex, :cachex, "3.6.0", "14a1bfbeee060dd9bec25a5b6f4e4691e3670ebda28c8ba2884b12fe30b36bf8", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "ebf24e373883bc8e0c8d894a63bbe102ae13d918f790121f5cfe6e485cc8e2e2"},
"calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.1.201603 or ~> 0.5.20 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "90f6ce7672f70f56708792a98d98bd05176c9176", [ref: "90f6ce7672f70f56708792a98d98bd05176c9176"]},
- "castore": {:hex, :castore, "1.0.5", "9eeebb394cc9a0f3ae56b813459f990abb0a3dedee1be6b27fdb50301930502f", [:mix], [], "hexpm", "8d7c597c3e4a64c395980882d4bca3cebb8d74197c590dc272cfd3b6a6310578"},
+ "castore": {:hex, :castore, "1.0.6", "ffc42f110ebfdafab0ea159cd43d31365fa0af0ce4a02ecebf1707ae619ee727", [:mix], [], "hexpm", "374c6e7ca752296be3d6780a6d5b922854ffcc74123da90f2f328996b962d33a"},
"certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
"comeonin": {:hex, :comeonin, "5.4.0", "246a56ca3f41d404380fc6465650ddaa532c7f98be4bda1b4656b3a37cc13abe", [:mix], [], "hexpm", "796393a9e50d01999d56b7b8420ab0481a7538d0caf80919da493b4a6e51faf1"},
"concurrent_limiter": {:git, "https://akkoma.dev/AkkomaGang/concurrent-limiter.git", "a9e0b3d64574bdba761f429bb4fba0cf687b3338", [ref: "a9e0b3d64574bdba761f429bb4fba0cf687b3338"]},
"connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"},
"cors_plug": {:hex, :cors_plug, "3.0.3", "7c3ac52b39624bc616db2e937c282f3f623f25f8d550068b6710e58d04a0e330", [:mix], [{:plug, "~> 1.13", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "3f2d759e8c272ed3835fab2ef11b46bddab8c1ab9528167bd463b6452edf830d"},
- "cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"},
+ "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"},
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
- "cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"},
- "credo": {:hex, :credo, "1.7.1", "6e26bbcc9e22eefbff7e43188e69924e78818e2fe6282487d0703652bc20fd62", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "e9871c6095a4c0381c89b6aa98bc6260a8ba6addccf7f6a53da8849c748a58a2"},
+ "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"},
+ "credo": {:hex, :credo, "1.7.5", "643213503b1c766ec0496d828c90c424471ea54da77c8a168c725686377b9545", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f799e9b5cd1891577d8c773d245668aa74a2fcd15eb277f51a0131690ebfb3fd"},
"custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"},
"db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"},
- "dialyxir": {:hex, :dialyxir, "1.4.2", "764a6e8e7a354f0ba95d58418178d486065ead1f69ad89782817c296d0d746a5", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "516603d8067b2fd585319e4b13d3674ad4f314a5902ba8130cd97dc902ce6bbd"},
+ "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"},
"earmark": {:hex, :earmark, "1.4.46", "8c7287bd3137e99d26ae4643e5b7ef2129a260e3dcf41f251750cb4563c8fb81", [:mix], [], "hexpm", "798d86db3d79964e759ddc0c077d5eb254968ed426399fbf5a62de2b5ff8910a"},
"earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"},
"eblurhash": {:hex, :eblurhash, "1.2.2", "7da4255aaea984b31bb71155f673257353b0e0554d0d30dcf859547e74602582", [:rebar3], [], "hexpm", "8c20ca00904de023a835a9dcb7b7762fed32264c85a80c3cafa85288e405044c"},
"ecto": {:hex, :ecto, "3.10.3", "eb2ae2eecd210b4eb8bece1217b297ad4ff824b4384c0e3fdd28aaf96edd6135", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "44bec74e2364d491d70f7e42cd0d690922659d329f6465e89feb8a34e8cd3433"},
"ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"},
- "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.14", "7a20cfe913b0476542b43870e67386461258734896035e3f284039fd18bd4c4c", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "22f5f98592dd597db9416fcef00effae0787669fdcb6faf447e982b553798e98"},
+ "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.15", "0fc29dbae0e444a29bd6abeee4cf3c4c037e692a272478a234a1cc765077dbb1", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "b6127f3a5c6fc3d84895e4768cc7c199f22b48b67d6c99b13fbf4a374e73f039"},
"ecto_sql": {:hex, :ecto_sql, "3.10.2", "6b98b46534b5c2f8b8b5f03f126e75e2a73c64f3c071149d32987a5378b0fdbd", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "68c018debca57cb9235e3889affdaec7a10616a4e3a80c99fa1d01fdafaa9007"},
"elasticsearch": {:git, "https://akkoma.dev/AkkomaGang/elasticsearch-elixir.git", "6cd946f75f6ab9042521a009d1d32d29a90113ca", [ref: "main"]},
- "elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"},
- "elixir_xml_to_map": {:hex, :elixir_xml_to_map, "3.0.0", "67dcff30ecf72aed37ab08525133e4420717a749436e22bfece431e7dddeea7e", [:mix], [{:erlsom, "~> 1.4", [hex: :erlsom, repo: "hexpm", optional: false]}], "hexpm", "11222dd7f029f8db7a6662b41c992dbdb0e1c6e4fdea6a42056f9d27c847efbb"},
+ "elixir_make": {:hex, :elixir_make, "0.8.3", "d38d7ee1578d722d89b4d452a3e36bcfdc644c618f0d063b874661876e708683", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "5c99a18571a756d4af7a4d89ca75c28ac899e6103af6f223982f09ce44942cc9"},
+ "elixir_xml_to_map": {:hex, :elixir_xml_to_map, "3.1.0", "4d6260486a8cce59e4bf3575fe2dd2a24766546ceeef9f93fcec6f7c62a2827a", [:mix], [{:erlsom, "~> 1.4", [hex: :erlsom, repo: "hexpm", optional: false]}], "hexpm", "8fe5f2e75f90bab07ee2161120c2dc038ebcae8135554f5582990f1c8c21f911"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"erlsom": {:hex, :erlsom, "1.5.1", "c8fe2babd33ff0846403f6522328b8ab676f896b793634cfe7ef181c05316c03", [:rebar3], [], "hexpm", "7965485494c5844dd127656ac40f141aadfa174839ec1be1074e7edf5b4239eb"},
"eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"},
- "ex_aws": {:hex, :ex_aws, "2.5.0", "1785e69350b16514c1049330537c7da10039b1a53e1d253bbd703b135174aec3", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8 or ~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "971b86e5495fc0ae1c318e35e23f389e74cf322f2c02d34037c6fc6d405006f1"},
- "ex_aws_s3": {:hex, :ex_aws_s3, "2.5.2", "cee302b8e9ee198cc0d89f1de2a7d6a8921e1a556574476cf5590d2156590fe3", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "cc5bd945a22a99eece4721d734ae2452d3717e81c357a781c8574663254df4a1"},
+ "ex_aws": {:hex, :ex_aws, "2.5.3", "9c2d05ba0c057395b12c7b5ca6267d14cdaec1d8e65bdf6481fe1fd245accfb4", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8 or ~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "67115f1d399d7ec4d191812ee565c6106cb4b1bbf19a9d4db06f265fd87da97e"},
+ "ex_aws_s3": {:hex, :ex_aws_s3, "2.5.3", "422468e5c3e1a4da5298e66c3468b465cfd354b842e512cb1f6fbbe4e2f5bdaf", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "4f09dd372cc386550e484808c5ac5027766c8d0cd8271ccc578b82ee6ef4f3b8"},
"ex_const": {:hex, :ex_const, "0.2.4", "d06e540c9d834865b012a17407761455efa71d0ce91e5831e86881b9c9d82448", [:mix], [], "hexpm", "96fd346610cc992b8f896ed26a98be82ac4efb065a0578f334a32d60a3ba9767"},
- "ex_doc": {:hex, :ex_doc, "0.31.0", "06eb1dfd787445d9cab9a45088405593dd3bb7fe99e097eaa71f37ba80c7a676", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "5350cafa6b7f77bdd107aa2199fe277acf29d739aba5aee7e865fc680c62a110"},
+ "ex_doc": {:hex, :ex_doc, "0.32.0", "896afb57b1e00030f6ec8b2e19d3ca99a197afb23858d49d94aea673dc222f12", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "ed2c3e42c558f49bda3ff37e05713432006e1719a6c4a3320c7e4735787374e7"},
"ex_machina": {:hex, :ex_machina, "2.7.0", "b792cc3127fd0680fecdb6299235b4727a4944a09ff0fa904cc639272cd92dc7", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "419aa7a39bde11894c87a615c4ecaa52d8f107bbdd81d810465186f783245bf8"},
"ex_syslogger": {:hex, :ex_syslogger, "2.0.0", "de6de5c5472a9c4fdafb28fa6610e381ae79ebc17da6490b81d785d68bd124c9", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}, {:syslog, "~> 1.1.0", [hex: :syslog, repo: "hexpm", optional: false]}], "hexpm", "a52b2fe71764e9e6ecd149ab66635812f68e39279cbeee27c52c0e35e8b8019e"},
"excoveralls": {:hex, :excoveralls, "0.16.1", "0bd42ed05c7d2f4d180331a20113ec537be509da31fed5c8f7047ce59ee5a7c5", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "dae763468e2008cf7075a64cb1249c97cb4bc71e236c5c2b5e5cdf1cfa2bf138"},
"expo": {:hex, :expo, "0.4.1", "1c61d18a5df197dfda38861673d392e642649a9cef7694d2f97a587b2cfb319b", [:mix], [], "hexpm", "2ff7ba7a798c8c543c12550fa0e2cbc81b95d4974c65855d8d15ba7b37a1ce47"},
- "fast_html": {:hex, :fast_html, "2.2.0", "6c5ef1be087a4ed613b0379c13f815c4d11742b36b67bb52cee7859847c84520", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "064c4f23b4a6168f9187dac8984b056f2c531bb0787f559fd6a8b34b38aefbae"},
+ "fast_html": {:hex, :fast_html, "2.3.0", "08c1d8ead840dd3060ba02c761bed9f37f456a1ddfe30bcdcfee8f651cec06a6", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "f18e3c7668f82d3ae0b15f48d48feeb257e28aa5ab1b0dbf781c7312e5da029d"},
"fast_sanitize": {:hex, :fast_sanitize, "0.2.3", "67b93dfb34e302bef49fec3aaab74951e0f0602fd9fa99085987af05bd91c7a5", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "e8ad286d10d0386e15d67d0ee125245ebcfbc7d7290b08712ba9013c8c5e56e2"},
"file_ex": {:git, "https://akkoma.dev/AkkomaGang/file_ex.git", "cc7067c7d446c2526e9ecf91d40896b088851569", [ref: "cc7067c7d446c2526e9ecf91d40896b088851569"]},
- "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
+ "file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"},
"finch": {:hex, :finch, "0.16.0", "40733f02c89f94a112518071c0a91fe86069560f5dbdb39f9150042f44dcfb1a", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f660174c4d519e5fec629016054d60edd822cdfe2b7270836739ac2f97735ec5"},
"flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "31fc8090fde1acd267c07c36ea7365b8604055f897d3a53dd967658c691bd827"},
- "floki": {:hex, :floki, "0.35.2", "87f8c75ed8654b9635b311774308b2760b47e9a579dabf2e4d5f1e1d42c39e0b", [:mix], [], "hexpm", "6b05289a8e9eac475f644f09c2e4ba7e19201fd002b89c28c1293e7bd16773d9"},
+ "floki": {:hex, :floki, "0.36.1", "712b7f2ba19a4d5a47dfe3e74d81876c95bbcbee44fe551f0af3d2a388abb3da", [:mix], [], "hexpm", "21ba57abb8204bcc70c439b423fc0dd9f0286de67dc82773a14b0200ada0995f"},
"gen_smtp": {:hex, :gen_smtp, "1.2.0", "9cfc75c72a8821588b9b9fe947ae5ab2aed95a052b81237e0928633a13276fd3", [:rebar3], [{:ranch, ">= 1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "5ee0375680bca8f20c4d85f58c2894441443a743355430ff33a783fe03296779"},
"gettext": {:hex, :gettext, "0.22.3", "c8273e78db4a0bb6fba7e9f0fd881112f349a3117f7f7c598fa18c66c888e524", [:mix], [{:expo, "~> 0.4.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "935f23447713954a6866f1bb28c3a878c4c011e802bcd68a726f5e558e4b64bd"},
"hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
@@ -60,17 +60,17 @@
"http_signatures": {:git, "https://akkoma.dev/AkkomaGang/http_signatures.git", "6640ce7d24c783ac2ef56e27d00d12e8dc85f396", [ref: "6640ce7d24c783ac2ef56e27d00d12e8dc85f396"]},
"httpoison": {:hex, :httpoison, "1.8.2", "9eb9c63ae289296a544842ef816a85d881d4a31f518a0fec089aaa744beae290", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "2bb350d26972e30c96e2ca74a1aaf8293d61d0742ff17f01e0279fef11599921"},
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
- "inet_cidr": {:hex, :inet_cidr, "1.0.4", "a05744ab7c221ca8e395c926c3919a821eb512e8f36547c062f62c4ca0cf3d6e", [:mix], [], "hexpm", "64a2d30189704ae41ca7dbdd587f5291db5d1dda1414e0774c29ffc81088c1bc"},
+ "inet_cidr": {:hex, :inet_cidr, "1.0.8", "d26bb7bdbdf21ae401ead2092bf2bb4bf57fe44a62f5eaa5025280720ace8a40", [:mix], [], "hexpm", "d5b26da66603bb56c933c65214c72152f0de9a6ea53618b56d63302a68f6a90e"},
"jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
- "joken": {:hex, :joken, "2.6.0", "b9dd9b6d52e3e6fcb6c65e151ad38bf4bc286382b5b6f97079c47ade6b1bcc6a", [:mix], [{:jose, "~> 1.11.5", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "5a95b05a71cd0b54abd35378aeb1d487a23a52c324fa7efdffc512b655b5aaa7"},
- "jose": {:hex, :jose, "1.11.6", "613fda82552128aa6fb804682e3a616f4bc15565a048dabd05b1ebd5827ed965", [:mix, :rebar3], [], "hexpm", "6275cb75504f9c1e60eeacb771adfeee4905a9e182103aa59b53fed651ff9738"},
+ "joken": {:hex, :joken, "2.6.1", "2ca3d8d7f83bf7196296a3d9b2ecda421a404634bfc618159981a960020480a1", [:mix], [{:jose, "~> 1.11.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "ab26122c400b3d254ce7d86ed066d6afad27e70416df947cdcb01e13a7382e68"},
+ "jose": {:hex, :jose, "1.11.9", "c861eb99d9e9f62acd071dc5a49ffbeab9014e44490cd85ea3e49e3d36184777", [:mix, :rebar3], [], "hexpm", "b5ccc3749d2e1638c26bed806259df5bc9e438797fe60dc71e9fa0716133899b"},
"jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"},
"linkify": {:git, "https://akkoma.dev/AkkomaGang/linkify.git", "2567e2c1073fa371fd26fd66dfa5bc77b6919c16", []},
"mail": {:hex, :mail, "0.3.1", "cb0a14e4ed8904e4e5a08214e686ccf6f9099346885db17d8c309381f865cc5c", [:mix], [], "hexpm", "1db701e89865c1d5fa296b2b57b1cd587587cca8d8a1a22892b35ef5a8e352a6"},
"majic": {:git, "https://akkoma.dev/AkkomaGang/majic.git", "80540b36939ec83f48e76c61e5000e0fd67706f0", [ref: "80540b36939ec83f48e76c61e5000e0fd67706f0"]},
"makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"},
- "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
- "makeup_erlang": {:hex, :makeup_erlang, "0.1.3", "d684f4bac8690e70b06eb52dad65d26de2eefa44cd19d64a8095e1417df7c8fd", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "b78dc853d2e670ff6390b605d807263bf606da3c82be37f9d7f68635bd886fc9"},
+ "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"},
+ "makeup_erlang": {:hex, :makeup_erlang, "0.1.5", "e0ff5a7c708dda34311f7522a8758e23bfcd7d8d8068dc312b5eb41c6fd76eba", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "94d2e986428585a21516d7d7149781480013c56e30c6a233534bedf38867a59a"},
"meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"},
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
"mfm_parser": {:git, "https://akkoma.dev/AkkomaGang/mfm-parser.git", "b21ab7754024af096f2d14247574f55f0063295b", [ref: "b21ab7754024af096f2d14247574f55f0063295b"]},
@@ -82,54 +82,54 @@
"mox": {:hex, :mox, "1.1.0", "0f5e399649ce9ab7602f72e718305c0f9cdc351190f72844599545e4996af73c", [:mix], [], "hexpm", "d44474c50be02d5b72131070281a5d3895c0e7a95c780e90bc0cfe712f633a13"},
"nimble_options": {:hex, :nimble_options, "1.1.0", "3b31a57ede9cb1502071fade751ab0c7b8dbe75a9a4c2b5bbb0943a690b63172", [:mix], [], "hexpm", "8bbbb3941af3ca9acc7835f5655ea062111c9c27bcac53e004460dfd19008a99"},
"nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
- "nimble_pool": {:hex, :nimble_pool, "1.0.0", "5eb82705d138f4dd4423f69ceb19ac667b3b492ae570c9f5c900bb3d2f50a847", [:mix], [], "hexpm", "80be3b882d2d351882256087078e1b1952a28bf98d0a287be87e4a24a710b67a"},
+ "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
"oban": {:hex, :oban, "2.15.4", "d49ab4ffb7153010e32f80fe9e56f592706238149ec579eb50f8a4e41d218856", [:mix], [{:ecto_sql, "~> 3.6", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5fce611fdfffb13e9148df883116e5201adf1e731eb302cc88cde0588510079c"},
- "open_api_spex": {:hex, :open_api_spex, "3.18.0", "f9952b6bc8a1bf14168f3754981b7c8d72d015112bfedf2588471dd602e1e715", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "37849887ab67efab052376401fac28c0974b273ffaecd98f4532455ca0886464"},
+ "open_api_spex": {:hex, :open_api_spex, "3.18.3", "fefb84fe323cacfc92afdd0ecb9e89bc0261ae00b7e3167ffc2028ce3944de42", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "c0cfc31570199ce7e7520b494a591027da609af45f6bf9adce51e2469b1609fb"},
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
- "phoenix": {:hex, :phoenix, "1.7.10", "02189140a61b2ce85bb633a9b6fd02dff705a5f1596869547aeb2b2b95edd729", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "cf784932e010fd736d656d7fead6a584a4498efefe5b8227e9f383bf15bb79d0"},
- "phoenix_ecto": {:hex, :phoenix_ecto, "4.4.3", "86e9878f833829c3f66da03d75254c155d91d72a201eb56ae83482328dc7ca93", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d36c401206f3011fefd63d04e8ef626ec8791975d9d107f9a0817d426f61ac07"},
+ "phoenix": {:hex, :phoenix, "1.7.12", "1cc589e0eab99f593a8aa38ec45f15d25297dd6187ee801c8de8947090b5a9d3", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "d646192fbade9f485b01bc9920c139bfdd19d0f8df3d73fd8eaf2dfbe0d2837c"},
+ "phoenix_ecto": {:hex, :phoenix_ecto, "4.5.1", "6fdbc334ea53620e71655664df6f33f670747b3a7a6c4041cdda3e2c32df6257", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ebe43aa580db129e54408e719fb9659b7f9e0d52b965c5be26cdca416ecead28"},
"phoenix_html": {:hex, :phoenix_html, "3.3.3", "380b8fb45912b5638d2f1d925a3771b4516b9a78587249cabe394e0a5d579dc9", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "923ebe6fec6e2e3b3e569dfbdc6560de932cd54b000ada0208b5f45024bdd76c"},
"phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.7.2", "97cc4ff2dba1ebe504db72cb45098cb8e91f11160528b980bd282cc45c73b29c", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.18.3", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "0e5fdf063c7a3b620c566a30fcf68b7ee02e5e46fe48ee46a6ec3ba382dc05b7"},
"phoenix_live_view": {:hex, :phoenix_live_view, "0.18.18", "1f38fbd7c363723f19aad1a04b5490ff3a178e37daaf6999594d5f34796c47fc", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a5810d0472f3189ede6d2a95bda7f31c6113156b91784a3426cb0ab6a6d85214"},
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"},
- "phoenix_swoosh": {:hex, :phoenix_swoosh, "1.2.0", "a544d83fde4a767efb78f45404a74c9e37b2a9c5ea3339692e65a6966731f935", [:mix], [{:finch, "~> 0.8", [hex: :finch, repo: "hexpm", optional: true]}, {:hackney, "~> 1.10", [hex: :hackney, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_view, "~> 1.0 or ~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:swoosh, "~> 1.5", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm", "e88d117251e89a16b92222415a6d87b99a96747ddf674fc5c7631de734811dba"},
- "phoenix_template": {:hex, :phoenix_template, "1.0.3", "32de561eefcefa951aead30a1f94f1b5f0379bc9e340bb5c667f65f1edfa4326", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "16f4b6588a4152f3cc057b9d0c0ba7e82ee23afa65543da535313ad8d25d8e2c"},
+ "phoenix_swoosh": {:hex, :phoenix_swoosh, "1.2.1", "b74ccaa8046fbc388a62134360ee7d9742d5a8ae74063f34eb050279de7a99e1", [:mix], [{:finch, "~> 0.8", [hex: :finch, repo: "hexpm", optional: true]}, {:hackney, "~> 1.10", [hex: :hackney, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_view, "~> 1.0 or ~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:swoosh, "~> 1.5", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm", "4000eeba3f9d7d1a6bf56d2bd56733d5cadf41a7f0d8ffe5bb67e7d667e204a2"},
+ "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"},
"phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"},
- "plug": {:hex, :plug, "1.15.2", "94cf1fa375526f30ff8770837cb804798e0045fd97185f0bb9e5fcd858c792a3", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "02731fa0c2dcb03d8d21a1d941bdbbe99c2946c0db098eee31008e04c6283615"},
- "plug_cowboy": {:hex, :plug_cowboy, "2.6.1", "9a3bbfceeb65eff5f39dab529e5cd79137ac36e913c02067dba3963a26efe9b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "de36e1a21f451a18b790f37765db198075c25875c64834bcc82d90b309eb6613"},
+ "plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"},
+ "plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"},
"plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"},
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "79fd4fcf34d110605c26560cbae8f23c603ec4158c08298bd4360fdea90bb5cf"},
"poison": {:hex, :poison, "5.0.0", "d2b54589ab4157bbb82ec2050757779bfed724463a544b6e20d79855a9e43b24", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "11dc6117c501b80c62a7594f941d043982a1bd05a1184280c0d9166eb4d8d3fc"},
"poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"},
- "postgrex": {:hex, :postgrex, "0.17.4", "5777781f80f53b7c431a001c8dad83ee167bcebcf3a793e3906efff680ab62b3", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "6458f7d5b70652bc81c3ea759f91736c16a31be000f306d3c64bcdfe9a18b3cc"},
+ "postgrex": {:hex, :postgrex, "0.17.5", "0483d054938a8dc069b21bdd636bf56c487404c241ce6c319c1f43588246b281", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "50b8b11afbb2c4095a3ba675b4f055c416d0f3d7de6633a595fc131a828a67eb"},
"pot": {:hex, :pot, "1.0.2", "13abb849139fdc04ab8154986abbcb63bdee5de6ed2ba7e1713527e33df923dd", [:rebar3], [], "hexpm", "78fe127f5a4f5f919d6ea5a2a671827bd53eb9d37e5b4128c0ad3df99856c2e0"},
"ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
- "recon": {:hex, :recon, "2.5.4", "05dd52a119ee4059fa9daa1ab7ce81bc7a8161a2f12e9d42e9d551ffd2ba901c", [:mix, :rebar3], [], "hexpm", "e9ab01ac7fc8572e41eb59385efeb3fb0ff5bf02103816535bacaedf327d0263"},
+ "recon": {:hex, :recon, "2.5.5", "c108a4c406fa301a529151a3bb53158cadc4064ec0c5f99b03ddb8c0e4281bdf", [:mix, :rebar3], [], "hexpm", "632a6f447df7ccc1a4a10bdcfce71514412b16660fe59deca0fcf0aa3c054404"},
"remote_ip": {:hex, :remote_ip, "1.1.0", "cb308841595d15df3f9073b7c39243a1dd6ca56e5020295cb012c76fbec50f2d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "616ffdf66aaad6a72fc546dabf42eed87e2a99e97b09cbd92b10cc180d02ed74"},
"search_parser": {:git, "https://github.com/FloatingGhost/pleroma-contrib-search-parser.git", "08971a81e68686f9ac465cfb6661d51c5e4e1e7f", [ref: "08971a81e68686f9ac465cfb6661d51c5e4e1e7f"]},
"sleeplocks": {:hex, :sleeplocks, "1.1.2", "d45aa1c5513da48c888715e3381211c859af34bee9b8290490e10c90bb6ff0ca", [:rebar3], [], "hexpm", "9fe5d048c5b781d6305c1a3a0f40bb3dfc06f49bf40571f3d2d0c57eaa7f59a5"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
"statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"},
"sweet_xml": {:hex, :sweet_xml, "0.7.4", "a8b7e1ce7ecd775c7e8a65d501bc2cd933bff3a9c41ab763f5105688ef485d08", [:mix], [], "hexpm", "e7c4b0bdbf460c928234951def54fe87edf1a170f6896675443279e2dbeba167"},
- "swoosh": {:hex, :swoosh, "1.14.2", "cf686f92ad3b21e6651b20c50eeb1781f581dc7097ef6251b4d322a9f1d19339", [:mix], [{:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.4 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "01d8fae72930a0b5c1bb9725df0408602ed8c5c3d59dc6e7a39c57b723cd1065"},
+ "swoosh": {:hex, :swoosh, "1.14.4", "94e9dba91f7695a10f49b0172c4a4cb658ef24abef7e8140394521b7f3bbb2d4", [:mix], [{:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.4 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "081c5a590e4ba85cc89baddf7b2beecf6c13f7f84a958f1cd969290815f0f026"},
"syslog": {:hex, :syslog, "1.1.0", "6419a232bea84f07b56dc575225007ffe34d9fdc91abe6f1b2f254fd71d8efc2", [:rebar3], [], "hexpm", "4c6a41373c7e20587be33ef841d3de6f3beba08519809329ecc4d27b15b659e1"},
- "table_rex": {:hex, :table_rex, "3.1.1", "0c67164d1714b5e806d5067c1e96ff098ba7ae79413cc075973e17c38a587caa", [:mix], [], "hexpm", "678a23aba4d670419c23c17790f9dcd635a4a89022040df7d5d772cb21012490"},
+ "table_rex": {:hex, :table_rex, "4.0.0", "3c613a68ebdc6d4d1e731bc973c233500974ec3993c99fcdabb210407b90959b", [:mix], [], "hexpm", "c35c4d5612ca49ebb0344ea10387da4d2afe278387d4019e4d8111e815df8f55"},
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
- "telemetry_metrics": {:hex, :telemetry_metrics, "0.6.1", "315d9163a1d4660aedc3fee73f33f1d355dcc76c5c3ab3d59e76e3edf80eef1f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7be9e0871c41732c233be71e4be11b96e56177bf15dde64a8ac9ce72ac9834c6"},
+ "telemetry_metrics": {:hex, :telemetry_metrics, "0.6.2", "2caabe9344ec17eafe5403304771c3539f3b6e2f7fb6a6f602558c825d0d0bfb", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9b43db0dc33863930b9ef9d27137e78974756f5f198cae18409970ed6fa5b561"},
"telemetry_metrics_prometheus": {:hex, :telemetry_metrics_prometheus, "1.1.0", "1cc23e932c1ef9aa3b91db257ead31ea58d53229d407e059b29bb962c1505a13", [:mix], [{:plug_cowboy, "~> 2.1", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.0", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}], "hexpm", "d43b3659b3244da44fe0275b717701542365d4519b79d9ce895b9719c1ce4d26"},
"telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.1.0", "4e15f6d7dbedb3a4e3aed2262b7e1407f166fcb9c30ca3f96635dfbbef99965c", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "0dd10e7fe8070095df063798f82709b0a1224c31b8baf6278b423898d591a069"},
- "telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
+ "telemetry_poller": {:hex, :telemetry_poller, "1.1.0", "58fa7c216257291caaf8d05678c8d01bd45f4bdbc1286838a28c4bb62ef32999", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9eb9d9cbfd81cbd7cdd24682f8711b6e2b691289a0de6826e58452f28c103c8f"},
"temple": {:git, "https://akkoma.dev/AkkomaGang/temple.git", "066a699ade472d8fa42a9d730b29a61af9bc8b59", [ref: "066a699ade472d8fa42a9d730b29a61af9bc8b59"]},
- "tesla": {:hex, :tesla, "1.8.0", "d511a4f5c5e42538d97eef7c40ec4f3e44effdc5068206f42ed859e09e51d1fd", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "10501f360cd926a309501287470372af1a6e1cbed0f43949203a4c13300bc79f"},
+ "tesla": {:hex, :tesla, "1.9.0", "8c22db6a826e56a087eeb8cdef56889731287f53feeb3f361dec5d4c8efb6f14", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "7c240c67e855f7e63e795bf16d6b3f5115a81d1f44b7fe4eadbf656bae0fef8a"},
"timex": {:hex, :timex, "3.7.11", "bb95cb4eb1d06e27346325de506bcc6c30f9c6dea40d1ebe390b262fad1862d1", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.20", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.1", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "8b9024f7efbabaf9bd7aa04f65cf8dcd7c9818ca5737677c7b76acbc6a94d1aa"},
"trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"},
"tzdata": {:hex, :tzdata, "1.1.1", "20c8043476dfda8504952d00adac41c6eda23912278add38edc140ae0c5bcc46", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "a69cec8352eafcd2e198dea28a34113b60fdc6cb57eb5ad65c10292a6ba89787"},
"ueberauth": {:hex, :ueberauth, "0.10.5", "806adb703df87e55b5615cf365e809f84c20c68aa8c08ff8a416a5a6644c4b02", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "3efd1f31d490a125c7ed453b926f7c31d78b97b8a854c755f5c40064bf3ac9e1"},
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
"unsafe": {:hex, :unsafe, "1.0.2", "23c6be12f6c1605364801f4b47007c0c159497d0446ad378b5cf05f1855c0581", [:mix], [], "hexpm", "b485231683c3ab01a9cd44cb4a79f152c6f3bb87358439c6f68791b85c2df675"},
- "vex": {:hex, :vex, "0.9.1", "cb65348ebd1c4002861b65bef36e524c29d9a879c90119b2d0e674e323124277", [:mix], [], "hexpm", "a0f9f3959d127ad6a6a617c3f607ecfb1bc6f3c59f9c3614a901a46d1765bafe"},
+ "vex": {:hex, :vex, "0.9.2", "fe061acc9e0907d983d46b51bf35d58176f0fe6eb7ba3b33c9336401bf42b6d1", [:mix], [], "hexpm", "76e709a9762e98c6b462dfce92e9b5dfbf712839227f2da8add6dd11549b12cb"},
"web_push_encryption": {:hex, :web_push_encryption, "0.3.1", "76d0e7375142dfee67391e7690e89f92578889cbcf2879377900b5620ee4708d", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.11.1", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "4f82b2e57622fb9337559058e8797cb0df7e7c9790793bdc4e40bc895f70e2a2"},
"websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"},
- "websock_adapter": {:hex, :websock_adapter, "0.5.5", "9dfeee8269b27e958a65b3e235b7e447769f66b5b5925385f5a569269164a210", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "4b977ba4a01918acbf77045ff88de7f6972c2a009213c515a445c48f224ffce9"},
+ "websock_adapter": {:hex, :websock_adapter, "0.5.6", "0437fe56e093fd4ac422de33bf8fc89f7bc1416a3f2d732d8b2c8fd54792fe60", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "e04378d26b0af627817ae84c92083b7e97aca3121196679b73c73b99d0d133ea"},
"websockex": {:hex, :websockex, "0.4.3", "92b7905769c79c6480c02daacaca2ddd49de936d912976a4d3c923723b647bf0", [:mix], [], "hexpm", "95f2e7072b85a3a4cc385602d42115b73ce0b74a9121d0d6dbbf557645ac53e4"},
}
diff --git a/priv/static/schemas/litepub-0.1.jsonld b/priv/static/schemas/litepub-0.1.jsonld
index 12a6ec16f..6287669b6 100644
--- a/priv/static/schemas/litepub-0.1.jsonld
+++ b/priv/static/schemas/litepub-0.1.jsonld
@@ -19,6 +19,7 @@
"toot": "http://joinmastodon.org/ns#",
"misskey": "https://misskey-hub.net/ns#",
"fedibird": "http://fedibird.com/ns#",
+ "sharkey": "https://joinsharkey.org/ns#",
"value": "schema:value",
"sensitive": "as:sensitive",
"litepub": "http://litepub.social/ns#",
@@ -45,6 +46,14 @@
"contentMap": {
"@id": "as:content",
"@container": "@language"
+ },
+ "featured": {
+ "@id": "toot:featured",
+ "@type": "@id"
+ },
+ "backgroundUrl": {
+ "@id": "sharkey:backgroundUrl",
+ "@type": "@id"
}
}
]
diff --git a/priv/static/static-fe/static-fe.css b/priv/static/static-fe/static-fe.css
index ed89f0aed..5ca19e9aa 100644
--- a/priv/static/static-fe/static-fe.css
+++ b/priv/static/static-fe/static-fe.css
@@ -85,6 +85,7 @@ .inner-nav a {
.inner-nav img {
height: 28px;
+ width: auto;
vertical-align: middle;
padding-right: 5px
}
@@ -408,6 +409,7 @@ .user-info .avatar img {
.avatar img {
border-radius: 3px;
box-shadow: var(--avatarShadow);
+ object-fit: cover;
}
.user-summary {
@@ -610,13 +612,13 @@ @media (max-width: 800px) {
}
img:not(.u-photo, .fa-icon) {
- width: 32px;
+ width: auto;
height: 32px;
padding: 0;
vertical-align: middle;
}
.username img:not(.u-photo) {
- width: 16px;
+ width: auto;
height: 16px;
}
diff --git a/priv/templates/sample_config.eex b/priv/templates/sample_config.eex
index 42d4bb5f8..1eab99601 100644
--- a/priv/templates/sample_config.eex
+++ b/priv/templates/sample_config.eex
@@ -88,6 +88,8 @@ config :pleroma, configurable_from_database: <%= db_configurable? %>
# config :pleroma, :mrf,
# policies: []
+config :pleroma, Pleroma.Upload,
<%= if Kernel.length(upload_filters) > 0 do
-"config :pleroma, Pleroma.Upload, filters: #{inspect(upload_filters)}"
+" filters: #{inspect(upload_filters)},"
end %>
+ base_url: "<%= media_url %>"
diff --git a/test/fixtures/bridgy/actor.json b/test/fixtures/bridgy/actor.json
index 5b2d8982b..b4e859a82 100644
--- a/test/fixtures/bridgy/actor.json
+++ b/test/fixtures/bridgy/actor.json
@@ -70,7 +70,7 @@
"preferredUsername": "jk.nipponalba.scot",
"summary": "",
"publicKey": {
- "id": "jk.nipponalba.scot",
+ "id": "https://fed.brid.gy/jk.nipponalba.scot#key",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdarxwzxnNbJ2hneWOYHkYJowk\npyigQtxlUd0VjgSQHwxU9kWqfbrHBVADyTtcqi/4dAzQd3UnCI1TPNnn4LPZY9PW\noiWd3Zl1/EfLFxO7LU9GS7fcSLQkyj5JNhSlN3I8QPudZbybrgRDVZYooDe1D+52\n5KLGqC2ajrIVOiDRTQIDAQAB\n-----END PUBLIC KEY-----"
},
"inbox": "https://fed.brid.gy/jk.nipponalba.scot/inbox",
diff --git a/test/fixtures/tesla_mock/https___osada.macgirvin.com_channel_mike.json b/test/fixtures/tesla_mock/https___osada.macgirvin.com_channel_mike.json
index ca76d6e17..70d750e5d 100644
--- a/test/fixtures/tesla_mock/https___osada.macgirvin.com_channel_mike.json
+++ b/test/fixtures/tesla_mock/https___osada.macgirvin.com_channel_mike.json
@@ -37,7 +37,7 @@
"sharedInbox": "https://osada.macgirvin.com/inbox"
},
"publicKey": {
- "id": "https://osada.macgirvin.com/channel/mike/public_key_pem",
+ "id": "https://osada.macgirvin.com/channel/mike",
"owner": "https://osada.macgirvin.com/channel/mike",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAskSyK2VwBNKbzZl9XNJk\nvxU5AAilmRArMmmKSzphdHaVBHakeafUfixvqNrQ/oX2srJvJKcghNmEMrJ6MJ7r\npeEndVOo7pcP4PwVjtnC06p3J711q5tBehqM25BfCLCrB2YqWF6c8zk3CPN3Na21\n8k5s4cO95N/rGN+Po0XFAX/HjKjlpgNpKRDrpxmXxTU8NZfAqeQGJ5oiMBZI9vVB\n+eU7t1L6F5/XWuUCeP4OMrG8oZX822AREba8rknS6DpkWGES0Rx2eNOyYTf6ue75\nI6Ek6rlO+da5wMWr+3BvYMq4JMIwTHzAO+ZqqJPFpzKSiVuAWb2DOX/MDFecVWJE\ntF/R60lONxe4e/00MPCoDdqkLKdwROsk1yGL7z4Zk6jOWFEhIcWy/d2Ya5CpPvS3\nu4wNN4jkYAjra+8TiloRELhV4gpcEk8nkyNwLXOhYm7zQ5sIc5rfXoIrFzALB86W\nG05Nnqg+77zZIaTZpD9qekYlaEt+0OVtt9TTIeTiudQ983l6mfKwZYymrzymH1dL\nVgxBRYo+Z53QOSLiSKELfTBZxEoP1pBw6RiOHXydmJ/39hGgc2YAY/5ADwW2F2yb\nJ7+gxG6bPJ3ikDLYcD4CB5iJQdnTcDsFt3jyHAT6wOCzFAYPbHUqtzHfUM30dZBn\nnJhQF8udPLcXLaj6GW75JacCAwEAAQ==\n-----END PUBLIC KEY-----\n"
},
diff --git a/test/fixtures/tesla_mock/https__info.pleroma.site_activity3.json b/test/fixtures/tesla_mock/https__info.pleroma.site_activity3.json
index 1df73f2c5..dbf74dfe1 100644
--- a/test/fixtures/tesla_mock/https__info.pleroma.site_activity3.json
+++ b/test/fixtures/tesla_mock/https__info.pleroma.site_activity3.json
@@ -3,7 +3,7 @@
"attributedTo": "http://mastodon.example.org/users/admin",
"attachment": [],
"content": "this post was not actually written by Haelwenn
",
- "id": "https://info.pleroma.site/activity2.json",
+ "id": "https://info.pleroma.site/activity3.json",
"published": "2018-09-01T22:15:00Z",
"tag": [],
"to": [
diff --git a/test/fixtures/tesla_mock/kaniini@hubzilla.example.org.json b/test/fixtures/tesla_mock/kaniini@hubzilla.example.org.json
index 11c79e11e..c354747cc 100644
--- a/test/fixtures/tesla_mock/kaniini@hubzilla.example.org.json
+++ b/test/fixtures/tesla_mock/kaniini@hubzilla.example.org.json
@@ -1 +1 @@
-{"@context":["https://www.w3.org/ns/activitystreams","https://w3id.org/security/v1","https://hubzilla.example.org/apschema/v1.2"],"type":"Person","id":"https://hubzilla.example.org/channel/kaniini","preferredUsername":"kaniini","name":"kaniini","icon":{"type":"Image","mediaType":"image/jpeg","url":"https://hubzilla.example.org/photo/profile/l/281","height":300,"width":300},"url":{"type":"Link","mediaType":"text/html","href":"https://hubzilla.example.org/channel/kaniini"},"inbox":"https://hubzilla.example.org/inbox/kaniini","outbox":"https://hubzilla.example.org/outbox/kaniini","followers":"https://hubzilla.example.org/followers/kaniini","following":"https://hubzilla.example.org/following/kaniini","endpoints":{"sharedInbox":"https://hubzilla.example.org/inbox"},"publicKey":{"id":"https://hubzilla.example.org/channel/kaniini/public_key_pem","owner":"https://hubzilla.example.org/channel/kaniini","publicKeyPem":"-----BEGIN PUBLIC KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvXCDkQPw+1N8B2CUd5s2\nbYvjHt+t7soMNfUiRy0qGbgW46S45k5lCq1KpbFIX3sgGZ4OWjnXVbvjCJi4kl5M\nfm5DBXzpuu05AmjVl8hqk4GejajiE/1Nq0uWHPiOSFWispUjCzzCu65V+IsiE5JU\nvcL6WEf/pYNRq7gYqyT693F7+cO5/rVv9OScx5UOxbIuU1VXYhdHCqAMDJWadC89\nhePrcD3HOQKl06W2tDxHcWk6QjrdsUQGbNOgK/QIN9gSxA+rCFEvH5O0HAhI0aXq\ncOB+vysJUFLeQOAqmAKvKS5V6RqE1GqqT0pDWHack4EmQi0gkgVzo+45xoP6wfDl\nWwG88w21LNxGvGHuN4I8mg6cEoApqKQBSOj086UtfDfSlPC1B+PRD2phE5etucHd\nF/RIWN3SxVzU9BKIiaDm2gwOpvI8QuorQb6HDtZFO5NsSN3PnMnSywPe7kXl/469\nuQRYXrseqyOVIi6WjhvXkyWVKVE5CBz+S8wXHfKph+9YOyUcJeAVMijp9wrjBlMc\noSzOGu79oM7tpMSq/Xo6ePJ/glNOwZR+OKrg92Qp9BGTKDNwGrxuxP/9KwWtGLNf\nOMTtIkxtC3ubhxL3lBxOd7l+Bmum0UJV2f8ogkCgvTpIz05jMoyU8qWl6kkWNQlY\nDropXWaOfy7Lac+G4qlfSgsCAwEAAQ==\n-----END PUBLIC KEY-----\n"},"nomadicLocations":[{"id":"https://hubzilla.example.org/locs/kaniini","type":"nomadicLocation","locationAddress":"acct:kaniini@hubzilla.example.org","locationPrimary":true,"locationDeleted":false}],"signature":{"@context":["https://www.w3.org/ns/activitystreams","https://w3id.org/security/v1"],"type":"RsaSignature2017","nonce":"6b981a2f3bdcffc20252e3b131d4a4569fd2dea9fac543e5196136302f492694","creator":"https://hubzilla.example.org/channel/kaniini/public_key_pem","created":"2018-05-19T08:19:13Z","signatureValue":"ezpT4iCIUzJSeJa/Jsf4EkgbX9enWZG/0eliLXZcvkeCX9mZabaX9LMQRViP2GSlAJBHJu+UqK5LWaoWw9pYkQQHUL+43w2DeBxQicEcPqpT46j6pHuWptfwB8YHTC2/Pb56Y/jseU37j+FW8xVmcGZk4cPqJRLQNojwJlQiFOpBEd4Cel6081W12Pep578+6xBL+h92RJsWznA1gE/NV9dkCqoAoNdiORJg68sVTm0yYxPit2D/DLwXUFeBhC47EZtY3DtAOf7rADGwbquXKug/wtEI47R4p9dJvMWERSVW9O2FmDk8deUjRR3qO1iYGce8O+uMnnBHmuTcToRUHH7mxfMdqjfbcZ9DGBjKtLPSOyVPT9rENeyX8fsksmX0XhfHsNSWkmeDaU5/Au3IY75gDewiGzmzLOpRc6GUnHHro7lMpyMuo3lLZKjNVsFZbx+sXCYwORz5GAMuwIt/iCUdrsQsF5aycqfUAZrFBPguH6DVjbMUqyLvS78sDKiWqgWVhq9VDKse+WuQaJLGBDJNF9APoA6NDMjjIBZfmkGf2mV7ubIYihoOncUjahFqxU5306cNxAcdj2uNcwkgX4BCnBe/L2YsvMHhZrupzDewWWy4fxhktyoZ7VhLSl1I7fMPytjOpb9EIvng4DHGX2t+hKfon2rCGfECPavwiTM="}}
+{"@context":["https://www.w3.org/ns/activitystreams","https://w3id.org/security/v1","https://hubzilla.example.org/apschema/v1.2"],"type":"Person","id":"https://hubzilla.example.org/channel/kaniini","preferredUsername":"kaniini","name":"kaniini","icon":{"type":"Image","mediaType":"image/jpeg","url":"https://hubzilla.example.org/photo/profile/l/281","height":300,"width":300},"url":{"type":"Link","mediaType":"text/html","href":"https://hubzilla.example.org/channel/kaniini"},"inbox":"https://hubzilla.example.org/inbox/kaniini","outbox":"https://hubzilla.example.org/outbox/kaniini","followers":"https://hubzilla.example.org/followers/kaniini","following":"https://hubzilla.example.org/following/kaniini","endpoints":{"sharedInbox":"https://hubzilla.example.org/inbox"},"publicKey":{"id":"https://hubzilla.example.org/channel/kaniini","owner":"https://hubzilla.example.org/channel/kaniini","publicKeyPem":"-----BEGIN PUBLIC KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvXCDkQPw+1N8B2CUd5s2\nbYvjHt+t7soMNfUiRy0qGbgW46S45k5lCq1KpbFIX3sgGZ4OWjnXVbvjCJi4kl5M\nfm5DBXzpuu05AmjVl8hqk4GejajiE/1Nq0uWHPiOSFWispUjCzzCu65V+IsiE5JU\nvcL6WEf/pYNRq7gYqyT693F7+cO5/rVv9OScx5UOxbIuU1VXYhdHCqAMDJWadC89\nhePrcD3HOQKl06W2tDxHcWk6QjrdsUQGbNOgK/QIN9gSxA+rCFEvH5O0HAhI0aXq\ncOB+vysJUFLeQOAqmAKvKS5V6RqE1GqqT0pDWHack4EmQi0gkgVzo+45xoP6wfDl\nWwG88w21LNxGvGHuN4I8mg6cEoApqKQBSOj086UtfDfSlPC1B+PRD2phE5etucHd\nF/RIWN3SxVzU9BKIiaDm2gwOpvI8QuorQb6HDtZFO5NsSN3PnMnSywPe7kXl/469\nuQRYXrseqyOVIi6WjhvXkyWVKVE5CBz+S8wXHfKph+9YOyUcJeAVMijp9wrjBlMc\noSzOGu79oM7tpMSq/Xo6ePJ/glNOwZR+OKrg92Qp9BGTKDNwGrxuxP/9KwWtGLNf\nOMTtIkxtC3ubhxL3lBxOd7l+Bmum0UJV2f8ogkCgvTpIz05jMoyU8qWl6kkWNQlY\nDropXWaOfy7Lac+G4qlfSgsCAwEAAQ==\n-----END PUBLIC KEY-----\n"},"nomadicLocations":[{"id":"https://hubzilla.example.org/locs/kaniini","type":"nomadicLocation","locationAddress":"acct:kaniini@hubzilla.example.org","locationPrimary":true,"locationDeleted":false}],"signature":{"@context":["https://www.w3.org/ns/activitystreams","https://w3id.org/security/v1"],"type":"RsaSignature2017","nonce":"6b981a2f3bdcffc20252e3b131d4a4569fd2dea9fac543e5196136302f492694","creator":"https://hubzilla.example.org/channel","created":"2018-05-19T08:19:13Z","signatureValue":"ezpT4iCIUzJSeJa/Jsf4EkgbX9enWZG/0eliLXZcvkeCX9mZabaX9LMQRViP2GSlAJBHJu+UqK5LWaoWw9pYkQQHUL+43w2DeBxQicEcPqpT46j6pHuWptfwB8YHTC2/Pb56Y/jseU37j+FW8xVmcGZk4cPqJRLQNojwJlQiFOpBEd4Cel6081W12Pep578+6xBL+h92RJsWznA1gE/NV9dkCqoAoNdiORJg68sVTm0yYxPit2D/DLwXUFeBhC47EZtY3DtAOf7rADGwbquXKug/wtEI47R4p9dJvMWERSVW9O2FmDk8deUjRR3qO1iYGce8O+uMnnBHmuTcToRUHH7mxfMdqjfbcZ9DGBjKtLPSOyVPT9rENeyX8fsksmX0XhfHsNSWkmeDaU5/Au3IY75gDewiGzmzLOpRc6GUnHHro7lMpyMuo3lLZKjNVsFZbx+sXCYwORz5GAMuwIt/iCUdrsQsF5aycqfUAZrFBPguH6DVjbMUqyLvS78sDKiWqgWVhq9VDKse+WuQaJLGBDJNF9APoA6NDMjjIBZfmkGf2mV7ubIYihoOncUjahFqxU5306cNxAcdj2uNcwkgX4BCnBe/L2YsvMHhZrupzDewWWy4fxhktyoZ7VhLSl1I7fMPytjOpb9EIvng4DHGX2t+hKfon2rCGfECPavwiTM="}}
diff --git a/test/fixtures/tesla_mock/relay@mastdon.example.org.json b/test/fixtures/tesla_mock/relay@mastdon.example.org.json
index c1fab7d3b..21dd405c8 100644
--- a/test/fixtures/tesla_mock/relay@mastdon.example.org.json
+++ b/test/fixtures/tesla_mock/relay@mastdon.example.org.json
@@ -11,7 +11,7 @@
"toot": "http://joinmastodon.org/ns#",
"Emoji": "toot:Emoji"
}],
- "id": "http://mastodon.example.org/users/admin",
+ "id": "http://mastodon.example.org/users/relay",
"type": "Application",
"invisible": true,
"following": "http://mastodon.example.org/users/admin/following",
@@ -24,8 +24,8 @@
"url": "http://mastodon.example.org/@admin",
"manuallyApprovesFollowers": false,
"publicKey": {
- "id": "http://mastodon.example.org/users/admin#main-key",
- "owner": "http://mastodon.example.org/users/admin",
+ "id": "http://mastodon.example.org/users/relay#main-key",
+ "owner": "http://mastodon.example.org/users/relay",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtc4Tir+3ADhSNF6VKrtW\nOU32T01w7V0yshmQei38YyiVwVvFu8XOP6ACchkdxbJ+C9mZud8qWaRJKVbFTMUG\nNX4+6Q+FobyuKrwN7CEwhDALZtaN2IPbaPd6uG1B7QhWorrY+yFa8f2TBM3BxnUy\nI4T+bMIZIEYG7KtljCBoQXuTQmGtuffO0UwJksidg2ffCF5Q+K//JfQagJ3UzrR+\nZXbKMJdAw4bCVJYs4Z5EhHYBwQWiXCyMGTd7BGlmMkY6Av7ZqHKC/owp3/0EWDNz\nNqF09Wcpr3y3e8nA10X40MJqp/wR+1xtxp+YGbq/Cj5hZGBG7etFOmIpVBrDOhry\nBwIDAQAB\n-----END PUBLIC KEY-----\n"
},
"attachment": [{
diff --git a/test/fixtures/users_mock/friendica_followers.json b/test/fixtures/users_mock/friendica_followers.json
index 7b86b5fe2..02b287e23 100644
--- a/test/fixtures/users_mock/friendica_followers.json
+++ b/test/fixtures/users_mock/friendica_followers.json
@@ -13,7 +13,7 @@
"directMessage": "litepub:directMessage"
}
],
- "id": "http://localhost:8080/followers/fuser3",
+ "id": "http://remote.org/followers/fuser3",
"type": "OrderedCollection",
"totalItems": 296
}
diff --git a/test/fixtures/users_mock/friendica_following.json b/test/fixtures/users_mock/friendica_following.json
index 7c526befc..0908e78f0 100644
--- a/test/fixtures/users_mock/friendica_following.json
+++ b/test/fixtures/users_mock/friendica_following.json
@@ -13,7 +13,7 @@
"directMessage": "litepub:directMessage"
}
],
- "id": "http://localhost:8080/following/fuser3",
+ "id": "http://remote.org/following/fuser3",
"type": "OrderedCollection",
"totalItems": 32
}
diff --git a/test/fixtures/users_mock/masto_closed_followers.json b/test/fixtures/users_mock/masto_closed_followers.json
index da296892d..ccc32d15e 100644
--- a/test/fixtures/users_mock/masto_closed_followers.json
+++ b/test/fixtures/users_mock/masto_closed_followers.json
@@ -1,7 +1,7 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
- "id": "http://localhost:4001/users/masto_closed/followers",
+ "id": "http://remote.org/users/masto_closed/followers",
"type": "OrderedCollection",
"totalItems": 437,
- "first": "http://localhost:4001/users/masto_closed/followers?page=1"
+ "first": "http://remote.org/users/masto_closed/followers?page=1"
}
diff --git a/test/fixtures/users_mock/masto_closed_followers_page.json b/test/fixtures/users_mock/masto_closed_followers_page.json
index 04ab0c4d3..e4f1b3ac0 100644
--- a/test/fixtures/users_mock/masto_closed_followers_page.json
+++ b/test/fixtures/users_mock/masto_closed_followers_page.json
@@ -1 +1 @@
-{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/followers?page=1","type":"OrderedCollectionPage","totalItems":437,"next":"http://localhost:4001/users/masto_closed/followers?page=2","partOf":"http://localhost:4001/users/masto_closed/followers","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}
+{"@context":"https://www.w3.org/ns/activitystreams","id":"http://remote.org/users/masto_closed/followers?page=1","type":"OrderedCollectionPage","totalItems":437,"next":"http://remote.org/users/masto_closed/followers?page=2","partOf":"http://remote.org/users/masto_closed/followers","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}
diff --git a/test/fixtures/users_mock/masto_closed_following.json b/test/fixtures/users_mock/masto_closed_following.json
index 146d49f9c..34e9e9fe8 100644
--- a/test/fixtures/users_mock/masto_closed_following.json
+++ b/test/fixtures/users_mock/masto_closed_following.json
@@ -1,7 +1,7 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
- "id": "http://localhost:4001/users/masto_closed/following",
+ "id": "http://remote.org/users/masto_closed/following",
"type": "OrderedCollection",
"totalItems": 152,
- "first": "http://localhost:4001/users/masto_closed/following?page=1"
+ "first": "http://remote.org/users/masto_closed/following?page=1"
}
diff --git a/test/fixtures/users_mock/masto_closed_following_page.json b/test/fixtures/users_mock/masto_closed_following_page.json
index 8d8324699..d398ae3cf 100644
--- a/test/fixtures/users_mock/masto_closed_following_page.json
+++ b/test/fixtures/users_mock/masto_closed_following_page.json
@@ -1 +1 @@
-{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/following?page=1","type":"OrderedCollectionPage","totalItems":152,"next":"http://localhost:4001/users/masto_closed/following?page=2","partOf":"http://localhost:4001/users/masto_closed/following","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}
+{"@context":"https://www.w3.org/ns/activitystreams","id":"http://remote.org/users/masto_closed/following?page=1","type":"OrderedCollectionPage","totalItems":152,"next":"http://remote.org/users/masto_closed/following?page=2","partOf":"http://remote.org/users/masto_closed/following","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}
diff --git a/test/fixtures/users_mock/pleroma_followers.json b/test/fixtures/users_mock/pleroma_followers.json
index db71d084b..9611990ee 100644
--- a/test/fixtures/users_mock/pleroma_followers.json
+++ b/test/fixtures/users_mock/pleroma_followers.json
@@ -1,14 +1,14 @@
{
"type": "OrderedCollection",
"totalItems": 527,
- "id": "http://localhost:4001/users/fuser2/followers",
+ "id": "http://remote.org/users/fuser2/followers",
"first": {
"type": "OrderedCollectionPage",
"totalItems": 527,
- "partOf": "http://localhost:4001/users/fuser2/followers",
+ "partOf": "http://remote.org/users/fuser2/followers",
"orderedItems": [],
- "next": "http://localhost:4001/users/fuser2/followers?page=2",
- "id": "http://localhost:4001/users/fuser2/followers?page=1"
+ "next": "http://remote.org/users/fuser2/followers?page=2",
+ "id": "http://remote.org/users/fuser2/followers?page=1"
},
"@context": [
"https://www.w3.org/ns/activitystreams",
diff --git a/test/fixtures/users_mock/pleroma_following.json b/test/fixtures/users_mock/pleroma_following.json
index 33d087703..27fadbc94 100644
--- a/test/fixtures/users_mock/pleroma_following.json
+++ b/test/fixtures/users_mock/pleroma_following.json
@@ -1,14 +1,14 @@
{
"type": "OrderedCollection",
"totalItems": 267,
- "id": "http://localhost:4001/users/fuser2/following",
+ "id": "http://remote.org/users/fuser2/following",
"first": {
"type": "OrderedCollectionPage",
"totalItems": 267,
- "partOf": "http://localhost:4001/users/fuser2/following",
+ "partOf": "http://remote.org/users/fuser2/following",
"orderedItems": [],
- "next": "http://localhost:4001/users/fuser2/following?page=2",
- "id": "http://localhost:4001/users/fuser2/following?page=1"
+ "next": "http://remote.org/users/fuser2/following?page=2",
+ "id": "http://remote.org/users/fuser2/following?page=1"
},
"@context": [
"https://www.w3.org/ns/activitystreams",
diff --git a/test/mix/tasks/pleroma/instance_test.exs b/test/mix/tasks/pleroma/instance_test.exs
index 5a5a68053..17b2e3267 100644
--- a/test/mix/tasks/pleroma/instance_test.exs
+++ b/test/mix/tasks/pleroma/instance_test.exs
@@ -39,6 +39,8 @@ test "running gen" do
tmp_path() <> "setup.psql",
"--domain",
"test.pleroma.social",
+ "--media-url",
+ "https://media.pleroma.social/media",
"--instance-name",
"Pleroma",
"--admin-email",
@@ -69,8 +71,6 @@ test "running gen" do
"./test/../test/instance/static/",
"--strip-uploads",
"y",
- "--dedupe-uploads",
- "n",
"--anonymize-uploads",
"n"
])
@@ -92,6 +92,7 @@ test "running gen" do
assert generated_config =~ "configurable_from_database: true"
assert generated_config =~ "http: [ip: {127, 0, 0, 1}, port: 4000]"
assert generated_config =~ "filters: [Pleroma.Upload.Filter.Exiftool]"
+ assert generated_config =~ "base_url: \"https://media.pleroma.social/media\""
assert File.read!(tmp_path() <> "setup.psql") == generated_setup_psql()
assert File.exists?(Path.expand("./test/instance/static/robots.txt"))
end
diff --git a/test/mix/tasks/pleroma/uploads_test.exs b/test/mix/tasks/pleroma/uploads_test.exs
index 67fb642c1..d00e25a37 100644
--- a/test/mix/tasks/pleroma/uploads_test.exs
+++ b/test/mix/tasks/pleroma/uploads_test.exs
@@ -16,7 +16,6 @@ defmodule Mix.Tasks.Pleroma.UploadsTest do
Mix.shell(Mix.Shell.IO)
end)
- File.mkdir_p!("test/uploads")
:ok
end
diff --git a/test/pleroma/collections/collections_fetcher_test.exs b/test/pleroma/collections/collections_fetcher_test.exs
index 7a582a3d7..ff1aa84db 100644
--- a/test/pleroma/collections/collections_fetcher_test.exs
+++ b/test/pleroma/collections/collections_fetcher_test.exs
@@ -12,11 +12,14 @@ defmodule Akkoma.Collections.FetcherTest do
end
test "it should extract items from an embedded array in a Collection" do
+ ap_id = "https://example.com/collection/ordered_array"
+
unordered_collection =
"test/fixtures/collections/unordered_array.json"
|> File.read!()
-
- ap_id = "https://example.com/collection/ordered_array"
+ |> Jason.decode!()
+ |> Map.put("id", ap_id)
+ |> Jason.encode!(pretty: true)
Tesla.Mock.mock(fn
%{
diff --git a/test/pleroma/config/deprecation_warnings_test.exs b/test/pleroma/config/deprecation_warnings_test.exs
index 98c128e6a..96d6fd739 100644
--- a/test/pleroma/config/deprecation_warnings_test.exs
+++ b/test/pleroma/config/deprecation_warnings_test.exs
@@ -289,4 +289,64 @@ test "check_http_adapter/0" do
Application.put_env(:tesla, :adapter, Tesla.Mock)
end
+
+ describe "check_uploader_base_url_set/0" do
+ test "should error if the base_url is not set" do
+ clear_config([Pleroma.Upload, :base_url], nil)
+
+ # we need to capture the error
+ assert_raise ArgumentError, fn ->
+ assert capture_log(fn ->
+ DeprecationWarnings.check_uploader_base_url_set()
+ end) =~ "Your config does not specify a base_url for uploads!"
+ end
+ end
+
+ test "should not error if the base_url is set" do
+ clear_config([Pleroma.Upload, :base_url], "https://example.com")
+
+ refute capture_log(fn ->
+ DeprecationWarnings.check_uploader_base_url_set()
+ end) =~ "Your config does not specify a base_url for uploads!"
+ end
+
+ test "should not error if local uploader is not used" do
+ clear_config([Pleroma.Upload, :base_url], nil)
+ clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.S3)
+
+ refute capture_log(fn ->
+ DeprecationWarnings.check_uploader_base_url_set()
+ end) =~ "Your config does not specify a base_url for uploads!"
+ end
+ end
+
+ describe "check_uploader_base_url_is_not_base_domain/0" do
+ test "should error if the akkoma domain is the same as the upload domain" do
+ clear_config([Pleroma.Upload, :base_url], "http://localhost")
+
+ assert capture_log(fn ->
+ DeprecationWarnings.check_uploader_base_url_is_not_base_domain()
+ end) =~ "Your Akkoma Host and your Upload base_url's host are the same!"
+ end
+
+ test "should not error if the local uploader is not used" do
+ clear_config([Pleroma.Upload, :base_url], "http://localhost")
+ clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.S3)
+
+ refute capture_log(fn ->
+ DeprecationWarnings.check_uploader_base_url_is_not_base_domain()
+ end) =~ "Your Akkoma Host and your Upload base_url's host are the same!"
+ end
+
+ test "should not error if the akkoma domain is different from the upload domain" do
+ clear_config([Pleroma.Upload, :base_url], "https://media.localhost")
+ clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
+
+ refute capture_log(fn ->
+ DeprecationWarnings.check_uploader_base_url_is_not_base_domain()
+ end) =~ "Your Akkoma Host and your Upload base_url's host are the same!"
+
+ clear_config([Pleroma.Upload, :base_url])
+ end
+ end
end
diff --git a/test/pleroma/emoji/pack_test.exs b/test/pleroma/emoji/pack_test.exs
index 4d769789d..f5d2e2eef 100644
--- a/test/pleroma/emoji/pack_test.exs
+++ b/test/pleroma/emoji/pack_test.exs
@@ -93,7 +93,9 @@ test "add emoji file", %{pack: pack} do
assert updated_pack.files_count == 1
end
- test "load_pack/1 ignores path traversal in a forged pack name", %{pack: pack} do
- assert {:ok, ^pack} = Pack.load_pack("../../../../../dump_pack")
+  test "load_pack/1 raises on path traversal in a forged pack name" do
+ assert_raise(RuntimeError, "Invalid or malicious pack name: ../../../../../dump_pack", fn ->
+ Pack.load_pack("../../../../../dump_pack")
+ end)
end
end
diff --git a/test/pleroma/object/containment_test.exs b/test/pleroma/object/containment_test.exs
index fb2fb7d49..f8f40a3ac 100644
--- a/test/pleroma/object/containment_test.exs
+++ b/test/pleroma/object/containment_test.exs
@@ -17,16 +17,58 @@ defmodule Pleroma.Object.ContainmentTest do
end
describe "general origin containment" do
- test "works for completely actorless posts" do
- assert :error ==
- Containment.contain_origin("https://glaceon.social/users/monorail", %{
+    test "handles completely actorless objects gracefully" do
+ assert :ok ==
+ Containment.contain_origin("https://glaceon.social/statuses/123", %{
"deleted" => "2019-10-30T05:48:50.249606Z",
"formerType" => "Note",
- "id" => "https://glaceon.social/users/monorail/statuses/103049757364029187",
+ "id" => "https://glaceon.social/statuses/123",
"type" => "Tombstone"
})
end
+ test "errors for spoofed actors" do
+ assert :error ==
+ Containment.contain_origin("https://glaceon.social/statuses/123", %{
+ "actor" => "https://otp.akkoma.dev/users/you",
+ "id" => "https://glaceon.social/statuses/123",
+ "type" => "Note"
+ })
+ end
+
+ test "errors for spoofed attributedTo" do
+ assert :error ==
+ Containment.contain_origin("https://glaceon.social/statuses/123", %{
+ "attributedTo" => "https://otp.akkoma.dev/users/you",
+ "id" => "https://glaceon.social/statuses/123",
+ "type" => "Note"
+ })
+ end
+
+ test "accepts valid actors" do
+ assert :ok ==
+ Containment.contain_origin("https://glaceon.social/statuses/123", %{
+ "actor" => "https://glaceon.social/users/monorail",
+ "attributedTo" => "https://glaceon.social/users/monorail",
+ "id" => "https://glaceon.social/statuses/123",
+ "type" => "Note"
+ })
+
+ assert :ok ==
+ Containment.contain_origin("https://glaceon.social/statuses/123", %{
+ "actor" => "https://glaceon.social/users/monorail",
+ "id" => "https://glaceon.social/statuses/123",
+ "type" => "Note"
+ })
+
+ assert :ok ==
+ Containment.contain_origin("https://glaceon.social/statuses/123", %{
+ "attributedTo" => "https://glaceon.social/users/monorail",
+ "id" => "https://glaceon.social/statuses/123",
+ "type" => "Note"
+ })
+ end
+
test "contain_origin_from_id() catches obvious spoofing attempts" do
data = %{
"id" => "http://example.com/~alyssa/activities/1234.json"
@@ -63,6 +105,56 @@ test "contain_origin_from_id() allows matching IDs" do
)
end
+ test "contain_id_to_fetch() refuses alternate IDs within the same origin domain" do
+ data = %{
+ "id" => "http://example.com/~alyssa/activities/1234.json",
+ "url" => "http://example.com/@alyssa/status/1234"
+ }
+
+ :error =
+ Containment.contain_id_to_fetch(
+ "http://example.com/~alyssa/activities/1234",
+ data
+ )
+ end
+
+ test "contain_id_to_fetch() allows matching IDs" do
+ data = %{
+ "id" => "http://example.com/~alyssa/activities/1234.json/"
+ }
+
+ :ok =
+ Containment.contain_id_to_fetch(
+ "http://example.com/~alyssa/activities/1234.json/",
+ data
+ )
+
+ :ok =
+ Containment.contain_id_to_fetch(
+ "http://example.com/~alyssa/activities/1234.json",
+ data
+ )
+ end
+
+ test "contain_id_to_fetch() allows display URLs" do
+ data = %{
+ "id" => "http://example.com/~alyssa/activities/1234.json",
+ "url" => "http://example.com/@alyssa/status/1234"
+ }
+
+ :ok =
+ Containment.contain_id_to_fetch(
+ "http://example.com/@alyssa/status/1234",
+ data
+ )
+
+ :ok =
+ Containment.contain_id_to_fetch(
+ "http://example.com/@alyssa/status/1234/",
+ data
+ )
+ end
+
test "users cannot be collided through fake direction spoofing attempts" do
_user =
insert(:user, %{
diff --git a/test/pleroma/object/fetcher_test.exs b/test/pleroma/object/fetcher_test.exs
index 8cf0bce48..4c4831af3 100644
--- a/test/pleroma/object/fetcher_test.exs
+++ b/test/pleroma/object/fetcher_test.exs
@@ -14,6 +14,17 @@ defmodule Pleroma.Object.FetcherTest do
import Mock
import Tesla.Mock
+ defp spoofed_object_with_ids(
+ id \\ "https://patch.cx/objects/spoof",
+ actor_id \\ "https://patch.cx/users/rin"
+ ) do
+ File.read!("test/fixtures/spoofed-object.json")
+ |> Jason.decode!()
+ |> Map.put("id", id)
+ |> Map.put("actor", actor_id)
+ |> Jason.encode!()
+ end
+
setup do
mock(fn
%{method: :get, url: "https://mastodon.example.org/users/userisgone"} ->
@@ -22,6 +33,32 @@ defmodule Pleroma.Object.FetcherTest do
%{method: :get, url: "https://mastodon.example.org/users/userisgone404"} ->
%Tesla.Env{status: 404}
+ # Spoof: wrong Content-Type
+ %{
+ method: :get,
+ url: "https://patch.cx/objects/spoof_content_type.json"
+ } ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://patch.cx/objects/spoof_content_type.json",
+ headers: [{"content-type", "application/json"}],
+ body: spoofed_object_with_ids("https://patch.cx/objects/spoof_content_type.json")
+ }
+
+ # Spoof: no Content-Type
+ %{
+ method: :get,
+ url: "https://patch.cx/objects/spoof_content_type"
+ } ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://patch.cx/objects/spoof_content_type",
+ headers: [],
+ body: spoofed_object_with_ids("https://patch.cx/objects/spoof_content_type")
+ }
+
+ # Spoof: mismatching ids
+      # Variant 1: Non-existing fake id
%{
method: :get,
url:
@@ -29,8 +66,75 @@ defmodule Pleroma.Object.FetcherTest do
} ->
%Tesla.Env{
status: 200,
- headers: [{"content-type", "application/json"}],
- body: File.read!("test/fixtures/spoofed-object.json")
+ url:
+ "https://patch.cx/media/03ca3c8b4ac3ddd08bf0f84be7885f2f88de0f709112131a22d83650819e36c2.json",
+ headers: [{"content-type", "application/activity+json"}],
+ body: spoofed_object_with_ids()
+ }
+
+ %{method: :get, url: "https://patch.cx/objects/spoof"} ->
+ %Tesla.Env{
+ status: 404,
+ url: "https://patch.cx/objects/spoof",
+ headers: [],
+ body: "Not found"
+ }
+
+      # Variant 2: two-stage payload
+ %{method: :get, url: "https://patch.cx/media/spoof_stage1.json"} ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://patch.cx/media/spoof_stage1.json",
+ headers: [{"content-type", "application/activity+json"}],
+ body: spoofed_object_with_ids("https://patch.cx/media/spoof_stage2.json")
+ }
+
+ %{method: :get, url: "https://patch.cx/media/spoof_stage2.json"} ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://patch.cx/media/spoof_stage2.json",
+ headers: [{"content-type", "application/activity+json"}],
+ body: spoofed_object_with_ids("https://patch.cx/media/unpredictable.json")
+ }
+
+ # Spoof: cross-domain redirect with original domain id
+ %{method: :get, url: "https://patch.cx/objects/spoof_media_redirect1"} ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://media.patch.cx/objects/spoof",
+ headers: [{"content-type", "application/activity+json"}],
+ body: spoofed_object_with_ids("https://patch.cx/objects/spoof_media_redirect1")
+ }
+
+ # Spoof: cross-domain redirect with final domain id
+ %{method: :get, url: "https://patch.cx/objects/spoof_media_redirect2"} ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://media.patch.cx/objects/spoof_media_redirect2",
+ headers: [{"content-type", "application/activity+json"}],
+ body: spoofed_object_with_ids("https://media.patch.cx/objects/spoof_media_redirect2")
+ }
+
+ # No-Spoof: same domain redirect
+ %{method: :get, url: "https://patch.cx/objects/spoof_redirect"} ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://patch.cx/objects/spoof_redirect",
+ headers: [{"content-type", "application/activity+json"}],
+ body: spoofed_object_with_ids("https://patch.cx/objects/spoof_redirect")
+ }
+
+ # Spoof: Actor from another domain
+ %{method: :get, url: "https://patch.cx/objects/spoof_foreign_actor"} ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://patch.cx/objects/spoof_foreign_actor",
+ headers: [{"content-type", "application/activity+json"}],
+ body:
+ spoofed_object_with_ids(
+ "https://patch.cx/objects/spoof_foreign_actor",
+ "https://not.patch.cx/users/rin"
+ )
}
env ->
@@ -46,6 +150,7 @@ defmodule Pleroma.Object.FetcherTest do
%{method: :get, url: "https://social.sakamoto.gq/notice/9wTkLEnuq47B25EehM"} ->
%Tesla.Env{
status: 200,
+ url: "https://social.sakamoto.gq/objects/f20f2497-66d9-4a52-a2e1-1be2a39c32c1",
body: File.read!("test/fixtures/fetch_mocks/9wTkLEnuq47B25EehM.json"),
headers: HttpRequestMock.activitypub_object_headers()
}
@@ -129,6 +234,71 @@ test "it rejects objects when attributedTo is wrong (variant 2)" do
end
end
+ describe "fetcher security and auth checks" do
+ test "it does not fetch a spoofed object without content type" do
+ assert {:error, {:content_type, nil}} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/objects/spoof_content_type"
+ )
+ end
+
+ test "it does not fetch a spoofed object with wrong content type" do
+ assert {:error, {:content_type, _}} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/objects/spoof_content_type.json"
+ )
+ end
+
+ test "it does not fetch a spoofed object with id different from URL" do
+ assert {:error, "Object's ActivityPub id/url does not match final fetch URL"} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/media/03ca3c8b4ac3ddd08bf0f84be7885f2f88de0f709112131a22d83650819e36c2.json"
+ )
+
+ assert {:error, "Object's ActivityPub id/url does not match final fetch URL"} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/media/spoof_stage1.json"
+ )
+ end
+
+ test "it does not fetch an object via cross-domain redirects (initial id)" do
+ assert {:error, {:cross_domain_redirect, true}} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/objects/spoof_media_redirect1"
+ )
+ end
+
+ test "it does not fetch an object via cross-domain redirects (final id)" do
+ assert {:error, {:cross_domain_redirect, true}} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/objects/spoof_media_redirect2"
+ )
+ end
+
+ test "it accepts same-domain redirects" do
+ assert {:ok, %{"id" => id} = _object} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/objects/spoof_redirect"
+ )
+
+ assert id == "https://patch.cx/objects/spoof_redirect"
+ end
+
+ test "it does not fetch a spoofed object with a foreign actor" do
+ assert {:error, "Object containment failed."} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ "https://patch.cx/objects/spoof_foreign_actor"
+ )
+ end
+
+ test "it does not fetch from localhost" do
+ assert {:error, "Trying to fetch local resource"} =
+ Fetcher.fetch_and_contain_remote_object_from_id(
+ Pleroma.Web.Endpoint.url() <> "/spoof_local"
+ )
+ end
+ end
+
describe "fetching an object" do
test "it fetches an object" do
{:ok, object} =
@@ -155,13 +325,6 @@ test "Return MRF reason when fetched status is rejected by one" do
)
end
- test "it does not fetch a spoofed object uploaded on an instance as an attachment" do
- assert {:error, _} =
- Fetcher.fetch_object_from_id(
- "https://patch.cx/media/03ca3c8b4ac3ddd08bf0f84be7885f2f88de0f709112131a22d83650819e36c2.json"
- )
- end
-
test "does not fetch anything from a rejected instance" do
clear_config([:mrf_simple, :reject], [{"evil.example.org", "i said so"}])
@@ -583,12 +746,13 @@ test "should return ok if the content type is application/activity+json" do
} ->
%Tesla.Env{
status: 200,
+ url: "https://mastodon.social/2",
headers: [{"content-type", "application/activity+json"}],
body: "{}"
}
end)
- assert {:ok, "{}"} = Fetcher.get_object("https://mastodon.social/2")
+ assert {:ok, _, "{}"} = Fetcher.get_object("https://mastodon.social/2")
end
test "should return ok if the content type is application/ld+json with a profile" do
@@ -599,6 +763,7 @@ test "should return ok if the content type is application/ld+json with a profile
} ->
%Tesla.Env{
status: 200,
+ url: "https://mastodon.social/2",
headers: [
{"content-type",
"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""}
@@ -607,24 +772,7 @@ test "should return ok if the content type is application/ld+json with a profile
}
end)
- assert {:ok, "{}"} = Fetcher.get_object("https://mastodon.social/2")
-
- Tesla.Mock.mock(fn
- %{
- method: :get,
- url: "https://mastodon.social/2"
- } ->
- %Tesla.Env{
- status: 200,
- headers: [
- {"content-type",
- "application/ld+json; profile=\"http://www.w3.org/ns/activitystreams\""}
- ],
- body: "{}"
- }
- end)
-
- assert {:ok, "{}"} = Fetcher.get_object("https://mastodon.social/2")
+ assert {:ok, _, "{}"} = Fetcher.get_object("https://mastodon.social/2")
end
test "should not return ok with other content types" do
@@ -635,6 +783,7 @@ test "should not return ok with other content types" do
} ->
%Tesla.Env{
status: 200,
+ url: "https://mastodon.social/2",
headers: [{"content-type", "application/json"}],
body: "{}"
}
@@ -643,5 +792,23 @@ test "should not return ok with other content types" do
assert {:error, {:content_type, "application/json"}} =
Fetcher.get_object("https://mastodon.social/2")
end
+
+ test "returns the url after redirects" do
+ Tesla.Mock.mock(fn
+ %{
+ method: :get,
+ url: "https://mastodon.social/5"
+ } ->
+ %Tesla.Env{
+ status: 200,
+ url: "https://mastodon.social/7",
+ headers: [{"content-type", "application/activity+json"}],
+ body: "{}"
+ }
+ end)
+
+ assert {:ok, "https://mastodon.social/7", "{}"} =
+ Fetcher.get_object("https://mastodon.social/5")
+ end
end
end
diff --git a/test/pleroma/object_test.exs b/test/pleroma/object_test.exs
index 8320660a5..4b0fec1bd 100644
--- a/test/pleroma/object_test.exs
+++ b/test/pleroma/object_test.exs
@@ -22,6 +22,13 @@ defmodule Pleroma.ObjectTest do
:ok
end
+ # Only works for a single attachment but that's all we need here
+ defp get_attachment_filepath(note, uploads_dir) do
+ %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} = note
+ filename = href |> Path.basename()
+ "#{uploads_dir}/#{filename}"
+ end
+
test "returns an object by it's AP id" do
object = insert(:note)
found_object = Object.get_by_ap_id(object.data["id"])
@@ -95,14 +102,13 @@ test "Disabled via config" do
{:ok, %Object{} = attachment} =
Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)
- %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
- note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
+ note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
- path = href |> Path.dirname() |> Path.basename()
+ path = get_attachment_filepath(note, uploads_dir)
- assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
+ assert File.exists?("#{path}")
Object.delete(note)
@@ -111,7 +117,7 @@ test "Disabled via config" do
assert Object.get_by_id(note.id).data["deleted"]
refute Object.get_by_id(attachment.id) == nil
- assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
+ assert File.exists?("#{path}")
end
test "in subdirectories" do
@@ -129,14 +135,13 @@ test "in subdirectories" do
{:ok, %Object{} = attachment} =
Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)
- %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
- note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
+ note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
- path = href |> Path.dirname() |> Path.basename()
+ path = get_attachment_filepath(note, uploads_dir)
- assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
+ assert File.exists?("#{path}")
Object.delete(note)
@@ -145,7 +150,7 @@ test "in subdirectories" do
assert Object.get_by_id(note.id).data["deleted"]
assert Object.get_by_id(attachment.id) == nil
- assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
+ refute File.exists?("#{path}")
end
test "with dedupe enabled" do
@@ -168,13 +173,11 @@ test "with dedupe enabled" do
{:ok, %Object{} = attachment} =
Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)
- %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
- note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
+ note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
- filename = Path.basename(href)
+ path = get_attachment_filepath(note, uploads_dir)
- assert {:ok, files} = File.ls(uploads_dir)
- assert filename in files
+ assert File.exists?("#{path}")
Object.delete(note)
@@ -182,8 +185,8 @@ test "with dedupe enabled" do
assert Object.get_by_id(note.id).data["deleted"]
assert Object.get_by_id(attachment.id) == nil
- assert {:ok, files} = File.ls(uploads_dir)
- refute filename in files
+      # NOTE: this may fail if another test runs concurrently using the same image file
+ refute File.exists?("#{path}")
end
test "with objects that have legacy data.url attribute" do
@@ -203,14 +206,13 @@ test "with objects that have legacy data.url attribute" do
{:ok, %Object{}} = Object.create(%{url: "https://google.com", actor: user.ap_id})
- %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
- note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
+ note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
- path = href |> Path.dirname() |> Path.basename()
+ path = get_attachment_filepath(note, uploads_dir)
- assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
+ assert File.exists?("#{path}")
Object.delete(note)
@@ -219,7 +221,7 @@ test "with objects that have legacy data.url attribute" do
assert Object.get_by_id(note.id).data["deleted"]
assert Object.get_by_id(attachment.id) == nil
- assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
+ refute File.exists?("#{path}")
end
test "With custom base_url" do
@@ -238,14 +240,13 @@ test "With custom base_url" do
{:ok, %Object{} = attachment} =
Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)
- %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
- note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
+ note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
- path = href |> Path.dirname() |> Path.basename()
+ path = get_attachment_filepath(note, uploads_dir)
- assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
+ assert File.exists?("#{path}")
Object.delete(note)
@@ -254,7 +255,7 @@ test "With custom base_url" do
assert Object.get_by_id(note.id).data["deleted"]
assert Object.get_by_id(attachment.id) == nil
- assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
+ refute File.exists?("#{path}")
end
end
diff --git a/test/pleroma/reverse_proxy_test.exs b/test/pleroma/reverse_proxy_test.exs
index e3e2a1571..fc6ae42bc 100644
--- a/test/pleroma/reverse_proxy_test.exs
+++ b/test/pleroma/reverse_proxy_test.exs
@@ -75,13 +75,16 @@ test "common", %{conn: conn} do
Tesla.Mock.mock(fn %{method: :head, url: "/head"} ->
%Tesla.Env{
status: 200,
- headers: [{"content-type", "text/html; charset=utf-8"}],
+ headers: [{"content-type", "image/png"}],
body: ""
}
end)
conn = ReverseProxy.call(Map.put(conn, :method, "HEAD"), "/head")
- assert html_response(conn, 200) == ""
+
+ assert conn.status == 200
+ assert Conn.get_resp_header(conn, "content-type") == ["image/png"]
+ assert conn.resp_body == ""
end
end
@@ -252,4 +255,38 @@ test "with content-disposition header", %{conn: conn} do
assert {"content-disposition", "attachment; filename=\"filename.jpg\""} in conn.resp_headers
end
end
+
+ describe "content-type sanitisation" do
+ test "preserves video type", %{conn: conn} do
+ Tesla.Mock.mock(fn %{method: :get, url: "/content"} ->
+ %Tesla.Env{
+ status: 200,
+ headers: [{"content-type", "video/mp4"}],
+ body: "test"
+ }
+ end)
+
+ conn = ReverseProxy.call(Map.put(conn, :method, "GET"), "/content")
+
+ assert conn.status == 200
+ assert Conn.get_resp_header(conn, "content-type") == ["video/mp4"]
+ assert conn.resp_body == "test"
+ end
+
+ test "replaces application type", %{conn: conn} do
+ Tesla.Mock.mock(fn %{method: :get, url: "/content"} ->
+ %Tesla.Env{
+ status: 200,
+ headers: [{"content-type", "application/activity+json"}],
+ body: "test"
+ }
+ end)
+
+ conn = ReverseProxy.call(Map.put(conn, :method, "GET"), "/content")
+
+ assert conn.status == 200
+ assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
+ assert conn.resp_body == "test"
+ end
+ end
end
diff --git a/test/pleroma/upload_test.exs b/test/pleroma/upload_test.exs
index ad6065b43..27a2d1b97 100644
--- a/test/pleroma/upload_test.exs
+++ b/test/pleroma/upload_test.exs
@@ -188,7 +188,7 @@ test "copies the file to the configured folder with anonymizing filename" do
refute data["name"] == "an [image.jpg"
end
- test "escapes invalid characters in url" do
+ test "mangles the filename" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
file = %Plug.Upload{
@@ -200,23 +200,8 @@ test "escapes invalid characters in url" do
{:ok, data} = Upload.store(file)
[attachment_url | _] = data["url"]
- assert Path.basename(attachment_url["href"]) == "an%E2%80%A6%20image.jpg"
- end
-
- test "escapes reserved uri characters" do
- File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
-
- file = %Plug.Upload{
- content_type: "image/jpeg",
- path: Path.absname("test/fixtures/image_tmp.jpg"),
- filename: ":?#[]@!$&\\'()*+,;=.jpg"
- }
-
- {:ok, data} = Upload.store(file)
- [attachment_url | _] = data["url"]
-
- assert Path.basename(attachment_url["href"]) ==
- "%3A%3F%23%5B%5D%40%21%24%26%5C%27%28%29%2A%2B%2C%3B%3D.jpg"
+ refute Path.basename(attachment_url["href"]) == "an%E2%80%A6%20image.jpg"
+ refute Path.basename(attachment_url["href"]) == "an… image.jpg"
end
end
diff --git a/test/pleroma/user_test.exs b/test/pleroma/user_test.exs
index 5a0d77cab..96ca8d0fd 100644
--- a/test/pleroma/user_test.exs
+++ b/test/pleroma/user_test.exs
@@ -326,9 +326,9 @@ test "unfollow with synchronizes external user" do
insert(:user, %{
local: false,
nickname: "fuser2",
- ap_id: "http://localhost:4001/users/fuser2",
- follower_address: "http://localhost:4001/users/fuser2/followers",
- following_address: "http://localhost:4001/users/fuser2/following"
+ ap_id: "http://remote.org/users/fuser2",
+ follower_address: "http://remote.org/users/fuser2/followers",
+ following_address: "http://remote.org/users/fuser2/following"
})
{:ok, user, followed} = User.follow(user, followed, :follow_accept)
@@ -2177,8 +2177,8 @@ test "it returns a list of AP ids for a given set of nicknames" do
describe "sync followers count" do
setup do
- user1 = insert(:user, local: false, ap_id: "http://localhost:4001/users/masto_closed")
- user2 = insert(:user, local: false, ap_id: "http://localhost:4001/users/fuser2")
+ user1 = insert(:user, local: false, ap_id: "http://remote.org/users/masto_closed")
+ user2 = insert(:user, local: false, ap_id: "http://remote.org/users/fuser2")
insert(:user, local: true)
insert(:user, local: false, is_active: false)
{:ok, user1: user1, user2: user2}
@@ -2272,8 +2272,8 @@ test "updates the counters normally on following/getting a follow when disabled"
other_user =
insert(:user,
local: false,
- follower_address: "http://localhost:4001/users/masto_closed/followers",
- following_address: "http://localhost:4001/users/masto_closed/following",
+ follower_address: "http://remote.org/users/masto_closed/followers",
+ following_address: "http://remote.org/users/masto_closed/following",
ap_enabled: true
)
@@ -2294,8 +2294,8 @@ test "synchronizes the counters with the remote instance for the followed when e
other_user =
insert(:user,
local: false,
- follower_address: "http://localhost:4001/users/masto_closed/followers",
- following_address: "http://localhost:4001/users/masto_closed/following",
+ follower_address: "http://remote.org/users/masto_closed/followers",
+ following_address: "http://remote.org/users/masto_closed/following",
ap_enabled: true
)
@@ -2316,8 +2316,8 @@ test "synchronizes the counters with the remote instance for the follower when e
other_user =
insert(:user,
local: false,
- follower_address: "http://localhost:4001/users/masto_closed/followers",
- following_address: "http://localhost:4001/users/masto_closed/following",
+ follower_address: "http://remote.org/users/masto_closed/followers",
+ following_address: "http://remote.org/users/masto_closed/following",
ap_enabled: true
)
diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs
index 5d5388cf5..5ad6d4716 100644
--- a/test/pleroma/web/activity_pub/activity_pub_test.exs
+++ b/test/pleroma/web/activity_pub/activity_pub_test.exs
@@ -312,7 +312,7 @@ test "fetches user featured collection" do
end
test "fetches user featured collection using the first property" do
- featured_url = "https://friendica.example.com/raha/collections/featured"
+ featured_url = "https://friendica.example.com/featured/raha"
first_url = "https://friendica.example.com/featured/raha?page=1"
featured_data =
@@ -350,7 +350,7 @@ test "fetches user featured collection using the first property" do
end
test "fetches user featured when it has string IDs" do
- featured_url = "https://example.com/alisaie/collections/featured"
+ featured_url = "https://example.com/users/alisaie/collections/featured"
dead_url = "https://example.com/users/alisaie/statuses/108311386746229284"
featured_data =
@@ -1304,14 +1304,6 @@ test "returns reblogs for users for whom reblogs have not been muted" do
%{test_file: test_file}
end
- test "strips / from filename", %{test_file: file} do
- file = %Plug.Upload{file | filename: "../../../../../nested/bad.jpg"}
- {:ok, %Object{} = object} = ActivityPub.upload(file)
- [%{"href" => href}] = object.data["url"]
- assert Regex.match?(~r"/bad.jpg$", href)
- refute Regex.match?(~r"/nested/", href)
- end
-
test "sets a description if given", %{test_file: file} do
{:ok, %Object{} = object} = ActivityPub.upload(file, description: "a cool file")
assert object.data["name"] == "a cool file"
@@ -1651,8 +1643,8 @@ test "synchronizes following/followers counters" do
user =
insert(:user,
local: false,
- follower_address: "http://localhost:4001/users/fuser2/followers",
- following_address: "http://localhost:4001/users/fuser2/following"
+ follower_address: "http://remote.org/users/fuser2/followers",
+ following_address: "http://remote.org/users/fuser2/following"
)
{:ok, info} = ActivityPub.fetch_follow_information_for_user(user)
@@ -1663,7 +1655,7 @@ test "synchronizes following/followers counters" do
test "detects hidden followers" do
mock(fn env ->
case env.url do
- "http://localhost:4001/users/masto_closed/followers?page=1" ->
+ "http://remote.org/users/masto_closed/followers?page=1" ->
%Tesla.Env{status: 403, body: ""}
_ ->
@@ -1674,8 +1666,8 @@ test "detects hidden followers" do
user =
insert(:user,
local: false,
- follower_address: "http://localhost:4001/users/masto_closed/followers",
- following_address: "http://localhost:4001/users/masto_closed/following"
+ follower_address: "http://remote.org/users/masto_closed/followers",
+ following_address: "http://remote.org/users/masto_closed/following"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
@@ -1686,7 +1678,7 @@ test "detects hidden followers" do
test "detects hidden follows" do
mock(fn env ->
case env.url do
- "http://localhost:4001/users/masto_closed/following?page=1" ->
+ "http://remote.org/users/masto_closed/following?page=1" ->
%Tesla.Env{status: 403, body: ""}
_ ->
@@ -1697,8 +1689,8 @@ test "detects hidden follows" do
user =
insert(:user,
local: false,
- follower_address: "http://localhost:4001/users/masto_closed/followers",
- following_address: "http://localhost:4001/users/masto_closed/following"
+ follower_address: "http://remote.org/users/masto_closed/followers",
+ following_address: "http://remote.org/users/masto_closed/following"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
@@ -1710,8 +1702,8 @@ test "detects hidden follows/followers for friendica" do
user =
insert(:user,
local: false,
- follower_address: "http://localhost:8080/followers/fuser3",
- following_address: "http://localhost:8080/following/fuser3"
+ follower_address: "http://remote.org/followers/fuser3",
+ following_address: "http://remote.org/following/fuser3"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
@@ -1724,28 +1716,28 @@ test "detects hidden follows/followers for friendica" do
test "doesn't crash when follower and following counters are hidden" do
mock(fn env ->
case env.url do
- "http://localhost:4001/users/masto_hidden_counters/following" ->
+ "http://remote.org/users/masto_hidden_counters/following" ->
json(
%{
"@context" => "https://www.w3.org/ns/activitystreams",
- "id" => "http://localhost:4001/users/masto_hidden_counters/followers"
+ "id" => "http://remote.org/users/masto_hidden_counters/following"
},
headers: HttpRequestMock.activitypub_object_headers()
)
- "http://localhost:4001/users/masto_hidden_counters/following?page=1" ->
+ "http://remote.org/users/masto_hidden_counters/following?page=1" ->
%Tesla.Env{status: 403, body: ""}
- "http://localhost:4001/users/masto_hidden_counters/followers" ->
+ "http://remote.org/users/masto_hidden_counters/followers" ->
json(
%{
"@context" => "https://www.w3.org/ns/activitystreams",
- "id" => "http://localhost:4001/users/masto_hidden_counters/following"
+ "id" => "http://remote.org/users/masto_hidden_counters/followers"
},
headers: HttpRequestMock.activitypub_object_headers()
)
- "http://localhost:4001/users/masto_hidden_counters/followers?page=1" ->
+ "http://remote.org/users/masto_hidden_counters/followers?page=1" ->
%Tesla.Env{status: 403, body: ""}
end
end)
@@ -1753,8 +1745,8 @@ test "doesn't crash when follower and following counters are hidden" do
user =
insert(:user,
local: false,
- follower_address: "http://localhost:4001/users/masto_hidden_counters/followers",
- following_address: "http://localhost:4001/users/masto_hidden_counters/following"
+ follower_address: "http://remote.org/users/masto_hidden_counters/followers",
+ following_address: "http://remote.org/users/masto_hidden_counters/following"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
diff --git a/test/pleroma/web/activity_pub/mrf/inline_quote_policy_test.exs b/test/pleroma/web/activity_pub/mrf/inline_quote_policy_test.exs
index 4e0910d3e..7671ad5a1 100644
--- a/test/pleroma/web/activity_pub/mrf/inline_quote_policy_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/inline_quote_policy_test.exs
@@ -4,10 +4,16 @@
defmodule Pleroma.Web.ActivityPub.MRF.InlineQuotePolicyTest do
alias Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy
+ alias Pleroma.Object
use Pleroma.DataCase
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
test "adds quote URL to post content" do
- quote_url = "https://example.com/objects/1234"
+ quote_url = "https://mastodon.social/users/emelie/statuses/101849165031453009"
activity = %{
"type" => "Create",
@@ -19,10 +25,13 @@ test "adds quote URL to post content" do
}
}
+ # Prefetch the quoted post
+ %Object{} = Object.normalize(quote_url, fetch: true)
+
{:ok, %{"object" => %{"content" => filtered}}} = InlineQuotePolicy.filter(activity)
assert filtered ==
- "Nice post
RE: https://example.com/objects/1234
"
+ "Nice post
RE: https://mastodon.social/@emelie/101849165031453009
"
end
test "ignores Misskey quote posts" do
diff --git a/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs b/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs
index 59baa3a43..932251389 100644
--- a/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs
@@ -3,15 +3,66 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
- use Pleroma.DataCase
+ use Pleroma.DataCase, async: false
alias Pleroma.Config
alias Pleroma.Emoji
+ alias Pleroma.Emoji.Pack
alias Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy
+ defp has_pack?() do
+ case Pack.load_pack("stolen") do
+ {:ok, _pack} -> true
+ {:error, :enoent} -> false
+ end
+ end
+
+ defp has_emoji?(shortcode) do
+ case Pack.load_pack("stolen") do
+ {:ok, pack} -> Map.has_key?(pack.files, shortcode)
+ {:error, :enoent} -> false
+ end
+ end
+
+ defmacro mock_tesla(
+ url \\ "https://example.org/emoji/firedfox.png",
+ status \\ 200,
+ headers \\ [],
+ get_body \\ File.read!("test/fixtures/image.jpg")
+ ) do
+ quote do
+ Tesla.Mock.mock(fn
+ %{method: :head, url: unquote(url)} ->
+ %Tesla.Env{
+ status: unquote(status),
+ body: nil,
+ url: unquote(url),
+ headers: unquote(headers)
+ }
+
+ %{method: :get, url: unquote(url)} ->
+ %Tesla.Env{
+ status: unquote(status),
+ body: unquote(get_body),
+ url: unquote(url),
+ headers: unquote(headers)
+ }
+ end)
+ end
+ end
+
setup do
+ clear_config(:mrf_steal_emoji,
+ hosts: ["example.org"],
+ size_limit: 284_468,
+ download_unknown_size: true
+ )
+
emoji_path = [:instance, :static_dir] |> Config.get() |> Path.join("emoji/stolen")
+ emoji_base_path = [:instance, :static_dir] |> Config.get() |> Path.join("emoji/")
+ File.mkdir_p(emoji_base_path)
+
Emoji.reload()
message = %{
@@ -26,41 +77,35 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
File.rm_rf!(emoji_path)
end)
- [message: message, path: emoji_path]
+ [message: message]
end
test "does nothing by default", %{message: message} do
refute "firedfox" in installed()
+ clear_config(:mrf_steal_emoji, [])
assert {:ok, _message} = StealEmojiPolicy.filter(message)
refute "firedfox" in installed()
end
test "Steals emoji on unknown shortcode from allowed remote host", %{
- message: message,
- path: path
+ message: message
} do
refute "firedfox" in installed()
- refute File.exists?(path)
+ refute has_pack?()
- Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox.png"} ->
- %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
- end)
-
- clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
+ mock_tesla()
assert {:ok, _message} = StealEmojiPolicy.filter(message)
assert "firedfox" in installed()
- assert File.exists?(path)
+ assert has_pack?()
- assert path
- |> Path.join("firedfox.png")
- |> File.exists?()
+ assert has_emoji?("firedfox")
end
- test "rejects invalid shortcodes", %{path: path} do
+ test "rejects invalid shortcodes" do
message = %{
"type" => "Create",
"object" => %{
@@ -69,31 +114,38 @@ test "rejects invalid shortcodes", %{path: path} do
}
}
- fullpath = Path.join(path, "fired/fox.png")
-
- Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox"} ->
- %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
- end)
-
- clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
+ mock_tesla()
refute "firedfox" in installed()
- refute File.exists?(path)
+ refute has_pack?()
assert {:ok, _message} = StealEmojiPolicy.filter(message)
refute "fired/fox" in installed()
- refute File.exists?(fullpath)
+ refute has_emoji?("fired/fox")
+ end
+
+ test "prefers content-type header for extension" do
+ message = %{
+ "type" => "Create",
+ "object" => %{
+ "emoji" => [{"firedfox", "https://example.org/emoji/firedfox.fud"}],
+ "actor" => "https://example.org/users/admin"
+ }
+ }
+
+ mock_tesla("https://example.org/emoji/firedfox.fud", 200, [{"content-type", "image/gif"}])
+
+ assert {:ok, _message} = StealEmojiPolicy.filter(message)
+
+ assert "firedfox" in installed()
+ assert has_emoji?("firedfox")
end
test "reject regex shortcode", %{message: message} do
refute "firedfox" in installed()
- clear_config(:mrf_steal_emoji,
- hosts: ["example.org"],
- size_limit: 284_468,
- rejected_shortcodes: [~r/firedfox/]
- )
+ clear_config([:mrf_steal_emoji, :rejected_shortcodes], [~r/firedfox/])
assert {:ok, _message} = StealEmojiPolicy.filter(message)
@@ -103,11 +155,7 @@ test "reject regex shortcode", %{message: message} do
test "reject string shortcode", %{message: message} do
refute "firedfox" in installed()
- clear_config(:mrf_steal_emoji,
- hosts: ["example.org"],
- size_limit: 284_468,
- rejected_shortcodes: ["firedfox"]
- )
+ clear_config([:mrf_steal_emoji, :rejected_shortcodes], ["firedfox"])
assert {:ok, _message} = StealEmojiPolicy.filter(message)
@@ -117,11 +165,9 @@ test "reject string shortcode", %{message: message} do
test "reject if size is above the limit", %{message: message} do
refute "firedfox" in installed()
- Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox.png"} ->
- %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
- end)
+ mock_tesla()
- clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 50_000)
+ clear_config([:mrf_steal_emoji, :size_limit], 50_000)
assert {:ok, _message} = StealEmojiPolicy.filter(message)
@@ -131,11 +177,7 @@ test "reject if size is above the limit", %{message: message} do
test "reject if host returns error", %{message: message} do
refute "firedfox" in installed()
- Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox.png"} ->
- {:ok, %Tesla.Env{status: 404, body: "Not found"}}
- end)
-
- clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
+ mock_tesla("https://example.org/emoji/firedfox.png", 404, [], "Not found")
ExUnit.CaptureLog.capture_log(fn ->
assert {:ok, _message} = StealEmojiPolicy.filter(message)
@@ -144,5 +186,44 @@ test "reject if host returns error", %{message: message} do
refute "firedfox" in installed()
end
+ test "reject unknown size", %{message: message} do
+ clear_config([:mrf_steal_emoji, :download_unknown_size], false)
+ mock_tesla()
+
+ refute "firedfox" in installed()
+
+ ExUnit.CaptureLog.capture_log(fn ->
+ assert {:ok, _message} = StealEmojiPolicy.filter(message)
+ end) =~
+ "MRF.StealEmojiPolicy: Failed to fetch https://example.org/emoji/firedfox.png: {:remote_size, false}"
+
+ refute "firedfox" in installed()
+ end
+
+ test "reject too large content-size before download", %{message: message} do
+ clear_config([:mrf_steal_emoji, :download_unknown_size], false)
+ mock_tesla("https://example.org/emoji/firedfox.png", 200, [{"content-length", 2 ** 30}])
+
+ refute "firedfox" in installed()
+
+ ExUnit.CaptureLog.capture_log(fn ->
+ assert {:ok, _message} = StealEmojiPolicy.filter(message)
+ end) =~
+ "MRF.StealEmojiPolicy: Failed to fetch https://example.org/emoji/firedfox.png: {:remote_size, false}"
+
+ refute "firedfox" in installed()
+ end
+
+ test "accepts content-size below limit", %{message: message} do
+ clear_config([:mrf_steal_emoji, :download_unknown_size], false)
+ mock_tesla("https://example.org/emoji/firedfox.png", 200, [{"content-length", 2}])
+
+ refute "firedfox" in installed()
+
+ assert {:ok, _message} = StealEmojiPolicy.filter(message)
+
+ assert "firedfox" in installed()
+ end
+
defp installed, do: Emoji.get_all() |> Enum.map(fn {k, _} -> k end)
end
diff --git a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs
index bcf096a5b..4e96f3200 100644
--- a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs
+++ b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs
@@ -39,6 +39,28 @@ test "a basic note validates", %{note: note} do
%{valid?: true} = ArticleNotePageValidator.cast_and_validate(note)
end
+ test "note with url validates", %{note: note} do
+ note = Map.put(note, "url", "https://remote.example/link")
+ %{valid?: true} = ArticleNotePageValidator.cast_and_validate(note)
+ end
+
+ test "note with url array validates", %{note: note} do
+ note = Map.put(note, "url", ["https://remote.example/link"])
+ %{valid?: true} = ArticleNotePageValidator.cast_and_validate(note)
+ end
+
+ test "note with url array validates if contains a link object", %{note: note} do
+ note =
+ Map.put(note, "url", [
+ %{
+ "type" => "Link",
+ "href" => "https://remote.example/link"
+ }
+ ])
+
+ %{valid?: true} = ArticleNotePageValidator.cast_and_validate(note)
+ end
+
test "a note with a language validates" do
insert(:user, %{ap_id: "https://mastodon.social/users/akkoma_ap_integration_tester"})
note = File.read!("test/fixtures/mastodon/note_with_language.json") |> Jason.decode!()
diff --git a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs
index 9150b8d41..f8dec09d3 100644
--- a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs
+++ b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs
@@ -11,6 +11,23 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidatorTest do
import Pleroma.Factory
describe "attachments" do
+ test "works with apng" do
+ attachment =
+ %{
+ "mediaType" => "image/apng",
+ "name" => "",
+ "type" => "Document",
+ "url" =>
+ "https://media.misskeyusercontent.com/io/2859c26e-cd43-4550-848b-b6243bc3fe28.apng"
+ }
+
+ assert {:ok, attachment} =
+ AttachmentValidator.cast_and_validate(attachment)
+ |> Ecto.Changeset.apply_action(:insert)
+
+ assert attachment.mediaType == "image/apng"
+ end
+
test "works with honkerific attachments" do
attachment = %{
"mediaType" => "",
diff --git a/test/pleroma/web/activity_pub/transmogrifier/emoji_react_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/emoji_react_handling_test.exs
index 977950434..b40e7f4da 100644
--- a/test/pleroma/web/activity_pub/transmogrifier/emoji_react_handling_test.exs
+++ b/test/pleroma/web/activity_pub/transmogrifier/emoji_react_handling_test.exs
@@ -37,7 +37,80 @@ test "it works for incoming emoji reactions" do
assert match?([["👌", _, nil]], object.data["reactions"])
end
- test "it works for incoming custom emoji reactions" do
+ test "it works for incoming custom emoji with nil id" do
+ user = insert(:user)
+ other_user = insert(:user, local: false)
+ {:ok, activity} = CommonAPI.post(user, %{status: "hello"})
+
+ shortcode = "blobcatgoogly"
+ emoji = emoji_object(shortcode)
+ data = react_with_custom(activity.data["object"], other_user.ap_id, emoji)
+
+ {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data)
+
+ assert data["actor"] == other_user.ap_id
+ assert data["type"] == "EmojiReact"
+ assert data["object"] == activity.data["object"]
+ assert data["content"] == ":" <> shortcode <> ":"
+ [%{}] = data["tag"]
+
+ object = Object.get_by_ap_id(data["object"])
+
+ assert object.data["reaction_count"] == 1
+ assert match?([[^shortcode, _, _]], object.data["reactions"])
+ end
+
+ test "it works for incoming custom emoji with image url as id" do
+ user = insert(:user)
+ other_user = insert(:user, local: false)
+ {:ok, activity} = CommonAPI.post(user, %{status: "hello"})
+
+ shortcode = "blobcatgoogly"
+ imgurl = "https://example.org/emoji/a.png"
+ emoji = emoji_object(shortcode, imgurl, imgurl)
+ data = react_with_custom(activity.data["object"], other_user.ap_id, emoji)
+
+ {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data)
+
+ assert data["actor"] == other_user.ap_id
+ assert data["type"] == "EmojiReact"
+ assert data["object"] == activity.data["object"]
+ assert data["content"] == ":" <> shortcode <> ":"
+ assert [%{}] = data["tag"]
+
+ object = Object.get_by_ap_id(data["object"])
+
+ assert object.data["reaction_count"] == 1
+ assert match?([[^shortcode, _, ^imgurl]], object.data["reactions"])
+ end
+
+ test "it works for incoming custom emoji without tag array" do
+ user = insert(:user)
+ other_user = insert(:user, local: false)
+ {:ok, activity} = CommonAPI.post(user, %{status: "hello"})
+
+ shortcode = "blobcatgoogly"
+ imgurl = "https://example.org/emoji/b.png"
+ emoji = emoji_object(shortcode, imgurl, imgurl)
+ data = react_with_custom(activity.data["object"], other_user.ap_id, emoji, false)
+
+ assert %{} = data["tag"]
+
+ {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data)
+
+ assert data["actor"] == other_user.ap_id
+ assert data["type"] == "EmojiReact"
+ assert data["object"] == activity.data["object"]
+ assert data["content"] == ":" <> shortcode <> ":"
+ assert [%{}] = data["tag"]
+
+ object = Object.get_by_ap_id(data["object"])
+
+ assert object.data["reaction_count"] == 1
+ assert match?([[^shortcode, _, _]], object.data["reactions"])
+ end
+
+ test "it works for incoming custom emoji reactions from Misskey" do
user = insert(:user)
other_user = insert(:user, local: false)
{:ok, activity} = CommonAPI.post(user, %{status: "hello"})
@@ -138,4 +211,27 @@ test "it reject invalid emoji reactions" do
assert {:error, _} = Transmogrifier.handle_incoming(data)
end
+
+ defp emoji_object(shortcode, id \\ nil, url \\ "https://example.org/emoji.png") do
+ %{
+ "type" => "Emoji",
+ "id" => id,
+ "name" => shortcode |> String.replace_prefix(":", "") |> String.replace_suffix(":", ""),
+ "icon" => %{
+ "type" => "Image",
+ "url" => url
+ }
+ }
+ end
+
+ defp react_with_custom(object_id, as_actor, emoji, tag_array \\ true) do
+ tag = if tag_array, do: [emoji], else: emoji
+
+ File.read!("test/fixtures/emoji-reaction.json")
+ |> Jason.decode!()
+ |> Map.put("object", object_id)
+ |> Map.put("actor", as_actor)
+ |> Map.put("content", ":" <> emoji["name"] <> ":")
+ |> Map.put("tag", tag)
+ end
end
diff --git a/test/pleroma/web/metadata/providers/open_graph_test.exs b/test/pleroma/web/metadata/providers/open_graph_test.exs
index 64ff19561..20afb64e5 100644
--- a/test/pleroma/web/metadata/providers/open_graph_test.exs
+++ b/test/pleroma/web/metadata/providers/open_graph_test.exs
@@ -9,6 +9,8 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraphTest do
setup do: clear_config([Pleroma.Web.Metadata, :unfurl_nsfw])
setup do: clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
+ setup do: clear_config([:restrict_unauthenticated, :profiles, :local])
+ setup do: clear_config([:restrict_unauthenticated, :activities, :local])
test "it renders all supported types of attachments and skips unknown types" do
user = insert(:user)
@@ -188,4 +190,24 @@ test "video attachments have no image thumbnail with Preview Proxy disabled" do
"http://localhost:4001/proxy/preview/LzAnlke-l5oZbNzWsrHfprX1rGw/aHR0cHM6Ly9wbGVyb21hLmdvdi9hYm91dC9qdWNoZS53ZWJt/juche.webm"
], []} in result
end
+
+ test "it does not render users if profiles are marked as restricted" do
+ clear_config([:restrict_unauthenticated, :profiles, :local], true)
+
+ user = insert(:user)
+
+ result = OpenGraph.build_tags(%{user: user})
+ assert Enum.empty?(result)
+ end
+
+  test "it does not render activities if they are marked as restricted" do
+ clear_config([:restrict_unauthenticated, :activities, :local], true)
+
+ user = insert(:user)
+ note = insert(:note, data: %{"actor" => user.ap_id})
+
+ result = OpenGraph.build_tags(%{object: note, url: note.data["id"], user: user})
+
+ assert {:meta, [property: "og:description", content: "Content cannot be displayed."], []} in result
+ end
end
diff --git a/test/pleroma/web/metadata/providers/twitter_card_test.exs b/test/pleroma/web/metadata/providers/twitter_card_test.exs
index cd3f5eced..13fcd56ef 100644
--- a/test/pleroma/web/metadata/providers/twitter_card_test.exs
+++ b/test/pleroma/web/metadata/providers/twitter_card_test.exs
@@ -14,6 +14,8 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCardTest do
alias Pleroma.Web.Metadata.Utils
setup do: clear_config([Pleroma.Web.Metadata, :unfurl_nsfw])
+ setup do: clear_config([:restrict_unauthenticated, :profiles, :local])
+ setup do: clear_config([:restrict_unauthenticated, :activities, :local])
test "it renders twitter card for user info" do
user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")
@@ -28,6 +30,14 @@ test "it renders twitter card for user info" do
]
end
+ test "it does not render twitter card for user info if it is restricted" do
+ clear_config([:restrict_unauthenticated, :profiles, :local], true)
+ user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")
+ res = TwitterCard.build_tags(%{user: user})
+
+ assert Enum.empty?(res)
+ end
+
test "it uses summary twittercard if post has no attachment" do
user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")
{:ok, activity} = CommonAPI.post(user, %{status: "HI"})
@@ -54,6 +64,16 @@ test "it uses summary twittercard if post has no attachment" do
] == result
end
+ test "it does not summarise activities if they are marked as restricted" do
+ clear_config([:restrict_unauthenticated, :activities, :local], true)
+ user = insert(:user)
+ note = insert(:note, data: %{"actor" => user.ap_id})
+
+ result = TwitterCard.build_tags(%{object: note, activity_id: note.data["id"], user: user})
+
+ assert {:meta, [name: "twitter:description", content: "Content cannot be displayed."], []} in result
+ end
+
test "it uses summary as description if post has one" do
user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")
{:ok, activity} = CommonAPI.post(user, %{status: "HI"})
diff --git a/test/pleroma/web/pleroma_api/controllers/mascot_controller_test.exs b/test/pleroma/web/pleroma_api/controllers/mascot_controller_test.exs
index 7f02bff8f..8829597eb 100644
--- a/test/pleroma/web/pleroma_api/controllers/mascot_controller_test.exs
+++ b/test/pleroma/web/pleroma_api/controllers/mascot_controller_test.exs
@@ -64,6 +64,10 @@ test "mascot retrieving" do
assert json_response_and_validate_schema(ret_conn, 200)
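+ # Remember the URL returned on upload so the retrieval below can be compared against it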
+ %{"url" => uploaded_url} = Jason.decode!(ret_conn.resp_body)
+
+ assert uploaded_url != nil and is_binary(uploaded_url)
+
user = User.get_cached_by_id(user.id)
conn =
@@ -72,6 +76,6 @@ test "mascot retrieving" do
|> get("/api/v1/pleroma/mascot")
assert %{"url" => url, "type" => "image"} = json_response_and_validate_schema(conn, 200)
- assert url =~ "an_image"
+ assert url == uploaded_url
end
end
diff --git a/test/pleroma/web/plugs/http_security_plug_test.exs b/test/pleroma/web/plugs/http_security_plug_test.exs
index 3c029b9b2..009fb829c 100644
--- a/test/pleroma/web/plugs/http_security_plug_test.exs
+++ b/test/pleroma/web/plugs/http_security_plug_test.exs
@@ -96,51 +96,68 @@ test "it sets the Service-Worker-Allowed header", %{conn: conn} do
test "media_proxy with base_url", %{conn: conn} do
url = "https://example.com"
clear_config([:media_proxy, :base_url], url)
- assert_media_img_src(conn, url)
+ assert_media_img_src(conn, proxy: url)
assert_connect_src(conn, url)
end
test "upload with base url", %{conn: conn} do
url = "https://example2.com"
clear_config([Pleroma.Upload, :base_url], url)
- assert_media_img_src(conn, url)
+ assert_media_img_src(conn, upload: url)
assert_connect_src(conn, url)
end
test "with S3 public endpoint", %{conn: conn} do
url = "https://example3.com"
clear_config([Pleroma.Uploaders.S3, :public_endpoint], url)
- assert_media_img_src(conn, url)
+ assert_media_img_src(conn, s3: url)
end
test "with captcha endpoint", %{conn: conn} do
clear_config([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com")
- assert_media_img_src(conn, "https://captcha.com")
+ assert_media_img_src(conn, captcha: "https://captcha.com")
end
test "with media_proxy whitelist", %{conn: conn} do
clear_config([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"])
- assert_media_img_src(conn, "https://example7.com https://example6.com")
+ assert_media_img_src(conn, proxy_whitelist: "https://example7.com https://example6.com")
end
# TODO: delete after removing support bare domains for media proxy whitelist
test "with media_proxy bare domains whitelist (deprecated)", %{conn: conn} do
clear_config([:media_proxy, :whitelist], ["example4.com", "example5.com"])
- assert_media_img_src(conn, "example5.com example4.com")
+ assert_media_img_src(conn, proxy_whitelist: "example5.com example4.com")
end
test "with media_proxy blocklist", %{conn: conn} do
clear_config([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"])
clear_config([:media_proxy, :blocklist], ["https://example8.com"])
- assert_media_img_src(conn, "https://example7.com https://example6.com")
+ assert_media_img_src(conn, proxy_whitelist: "https://example7.com https://example6.com")
end
end
- defp assert_media_img_src(conn, url) do
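+ # Joins two optional URL strings with a space, skipping nil values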
+ defp maybe_concat(nil, b), do: b
+ defp maybe_concat(a, nil), do: a
+ defp maybe_concat(a, b), do: a <> " " <> b
+
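+ # Concatenates the given URLs into the source string expected in the CSP header
+ # (order matters for the exact match below)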
+ defp build_src_str(urls) do
+ urls[:proxy_whitelist]
+ |> maybe_concat(urls[:s3])
+ |> maybe_concat(urls[:upload])
+ |> maybe_concat(urls[:proxy])
+ |> maybe_concat(urls[:captcha])
+ end
+
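+ # Asserts that media-src and img-src contain the expected URLs;
+ # upload and proxy default to http://localhost unless overridden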
+ defp assert_media_img_src(conn, urls) do
+ urlstr =
+ [upload: "http://localhost", proxy: "http://localhost"]
+ |> Keyword.merge(urls)
+ |> build_src_str()
+
conn = get(conn, "/api/v1/instance")
[csp] = Conn.get_resp_header(conn, "content-security-policy")
- assert csp =~ "media-src 'self' #{url};"
- assert csp =~ "img-src 'self' data: blob: #{url};"
+ assert csp =~ "media-src 'self' #{urlstr};"
+ assert csp =~ "img-src 'self' data: blob: #{urlstr};"
end
defp assert_connect_src(conn, url) do
diff --git a/test/pleroma/web/twitter_api/util_controller_test.exs b/test/pleroma/web/twitter_api/util_controller_test.exs
index 169e9981c..3192ad3af 100644
--- a/test/pleroma/web/twitter_api/util_controller_test.exs
+++ b/test/pleroma/web/twitter_api/util_controller_test.exs
@@ -24,7 +24,7 @@ defmodule Pleroma.Web.TwitterAPI.UtilControllerTest do
describe "PUT /api/pleroma/notification_settings" do
setup do: oauth_access(["write:accounts"])
- test "it updates notification settings", %{user: user, conn: conn} do
+ test "it updates notification settings via url paramters", %{user: user, conn: conn} do
conn
|> put(
"/api/pleroma/notification_settings?#{URI.encode_query(%{block_from_strangers: true})}"
@@ -39,6 +39,57 @@ test "it updates notification settings", %{user: user, conn: conn} do
} == user.notification_settings
end
+ test "it updates notification settings via JSON body params", %{user: user, conn: conn} do
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put(
+ "/api/pleroma/notification_settings",
+ %{"block_from_strangers" => true}
+ )
+ |> json_response_and_validate_schema(:ok)
+
+ user = refresh_record(user)
+
+ assert %Pleroma.User.NotificationSetting{
+ block_from_strangers: true,
+ hide_notification_contents: false
+ } == user.notification_settings
+ end
+
+ test "it updates notification settings via form data", %{user: user, conn: conn} do
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> put(
+ "/api/pleroma/notification_settings",
+ %{:block_from_strangers => true}
+ )
+ |> json_response_and_validate_schema(:ok)
+
+ user = refresh_record(user)
+
+ assert %Pleroma.User.NotificationSetting{
+ block_from_strangers: true,
+ hide_notification_contents: false
+ } == user.notification_settings
+ end
+
+ test "it updates notification settings via urlencoded body", %{user: user, conn: conn} do
+ conn
+ |> put_req_header("content-type", "application/x-www-form-urlencoded")
+ |> put(
+ "/api/pleroma/notification_settings",
+ "block_from_strangers=true"
+ )
+ |> json_response_and_validate_schema(:ok)
+
+ user = refresh_record(user)
+
+ assert %Pleroma.User.NotificationSetting{
+ block_from_strangers: true,
+ hide_notification_contents: false
+ } == user.notification_settings
+ end
+
test "it updates notification settings to enable hiding contents", %{user: user, conn: conn} do
conn
|> put(
@@ -53,6 +104,27 @@ test "it updates notification settings to enable hiding contents", %{user: user,
hide_notification_contents: true
} == user.notification_settings
end
+
+ # We already test all body variants for block_from_strangers, so a single one suffices here
+ test "it updates notification settings to enable hiding contents via JSON body params", %{
+ user: user,
+ conn: conn
+ } do
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put(
+ "/api/pleroma/notification_settings",
+ %{"hide_notification_contents" => true}
+ )
+ |> json_response_and_validate_schema(:ok)
+
+ user = refresh_record(user)
+
+ assert %Pleroma.User.NotificationSetting{
+ block_from_strangers: false,
+ hide_notification_contents: true
+ } == user.notification_settings
+ end
end
describe "GET /api/pleroma/frontend_configurations" do
diff --git a/test/pleroma/web/web_finger/web_finger_controller_test.exs b/test/pleroma/web/web_finger/web_finger_controller_test.exs
index fe8301fa4..f792f20e6 100644
--- a/test/pleroma/web/web_finger/web_finger_controller_test.exs
+++ b/test/pleroma/web/web_finger/web_finger_controller_test.exs
@@ -46,8 +46,7 @@ test "Webfinger JRD" do
assert response["subject"] == "acct:#{user.nickname}@localhost"
assert response["aliases"] == [
- "https://hyrule.world/users/zelda",
- "https://mushroom.kingdom/users/toad"
+ "https://hyrule.world/users/zelda"
]
end
@@ -104,7 +103,6 @@ test "Webfinger XML" do
|> response(200)
assert response =~ "https://hyrule.world/users/zelda"
- assert response =~ "https://mushroom.kingdom/users/toad"
end
test "it returns 404 when user isn't found (XML)" do
diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex
index 6772a7421..e487d2e6b 100644
--- a/test/support/http_request_mock.ex
+++ b/test/support/http_request_mock.ex
@@ -5,7 +5,16 @@
defmodule HttpRequestMock do
require Logger
- def activitypub_object_headers, do: [{"content-type", "application/activity+json"}]
+ def activitypub_object_headers,
+ do: [
+ {"content-type", "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""}
+ ]
+
+ # The exact Accept headers we generate; the AP spec only requires the first one to be present somewhere
+ @activitypub_accept_headers [
+ {"accept", "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""},
+ {"accept", "application/activity+json"}
+ ]
def request(
%Tesla.Env{
@@ -97,7 +106,7 @@ def get("https://mastodon.sdf.org/users/rinpatch/collections/featured", _, _, _)
File.read!("test/fixtures/users_mock/masto_featured.json")
|> String.replace("{{domain}}", "mastodon.sdf.org")
|> String.replace("{{nickname}}", "rinpatch"),
- headers: [{"content-type", "application/activity+json"}]
+ headers: activitypub_object_headers()
}}
end
@@ -208,7 +217,7 @@ def get(
"https://mst3k.interlinked.me/users/luciferMysticus",
_,
_,
- [{"accept", "application/activity+json"}]
+ @activitypub_accept_headers
) do
{:ok,
%Tesla.Env{
@@ -231,7 +240,7 @@ def get(
"https://hubzilla.example.org/channel/kaniini",
_,
_,
- [{"accept", "application/activity+json"}]
+ @activitypub_accept_headers
) do
{:ok,
%Tesla.Env{
@@ -241,7 +250,7 @@ def get(
}}
end
- def get("https://niu.moe/users/rye", _, _, [{"accept", "application/activity+json"}]) do
+ def get("https://niu.moe/users/rye", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -250,7 +259,7 @@ def get("https://niu.moe/users/rye", _, _, [{"accept", "application/activity+jso
}}
end
- def get("https://n1u.moe/users/rye", _, _, [{"accept", "application/activity+json"}]) do
+ def get("https://n1u.moe/users/rye", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -270,7 +279,7 @@ def get("http://mastodon.example.org/users/admin/statuses/100787282858396771", _
}}
end
- def get("https://puckipedia.com/", _, _, [{"accept", "application/activity+json"}]) do
+ def get("https://puckipedia.com/", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -342,9 +351,12 @@ def get("https://peertube.social/videos/watch/278d2b7c-0f38-4aaa-afe6-9ecc0c4a34
}}
end
- def get("https://mobilizon.org/events/252d5816-00a3-4a89-a66f-15bf65c33e39", _, _, [
- {"accept", "application/activity+json"}
- ]) do
+ def get(
+ "https://mobilizon.org/events/252d5816-00a3-4a89-a66f-15bf65c33e39",
+ _,
+ _,
+ @activitypub_accept_headers
+ ) do
{:ok,
%Tesla.Env{
status: 200,
@@ -353,7 +365,7 @@ def get("https://mobilizon.org/events/252d5816-00a3-4a89-a66f-15bf65c33e39", _,
}}
end
- def get("https://mobilizon.org/@tcit", _, _, [{"accept", "application/activity+json"}]) do
+ def get("https://mobilizon.org/@tcit", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -416,9 +428,7 @@ def get(
{:ok, %Tesla.Env{status: 404, body: ""}}
end
- def get("http://mastodon.example.org/users/relay", _, _, [
- {"accept", "application/activity+json"}
- ]) do
+ def get("http://mastodon.example.org/users/relay", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -427,9 +437,7 @@ def get("http://mastodon.example.org/users/relay", _, _, [
}}
end
- def get("http://mastodon.example.org/users/gargron", _, _, [
- {"accept", "application/activity+json"}
- ]) do
+ def get("http://mastodon.example.org/users/gargron", _, _, @activitypub_accept_headers) do
{:error, :nxdomain}
end
@@ -572,6 +580,7 @@ def get("https://social.stopwatchingus-heidelberg.de/.well-known/host-meta", _,
}}
end
+ # Mastodon status via display URL
def get(
"http://mastodon.example.org/@admin/99541947525187367",
_,
@@ -581,6 +590,23 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
+ url: "http://mastodon.example.org/@admin/99541947525187367",
+ body: File.read!("test/fixtures/mastodon-note-object.json"),
+ headers: activitypub_object_headers()
+ }}
+ end
+
+ # Same status via its canonical ActivityPub id
+ def get(
+ "http://mastodon.example.org/users/admin/statuses/99541947525187367",
+ _,
+ _,
+ _
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ url: "http://mastodon.example.org/users/admin/statuses/99541947525187367",
body: File.read!("test/fixtures/mastodon-note-object.json"),
headers: activitypub_object_headers()
}}
@@ -602,7 +628,7 @@ def get("https://shitposter.club/notice/7369654", _, _, _) do
}}
end
- def get("https://mstdn.io/users/mayuutann", _, _, [{"accept", "application/activity+json"}]) do
+ def get("https://mstdn.io/users/mayuutann", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -615,7 +641,7 @@ def get(
"https://mstdn.io/users/mayuutann/statuses/99568293732299394",
_,
_,
- [{"accept", "application/activity+json"}]
+ @activitypub_accept_headers
) do
{:ok,
%Tesla.Env{
@@ -761,7 +787,7 @@ def get(
"http://gs.example.org:4040/index.php/user/1",
_,
_,
- [{"accept", "application/activity+json"}]
+ @activitypub_accept_headers
) do
{:ok, %Tesla.Env{status: 406, body: ""}}
end
@@ -948,7 +974,7 @@ def get("https://apfed.club/channel/indio", _, _, _) do
}}
end
- def get("https://social.heldscal.la/user/23211", _, _, [{"accept", "application/activity+json"}]) do
+ def get("https://social.heldscal.la/user/23211", _, _, @activitypub_accept_headers) do
{:ok, Tesla.Mock.json(%{"id" => "https://social.heldscal.la/user/23211"}, status: 200)}
end
@@ -964,7 +990,7 @@ def get("https://pleroma.local/notice/9kCP7V", _, _, _) do
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/ogp.html")}}
end
- def get("http://localhost:4001/users/masto_closed/followers", _, _, _) do
+ def get("http://remote.org/users/masto_closed/followers", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -973,7 +999,7 @@ def get("http://localhost:4001/users/masto_closed/followers", _, _, _) do
}}
end
- def get("http://localhost:4001/users/masto_closed/followers?page=1", _, _, _) do
+ def get("http://remote.org/users/masto_closed/followers?page=1", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -982,7 +1008,7 @@ def get("http://localhost:4001/users/masto_closed/followers?page=1", _, _, _) do
}}
end
- def get("http://localhost:4001/users/masto_closed/following", _, _, _) do
+ def get("http://remote.org/users/masto_closed/following", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -991,7 +1017,7 @@ def get("http://localhost:4001/users/masto_closed/following", _, _, _) do
}}
end
- def get("http://localhost:4001/users/masto_closed/following?page=1", _, _, _) do
+ def get("http://remote.org/users/masto_closed/following?page=1", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -1000,7 +1026,7 @@ def get("http://localhost:4001/users/masto_closed/following?page=1", _, _, _) do
}}
end
- def get("http://localhost:8080/followers/fuser3", _, _, _) do
+ def get("http://remote.org/followers/fuser3", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -1009,7 +1035,7 @@ def get("http://localhost:8080/followers/fuser3", _, _, _) do
}}
end
- def get("http://localhost:8080/following/fuser3", _, _, _) do
+ def get("http://remote.org/following/fuser3", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -1018,7 +1044,7 @@ def get("http://localhost:8080/following/fuser3", _, _, _) do
}}
end
- def get("http://localhost:4001/users/fuser2/followers", _, _, _) do
+ def get("http://remote.org/users/fuser2/followers", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -1027,7 +1053,7 @@ def get("http://localhost:4001/users/fuser2/followers", _, _, _) do
}}
end
- def get("http://localhost:4001/users/fuser2/following", _, _, _) do
+ def get("http://remote.org/users/fuser2/following", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@@ -1189,13 +1215,11 @@ def get("https://lm.kazv.moe/users/mewmew/collections/featured", _, _, _) do
File.read!("test/fixtures/users_mock/masto_featured.json")
|> String.replace("{{domain}}", "lm.kazv.moe")
|> String.replace("{{nickname}}", "mewmew"),
- headers: [{"content-type", "application/activity+json"}]
+ headers: activitypub_object_headers()
}}
end
- def get("https://info.pleroma.site/activity.json", _, _, [
- {"accept", "application/activity+json"}
- ]) do
+ def get("https://info.pleroma.site/activity.json", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -1208,9 +1232,7 @@ def get("https://info.pleroma.site/activity.json", _, _, _) do
{:ok, %Tesla.Env{status: 404, body: ""}}
end
- def get("https://info.pleroma.site/activity2.json", _, _, [
- {"accept", "application/activity+json"}
- ]) do
+ def get("https://info.pleroma.site/activity2.json", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
@@ -1223,9 +1245,7 @@ def get("https://info.pleroma.site/activity2.json", _, _, _) do
{:ok, %Tesla.Env{status: 404, body: ""}}
end
- def get("https://info.pleroma.site/activity3.json", _, _, [
- {"accept", "application/activity+json"}
- ]) do
+ def get("https://info.pleroma.site/activity3.json", _, _, @activitypub_accept_headers) do
{:ok,
%Tesla.Env{
status: 200,
diff --git a/test/test_helper.exs b/test/test_helper.exs
index 0fc7a86b9..22a0f33ee 100644
--- a/test/test_helper.exs
+++ b/test/test_helper.exs
@@ -9,6 +9,10 @@
{:ok, _} = Application.ensure_all_started(:ex_machina)
+# Prepare the upload dir up front and automatically clean it up after the suite
+uploads_dir = Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads], "test/uploads")
+File.mkdir_p!(uploads_dir)
+
ExUnit.after_suite(fn _results ->
uploads = Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads], "test/uploads")
File.rm_rf!(uploads)