Merge remote-tracking branch 'upstream/develop' into bnakkoma
Commit 666e3bc4ad
71 changed files with 31838 additions and 21103 deletions
@@ -12,6 +12,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ## Added
 - Implement [FEP-67ff](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md) (federation documentation)
 - Meilisearch: it is now possible to use separate keys for search and admin actions
+- New standalone `prune_orphaned_activities` mix task with configurable batch limit
+- The `prune_objects` mix task now accepts a `--limit` parameter for initial object pruning
 
 ## Fixed
 - Meilisearch: order of results returned from our REST API now actually matches how Meilisearch ranks results

@@ -63,7 +63,6 @@
   uploader: Pleroma.Uploaders.Local,
   filters: [],
   link_name: false,
-  proxy_remote: false,
   filename_display_max_length: 30,
   base_url: nil,
   allowed_mime_types: ["image", "audio", "video"]

@@ -118,14 +118,6 @@
         "font"
       ]
     },
-    %{
-      key: :proxy_remote,
-      type: :boolean,
-      description: """
-      Proxy requests to the remote uploader.\n
-      Useful if media upload endpoint is not internet accessible.
-      """
-    },
     %{
       key: :filename_display_max_length,
       type: :integer,

@@ -50,9 +50,39 @@ This will prune remote posts older than 90 days (configurable with [`config :ple
 
 - `--keep-threads` - Don't prune posts when they are part of a thread where at least one post has seen local interaction (e.g. one of the posts is a local post, or is favourited by a local user, or has been repeated by a local user...). It also won't delete posts when at least one of the posts in that thread is kept (e.g. because one of the posts has seen recent activity).
 - `--keep-non-public` - Keep non-public posts like DMs and followers-only posts, even if they are remote.
+- `--limit` - limits how many remote posts get pruned. This limit does **not** apply to any of the follow-up jobs. If you want to keep the database load in check, it is thus advisable to run the standalone `prune_orphaned_activities` task with a limit afterwards instead of passing `--prune-orphaned-activities` to this task.
 - `--prune-orphaned-activities` - Also prune orphaned activities afterwards. Activities are things like Like, Create, Announce, Flag (aka reports)... Pruning them can significantly help reduce the database size.
 - `--vacuum` - Run `VACUUM FULL` after the objects are pruned. This should not be used on a regular basis, but is useful if your instance has been running for a long time before pruning.
 
+## Prune orphaned activities from the database
+
+This will prune activities which are no longer referenced by anything.
+Such activities might be the result of running `prune_objects` without `--prune-orphaned-activities`.
+The same notes and warnings apply as for `prune_objects`.
+
+The task will print out how many rows were freed in total in its last
+line of output in the form `Deleted 345 rows`.
+When running the job in limited batches this can be used to determine
+when all orphaned activities have been deleted.
+
+=== "OTP"
+
+    ```sh
+    ./bin/pleroma_ctl database prune_orphaned_activities [option ...]
+    ```
+
+=== "From Source"
+
+    ```sh
+    mix pleroma.database prune_orphaned_activities [option ...]
+    ```
+
+### Options
+
+- `--limit n` - Only delete up to `n` activities in each query making up this job, i.e. if this job runs two queries at most `2n` activities will be deleted. Running this task repeatedly in limited batches can help maintain the instance's responsiveness while still freeing up some space.
+- `--no-singles` - Do not delete activities referencing single objects
+- `--no-arrays` - Do not delete activities referencing an array of objects
+
 ## Create a conversation for all existing DMs
 
 Can be safely re-run

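The batch workflow the options above describe can be scripted. A minimal sketch (the loop module is hypothetical, not part of the task; it relies on `prune_orphaned_activities/2` returning the number of deleted rows, as shown in the mix task hunk further below, and assumes the application and its Repo are already started):

```elixir
# Repeatedly prune orphaned activities in capped batches until nothing is left,
# keeping each individual query small to protect instance responsiveness.
defmodule PruneLoop do
  def run(batch_size \\ 1000) do
    case Mix.Tasks.Pleroma.Database.prune_orphaned_activities(batch_size) do
      0 -> :done              # nothing deleted; all orphans are gone
      _n -> run(batch_size)   # some rows deleted; try another batch
    end
  end
end
```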
@@ -4,12 +4,12 @@
 
 1. Stop the Akkoma service.
 2. Go to the working directory of Akkoma (default is `/opt/akkoma`)
-3. Run[¹] `sudo -Hu postgres pg_dump -d akkoma --format=custom -f </path/to/backup_location/akkoma.pgdump>` (make sure the postgres user has write access to the destination file)
-4. Copy `akkoma.pgdump`, `config/prod.secret.exs`[²], `config/setup_db.psql` (if still available) and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
+3. Run `sudo -Hu postgres pg_dump -d akkoma --format=custom -f </path/to/backup_location/akkoma.pgdump>`[¹] (make sure the postgres user has write access to the destination file)
+4. Copy `akkoma.pgdump`, `config/config.exs`[²], the `uploads` folder, and the [static directory](../configuration/static_dir.md) to your backup destination. If you have other modifications, copy those changes too.
 5. Restart the Akkoma service.
 
-[¹]: We assume the database name is "akkoma". If not, you can find the correct name in your config files.
-[²]: If you've installed using OTP, you need `config/config.exs` instead of `config/prod.secret.exs`.
+[¹]: We assume the database name is "akkoma". If not, you can find the correct name in your configuration files.
+[²]: If you have a from-source installation, you need `config/prod.secret.exs` instead of `config/config.exs`. The `config/config.exs` file also exists, but in the case of from-source installations it only contains the default values and is tracked by Git, so you don't need to back it up.
 
 ## Restore/Move
 

@@ -17,19 +17,16 @@
 2. Stop the Akkoma service.
 3. Go to the working directory of Akkoma (default is `/opt/akkoma`)
 4. Copy the above mentioned files back to their original position.
-5. Drop the existing database and user if restoring in-place[¹]. `sudo -Hu postgres psql -c 'DROP DATABASE akkoma;';` `sudo -Hu postgres psql -c 'DROP USER akkoma;'`
-6. Restore the database schema and akkoma role using either of the following options
-    * You can use the original `setup_db.psql` if you have it[²]: `sudo -Hu postgres psql -f config/setup_db.psql`.
-    * Or recreate the database and user yourself (replace the password with the one you find in the config file) `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-which-you-can-find-in-your-config-file>'; CREATE DATABASE akkoma OWNER akkoma;"`.
+5. Drop the existing database and user[¹]. `sudo -Hu postgres psql -c 'DROP DATABASE akkoma;';` `sudo -Hu postgres psql -c 'DROP USER akkoma;'`
+6. Restore the database schema and akkoma role[¹] (replace the password with the one you find in the configuration file): `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-which-you-can-find-in-your-configuration-file>';"` `sudo -Hu postgres psql -c "CREATE DATABASE akkoma OWNER akkoma;"`.
 7. Now restore the Akkoma instance's data into the empty database schema[¹]: `sudo -Hu postgres pg_restore -d akkoma -v -1 </path/to/backup_location/akkoma.pgdump>`
-8. If you installed a newer Akkoma version, you should run `MIX_ENV=prod mix ecto.migrate`[³]. This task performs database migrations, if there were any.
+8. If you installed a newer Akkoma version, you should run the database migrations: `./bin/pleroma_ctl migrate`[²].
 9. Restart the Akkoma service.
 10. Run `sudo -Hu postgres vacuumdb --all --analyze-in-stages`. This will quickly generate the statistics so that postgres can properly plan queries.
-11. If setting up on a new server configure Nginx by using the `installation/akkoma.nginx` config sample or reference the Akkoma installation guide for your OS which contains the Nginx configuration instructions.
+11. If setting up on a new server, configure Nginx by using the `installation/nginx/akkoma.nginx` configuration sample or reference the Akkoma installation guide, which contains the Nginx configuration instructions.
 
-[¹]: We assume the database name and user are both "akkoma". If not, you can find the correct name in your config files.
-[²]: You can recreate the `config/setup_db.psql` by running the `mix pleroma.instance gen` task again. You can ignore most of the questions, but make the database user, name, and password the same as found in your backed up config file. This will also create a new `config/generated_config.exs` file which you may delete as it is not needed.
-[³]: Prefix with `MIX_ENV=prod` to run it using the production config file.
+[¹]: We assume the database name and user are both "akkoma". If not, you can find the correct name in your configuration files.
+[²]: If you have a from-source installation, the command is `MIX_ENV=prod mix ecto.migrate`. Note that we prefix with `MIX_ENV=prod` to use the `config/prod.secret.exs` configuration file.
 
 ## Remove
 

@@ -605,7 +605,6 @@ the source code is here: [kocaptcha](https://github.com/koto-bank/kocaptcha). Th
 * `link_name`: When enabled Akkoma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers
 * `base_url`: The base URL to access a user-uploaded file; MUST be configured explicitly.
   Using a (sub)domain distinct from the instance endpoint is **strongly** recommended. A good value might be `https://media.myakkoma.instance/media/`.
-* `proxy_remote`: If you're using a remote uploader, Akkoma will proxy media requests instead of redirecting to it.
 * `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
 * `filename_display_max_length`: Set max length of a filename to display. 0 = no limit. Default: 30.
 

@@ -6,7 +6,7 @@ as soon as the post is received by your instance.
 
 ## Nginx
 
-The following are excerpts from the [suggested nginx config](../../../installation/nginx/akkoma.nginx) that demonstrate the necessary config for the media proxy to work.
+The following are excerpts from the [suggested nginx config](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/installation/nginx/akkoma.nginx) that demonstrate the necessary config for the media proxy to work.
 
 A `proxy_cache_path` must be defined, for example:
 

@@ -1033,7 +1033,6 @@ Most of the settings will be applied in `runtime`, this means that you don't need
 - `:pools`
 - partially settings inside these keys:
   - `:seconds_valid` in `Pleroma.Captcha`
-  - `:proxy_remote` in `Pleroma.Upload`
   - `:upload_limit` in `:instance`
 
 - Params:

@@ -1094,7 +1093,6 @@ List of settings which support only full update by subkey:
 {"tuple": [":uploader", "Pleroma.Uploaders.Local"]},
 {"tuple": [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
 {"tuple": [":link_name", true]},
-{"tuple": [":proxy_remote", false]},
 {"tuple": [":proxy_opts", [
 {"tuple": [":redirect_on_failure", false]},
 {"tuple": [":max_body_length", 1048576]},

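For orientation, these admin-API JSON tuples map one-to-one onto the upload config shown in the earlier `config.exs` hunk. A rough sketch of the equivalent Elixir (as it would appear in `config/config.exs`, where `Config` is imported; the `:proxy_remote` entry disappears with this change):

```elixir
# Approximate Elixir equivalent of the JSON tuples above.
config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.Local,
  filters: [Pleroma.Upload.Filter.Dedupe],
  link_name: true,
  proxy_opts: [redirect_on_failure: false, max_body_length: 1_048_576]
```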
@@ -12,26 +12,22 @@ example.tld {
 		output file /var/log/caddy/akkoma.log
 	}
 
-	encode gzip
-
 	# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
 	# and `localhost.` resolves to [::0] on some systems: see issue #930
 	reverse_proxy 127.0.0.1:4000
 
-	# Uncomment if using a separate media subdomain
-	#@mediaproxy path /media/* /proxy/*
-	#handle @mediaproxy {
-	#	redir https://media.example.tld{uri} permanent
-	#}
+	@mediaproxy path /media/* /proxy/*
+	handle @mediaproxy {
+		redir https://media.example.tld{uri} permanent
+	}
 }
 
-# Uncomment if using a separate media subdomain
-#media.example.tld {
-#	@mediaproxy path /media/* /proxy/*
-#	reverse_proxy @mediaproxy 127.0.0.1:4000 {
-#		transport http {
-#			response_header_timeout 10s
-#			read_timeout 15s
-#		}
-#	}
-#}
+media.example.tld {
+	@mediaproxy path /media/* /proxy/*
+	reverse_proxy @mediaproxy 127.0.0.1:4000 {
+		transport http {
+			response_header_timeout 10s
+			read_timeout 15s
+		}
+	}
+}

@@ -112,18 +112,26 @@ def shell_prompt(prompt, defval \\ nil, defname \\ nil) do
     end
   end
 
-  def shell_info(message) do
+  def shell_info(message) when is_binary(message) or is_list(message) do
     if mix_shell?(),
       do: Mix.shell().info(message),
       else: IO.puts(message)
   end
 
-  def shell_error(message) do
+  def shell_info(message) do
+    shell_info("#{inspect(message)}")
+  end
+
+  def shell_error(message) when is_binary(message) or is_list(message) do
     if mix_shell?(),
       do: Mix.shell().error(message),
       else: IO.puts(:stderr, message)
   end
 
+  def shell_error(message) do
+    shell_error("#{inspect(message)}")
+  end
+
   @doc "Performs a safe check whether `Mix.shell/0` is available (does not raise if Mix is not loaded)"
   def mix_shell?, do: :erlang.function_exported(Mix, :shell, 0)
 

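The split into a guarded chardata clause plus an `inspect/1` fallback means callers can now pipe arbitrary terms into `shell_info/1` and `shell_error/1` — which is what the later hunks rely on when they swap out `IO.inspect`/`IO.puts` calls. A standalone sketch of the same dispatch pattern (hypothetical module, for illustration only):

```elixir
defmodule DispatchDemo do
  # Strings and chardata lists print as-is...
  def show(message) when is_binary(message) or is_list(message), do: IO.puts(message)
  # ...anything else is rendered with inspect/1 first, so maps, structs,
  # and tuples no longer raise when piped in.
  def show(message), do: show("#{inspect(message)}")
end

# DispatchDemo.show("plain")       #=> plain
# DispatchDemo.show(%{rows: 345})  #=> %{rows: 345}
```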
@@ -9,7 +9,6 @@ defmodule Mix.Tasks.Pleroma.Activity do
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Pagination
   alias Pleroma.Search.DatabaseSearch
-  require Logger
   import Mix.Pleroma
   import Ecto.Query
 

@@ -18,7 +17,7 @@ def run(["get", id | _rest]) do
 
     id
     |> Activity.get_by_id()
-    |> IO.inspect()
+    |> shell_info()
   end
 
   def run(["delete_by_keyword", user, keyword | _rest]) do

@@ -43,6 +42,6 @@ def run(["delete_by_keyword", user, keyword | _rest]) do
     )
     |> Enum.map(fn x -> CommonAPI.delete(x.id, u) end)
     |> Enum.count()
-    |> IO.puts()
+    |> shell_info()
   end
 end

@@ -20,6 +20,102 @@ defmodule Mix.Tasks.Pleroma.Database do
   @shortdoc "A collection of database related tasks"
   @moduledoc File.read!("docs/docs/administration/CLI_tasks/database.md")
 
+  defp maybe_limit(query, limit_cnt) do
+    if is_number(limit_cnt) and limit_cnt > 0 do
+      limit(query, [], ^limit_cnt)
+    else
+      query
+    end
+  end
+
+  defp limit_statement(limit) when is_number(limit) do
+    if limit > 0 do
+      "LIMIT #{limit}"
+    else
+      ""
+    end
+  end
+
+  defp prune_orphaned_activities_singles(limit) do
+    %{:num_rows => del_single} =
+      """
+      delete from public.activities
+      where id in (
+        select a.id from public.activities a
+        left join public.objects o on a.data ->> 'object' = o.data ->> 'id'
+        left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id'
+        left join public.users u on a.data ->> 'object' = u.ap_id
+        where not a.local
+        and jsonb_typeof(a."data" -> 'object') = 'string'
+        and o.id is null
+        and a2.id is null
+        and u.id is null
+        #{limit_statement(limit)}
+      )
+      """
+      |> Repo.query!([], timeout: :infinity)
+
+    Logger.info("Prune activity singles: deleted #{del_single} rows...")
+    del_single
+  end
+
+  defp prune_orphaned_activities_array(limit) do
+    %{:num_rows => del_array} =
+      """
+      delete from public.activities
+      where id in (
+        select a.id from public.activities a
+        join json_array_elements_text((a."data" -> 'object')::json) as j
+          on a.data ->> 'type' = 'Flag'
+        left join public.objects o on j.value = o.data ->> 'id'
+        left join public.activities a2 on j.value = a2.data ->> 'id'
+        left join public.users u on j.value = u.ap_id
+        group by a.id
+        having max(o.data ->> 'id') is null
+        and max(a2.data ->> 'id') is null
+        and max(u.ap_id) is null
+        #{limit_statement(limit)}
+      )
+      """
+      |> Repo.query!([], timeout: :infinity)
+
+    Logger.info("Prune activity arrays: deleted #{del_array} rows...")
+    del_array
+  end
+
+  def prune_orphaned_activities(limit \\ 0, opts \\ []) when is_number(limit) do
+    # Activities can either refer to a single object id, an array of object ids
+    # or contain an inlined object (at least after going through our normalisation)
+    #
+    # Flag is the only type we support with an array (and always has arrays).
+    # Update is the only one with inlined objects.
+    #
+    # We already regularly purge old Delete, Undo, Update and Remove and, if
+    # rejected, Follow requests anyway; no need to explicitly deal with those here.
+    #
+    # Since there's an index on types and there are typically only a few Flag
+    # activities, it's _much_ faster to utilise the index. To avoid accidentally
+    # deleting useful activities should more types be added, keep typeof for singles.
+
+    # Prune activities which link to an array of objects
+    del_array =
+      if Keyword.get(opts, :arrays, true) do
+        prune_orphaned_activities_array(limit)
+      else
+        0
+      end
+
+    # Prune activities which link to a single object
+    del_single =
+      if Keyword.get(opts, :singles, true) do
+        prune_orphaned_activities_singles(limit)
+      else
+        0
+      end
+
+    del_single + del_array
+  end
+
   def run(["remove_embedded_objects" | args]) do
     {options, [], []} =
       OptionParser.parse(

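A hedged usage sketch of the new function (names are taken from the hunk above; the option keys mirror the CLI flags, with `--no-singles`/`--no-arrays` arriving as `singles: false`/`arrays: false`):

```elixir
# Delete at most 500 orphaned activities per query, skipping the
# (typically small) Flag/array pass; returns the total rows deleted.
Mix.Tasks.Pleroma.Database.prune_orphaned_activities(500, arrays: false)
```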
@@ -62,6 +158,37 @@ def run(["update_users_following_followers_counts"]) do
     )
   end
 
+  def run(["prune_orphaned_activities" | args]) do
+    {options, [], []} =
+      OptionParser.parse(
+        args,
+        strict: [
+          limit: :integer,
+          singles: :boolean,
+          arrays: :boolean
+        ]
+      )
+
+    start_pleroma()
+
+    {limit, options} = Keyword.pop(options, :limit, 0)
+
+    log_message = "Pruning orphaned activities"
+
+    log_message =
+      if limit > 0 do
+        log_message <> ", limiting deletion to #{limit} rows"
+      else
+        log_message
+      end
+
+    Logger.info(log_message)
+
+    deleted = prune_orphaned_activities(limit, options)
+
+    Logger.info("Deleted #{deleted} rows")
+  end
+
   def run(["prune_objects" | args]) do
     {options, [], []} =
       OptionParser.parse(

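Invoked through Mix, the same `run/1` clause looks like this (an illustrative call; in practice you would use `mix pleroma.database ...` or `pleroma_ctl` as documented above):

```elixir
# Equivalent to `mix pleroma.database prune_orphaned_activities --limit 1000`
Mix.Tasks.Pleroma.Database.run(["prune_orphaned_activities", "--limit", "1000"])
```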
@@ -70,7 +197,8 @@ def run(["prune_objects" | args]) do
           vacuum: :boolean,
           keep_threads: :boolean,
           keep_non_public: :boolean,
-          prune_orphaned_activities: :boolean
+          prune_orphaned_activities: :boolean,
+          limit: :integer
         ]
       )
 

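A quick sketch of how the extended switch list parses (`OptionParser` is Elixir stdlib; the argument values here are made up):

```elixir
OptionParser.parse(["--limit", "500", "--keep-threads"],
  strict: [limit: :integer, keep_threads: :boolean]
)
#=> {[limit: 500, keep_threads: true], [], []}
```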
@@ -79,6 +207,8 @@ def run(["prune_objects" | args]) do
     deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])
     time_deadline = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(deadline * 86_400))
 
+    limit_cnt = Keyword.get(options, :limit, 0)
+
     log_message = "Pruning objects older than #{deadline} days"
 
     log_message =

@@ -110,129 +240,124 @@ def run(["prune_objects" | args]) do
         log_message
       end
 
+    log_message =
+      if limit_cnt > 0 do
+        log_message <> ", limiting to #{limit_cnt} rows"
+      else
+        log_message
+      end
+
     Logger.info(log_message)
 
-    if Keyword.get(options, :keep_threads) do
-      # We want to delete objects from threads where
-      # 1. the newest post is still old
-      # 2. none of the activities is local
-      # 3. none of the activities is bookmarked
-      # 4. optionally none of the posts is non-public
-      deletable_context =
-        if Keyword.get(options, :keep_non_public) do
-          Pleroma.Activity
-          |> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
-          |> group_by([a], fragment("? ->> 'context'::text", a.data))
-          |> having(
-            [a],
-            not fragment(
-              # Posts (checked on Create Activity) is non-public
-              "bool_or((not(?->'to' \\? ? OR ?->'cc' \\? ?)) and ? ->> 'type' = 'Create')",
-              a.data,
-              ^Pleroma.Constants.as_public(),
-              a.data,
-              ^Pleroma.Constants.as_public(),
-              a.data
-            )
-          )
-        else
-          Pleroma.Activity
-          |> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
-          |> group_by([a], fragment("? ->> 'context'::text", a.data))
-        end
-        |> having([a], max(a.updated_at) < ^time_deadline)
-        |> having([a], not fragment("bool_or(?)", a.local))
-        |> having([_, b], fragment("max(?::text) is null", b.id))
-        |> select([a], fragment("? ->> 'context'::text", a.data))
-
-      Pleroma.Object
-      |> where([o], fragment("? ->> 'context'::text", o.data) in subquery(deletable_context))
-    else
-      if Keyword.get(options, :keep_non_public) do
-        Pleroma.Object
-        |> where(
-          [o],
-          fragment(
-            "?->'to' \\? ? OR ?->'cc' \\? ?",
-            o.data,
-            ^Pleroma.Constants.as_public(),
-            o.data,
-            ^Pleroma.Constants.as_public()
-          )
-        )
-      else
-        Pleroma.Object
-      end
-      |> where([o], o.updated_at < ^time_deadline)
-      |> where(
-        [o],
-        fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host())
-      )
-    end
-    |> Repo.delete_all(timeout: :infinity)
+    {del_obj, _} =
+      if Keyword.get(options, :keep_threads) do
+        # We want to delete objects from threads where
+        # 1. the newest post is still old
+        # 2. none of the activities is local
+        # 3. none of the activities is bookmarked
+        # 4. optionally none of the posts is non-public
+        deletable_context =
+          if Keyword.get(options, :keep_non_public) do
+            Pleroma.Activity
+            |> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
+            |> group_by([a], fragment("? ->> 'context'::text", a.data))
+            |> having(
+              [a],
+              not fragment(
+                # Posts (checked on Create Activity) is non-public
+                "bool_or((not(?->'to' \\? ? OR ?->'cc' \\? ?)) and ? ->> 'type' = 'Create')",
+                a.data,
+                ^Pleroma.Constants.as_public(),
+                a.data,
+                ^Pleroma.Constants.as_public(),
+                a.data
+              )
+            )
+          else
+            Pleroma.Activity
+            |> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
+            |> group_by([a], fragment("? ->> 'context'::text", a.data))
+          end
+          |> having([a], max(a.updated_at) < ^time_deadline)
+          |> having([a], not fragment("bool_or(?)", a.local))
+          |> having([_, b], fragment("max(?::text) is null", b.id))
+          |> maybe_limit(limit_cnt)
+          |> select([a], fragment("? ->> 'context'::text", a.data))
+
+        Pleroma.Object
+        |> where([o], fragment("? ->> 'context'::text", o.data) in subquery(deletable_context))
+      else
+        deletable =
+          if Keyword.get(options, :keep_non_public) do
+            Pleroma.Object
+            |> where(
+              [o],
+              fragment(
+                "?->'to' \\? ? OR ?->'cc' \\? ?",
+                o.data,
+                ^Pleroma.Constants.as_public(),
+                o.data,
+                ^Pleroma.Constants.as_public()
+              )
+            )
+          else
+            Pleroma.Object
+          end
+          |> where([o], o.updated_at < ^time_deadline)
+          |> where(
+            [o],
+            fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host())
+          )
+          |> maybe_limit(limit_cnt)
+          |> select([o], o.id)
+
+        Pleroma.Object
+        |> where([o], o.id in subquery(deletable))
+      end
+      |> Repo.delete_all(timeout: :infinity)
+
+    Logger.info("Deleted #{del_obj} objects...")
 
     if !Keyword.get(options, :keep_threads) do
       # Without the --keep-threads option, it's possible that bookmarked
       # objects have been deleted. We remove the corresponding bookmarks.
-      """
-      delete from public.bookmarks
-      where id in (
-        select b.id from public.bookmarks b
-        left join public.activities a on b.activity_id = a.id
-        left join public.objects o on a."data" ->> 'object' = o.data ->> 'id'
-        where o.id is null
-      )
-      """
-      |> Repo.query([], timeout: :infinity)
+      %{:num_rows => del_bookmarks} =
+        """
+        delete from public.bookmarks
+        where id in (
+          select b.id from public.bookmarks b
+          left join public.activities a on b.activity_id = a.id
+          left join public.objects o on a."data" ->> 'object' = o.data ->> 'id'
+          where o.id is null
+        )
+        """
+        |> Repo.query!([], timeout: :infinity)
+
+      Logger.info("Deleted #{del_bookmarks} orphaned bookmarks...")
     end
 
     if Keyword.get(options, :prune_orphaned_activities) do
-      # Prune activities who link to a single object
-      """
-      delete from public.activities
-      where id in (
-        select a.id from public.activities a
-        left join public.objects o on a.data ->> 'object' = o.data ->> 'id'
-        left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id'
-        left join public.users u on a.data ->> 'object' = u.ap_id
-        where not a.local
-        and jsonb_typeof(a."data" -> 'object') = 'string'
-        and o.id is null
-        and a2.id is null
-        and u.id is null
-      )
-      """
-      |> Repo.query([], timeout: :infinity)
-
-      # Prune activities who link to an array of objects
-      """
-      delete from public.activities
-      where id in (
-        select a.id from public.activities a
-        join json_array_elements_text((a."data" -> 'object')::json) as j on jsonb_typeof(a."data" -> 'object') = 'array'
-        left join public.objects o on j.value = o.data ->> 'id'
-        left join public.activities a2 on j.value = a2.data ->> 'id'
-        left join public.users u on j.value = u.ap_id
-        group by a.id
-        having max(o.data ->> 'id') is null
-        and max(a2.data ->> 'id') is null
-        and max(u.ap_id) is null
-      )
-      """
-      |> Repo.query([], timeout: :infinity)
+      del_activities = prune_orphaned_activities()
+      Logger.info("Deleted #{del_activities} orphaned activities...")
     end
 
-    """
-    DELETE FROM hashtags AS ht
-    WHERE NOT EXISTS (
-      SELECT 1 FROM hashtags_objects hto
-      WHERE ht.id = hto.hashtag_id)
-    """
-    |> Repo.query()
+    %{:num_rows => del_hashtags} =
+      """
+      DELETE FROM hashtags AS ht
+      WHERE NOT EXISTS (
+        SELECT 1 FROM hashtags_objects hto
+        WHERE ht.id = hto.hashtag_id)
+      """
+      |> Repo.query!()
+
+    Logger.info("Deleted #{del_hashtags} no longer used hashtags...")
 
     if Keyword.get(options, :vacuum) do
+      Logger.info("Starting vacuum...")
       Maintenance.vacuum("full")
     end
+
+    Logger.info("All done!")
   end
 
   def run(["prune_task"]) do

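The recurring `Repo.query` → `Repo.query!` switch in this hunk is what makes the new progress logs possible: the bang variant raises on failure and returns a result struct whose `:num_rows` field can be pattern-matched. A minimal sketch (the trivial statement is chosen purely for illustration):

```elixir
# query!/3 raises on error, so the match on :num_rows is safe.
%{num_rows: n} = Pleroma.Repo.query!("DELETE FROM hashtags WHERE false", [], timeout: :infinity)
# n == 0 for this no-op delete; the task logs such counts as "Deleted N ..."
```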
@@ -3,7 +3,6 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
   alias Pleroma.Repo
   alias Pleroma.User
 
-  require Logger
   require Pleroma.Constants
 
   import Mix.Pleroma

@@ -14,7 +13,7 @@ def run(["http", url]) do
     start_pleroma()
 
     Pleroma.HTTP.get(url)
-    |> IO.inspect()
+    |> shell_info()
   end
 
   def run(["fetch_object", url]) do

@@ -27,7 +26,7 @@ def run(["fetch_object", url]) do
   def run(["home_timeline", nickname]) do
     start_pleroma()
     user = Repo.get_by!(User, nickname: nickname)
-    Logger.info("Home timeline query #{user.nickname}")
+    shell_info("Home timeline query #{user.nickname}")
 
     followed_hashtags =
       user

@@ -56,14 +55,14 @@ def run(["home_timeline", nickname]) do
       |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> IO.puts()
+    |> shell_info()
   end
 
   def run(["user_timeline", nickname, reading_nickname]) do
     start_pleroma()
     user = Repo.get_by!(User, nickname: nickname)
     reading_user = Repo.get_by!(User, nickname: reading_nickname)
-    Logger.info("User timeline query #{user.nickname}")
+    shell_info("User timeline query #{user.nickname}")
 
     params =
       %{limit: 20}

@@ -87,7 +86,7 @@ def run(["user_timeline", nickname, reading_nickname]) do
       |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> IO.puts()
+    |> shell_info()
   end
 
   def run(["notifications", nickname]) do

@@ -103,7 +102,7 @@ def run(["notifications", nickname]) do
       |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> IO.puts()
+    |> shell_info()
   end
 
   def run(["known_network", nickname]) do

@@ -129,6 +128,6 @@ def run(["known_network", nickname]) do
       |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> IO.puts()
+    |> shell_info()
   end
 end

@@ -27,11 +27,11 @@ def run(["ls-packs" | args]) do
       ]
 
       for {param, value} <- to_print do
-        IO.puts(IO.ANSI.format([:bright, param, :normal, ": ", value]))
+        shell_info(IO.ANSI.format([:bright, param, :normal, ": ", value]))
       end
 
       # A newline
-      IO.puts("")
+      shell_info("")
     end)
   end
 

@@ -49,7 +49,7 @@ def run(["get-packs" | args]) do
         pack = manifest[pack_name]
         src = pack["src"]
 
-        IO.puts(
+        shell_info(
           IO.ANSI.format([
             "Downloading ",
             :bright,

@@ -67,9 +67,9 @@ def run(["get-packs" | args]) do
         sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]
 
         if archive_sha == String.upcase(pack["src_sha256"]) do
-          IO.puts(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
+          shell_info(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
         else
-          IO.puts(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))
+          shell_info(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))
 
           raise "Bad SHA256 for #{pack_name}"
         end

@@ -80,7 +80,7 @@ def run(["get-packs" | args]) do
           |> Path.dirname()
           |> Path.join(pack["files"])
 
-        IO.puts(
+        shell_info(
           IO.ANSI.format([
             "Fetching the file list for ",
             :bright,

@@ -94,7 +94,7 @@ def run(["get-packs" | args]) do
 
         files = fetch_and_decode!(files_loc)
 
-        IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
+        shell_info(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
 
         pack_path =
           Path.join([

@@ -115,7 +115,7 @@ def run(["get-packs" | args]) do
           file_list: files_to_unzip
         )
 
-        IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))
+        shell_info(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))
 
         pack_json = %{
           pack: %{

@@ -132,7 +132,7 @@ def run(["get-packs" | args]) do
         File.write!(Path.join(pack_path, "pack.json"), Jason.encode!(pack_json, pretty: true))
         Pleroma.Emoji.reload()
       else
-        IO.puts(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
+        shell_info(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
       end
     end
   end

@@ -180,14 +180,14 @@ def run(["gen-pack" | args]) do
         custom_exts
       end
 
-    IO.puts("Using #{Enum.join(exts, " ")} extensions")
+    shell_info("Using #{Enum.join(exts, " ")} extensions")
 
-    IO.puts("Downloading the pack and generating SHA256")
+    shell_info("Downloading the pack and generating SHA256")
 
     {:ok, %{body: binary_archive}} = Pleroma.HTTP.get(src)
     archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()
 
-    IO.puts("SHA256 is #{archive_sha}")
+    shell_info("SHA256 is #{archive_sha}")
 
     pack_json = %{
       name => %{

@@ -208,7 +208,7 @@ def run(["gen-pack" | args]) do
 
     File.write!(files_name, Jason.encode!(emoji_map, pretty: true))
 
-    IO.puts("""
+    shell_info("""
 
     #{files_name} has been created and contains the list of all found emojis in the pack.
     Please review the files in the pack and remove those not needed.

@@ -230,11 +230,11 @@ def run(["gen-pack" | args]) do
         )
       )
 
-      IO.puts("#{pack_file} has been updated with the #{name} pack")
+      shell_info("#{pack_file} has been updated with the #{name} pack")
     else
       File.write!(pack_file, Jason.encode!(pack_json, pretty: true))
 
-      IO.puts("#{pack_file} has been created with the #{name} pack")
+      shell_info("#{pack_file} has been created with the #{name} pack")
     end
 
     Pleroma.Emoji.reload()

@@ -243,7 +243,7 @@ def run(["gen-pack" | args]) do
   def run(["reload"]) do
     start_pleroma()
     Pleroma.Emoji.reload()
-    IO.puts("Emoji packs have been reloaded.")
+    shell_info("Emoji packs have been reloaded.")
   end
 
   defp fetch_and_decode!(from) do

@@ -11,7 +11,6 @@ defmodule Mix.Tasks.Pleroma.RefreshCounterCache do
   alias Pleroma.CounterCache
   alias Pleroma.Repo
 
-  require Logger
   import Ecto.Query
 
   def run([]) do

@@ -58,7 +58,7 @@ def run(["index"]) do
       ]
     )
 
-    IO.puts("Created indices. Starting to insert posts.")
+    shell_info("Created indices. Starting to insert posts.")
 
     chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])
 

@@ -38,7 +38,7 @@ def run(["spoof-uploaded"]) do
     Logger.put_process_level(self(), :notice)
     start_pleroma()
 
-    IO.puts("""
+    shell_info("""
     +------------------------+
     |  SPOOF SEARCH UPLOADS  |
     +------------------------+

@@ -55,7 +55,7 @@ def run(["spoof-inserted"]) do
     Logger.put_process_level(self(), :notice)
     start_pleroma()
 
-    IO.puts("""
+    shell_info("""
     +----------------------+
     |  SPOOF SEARCH NOTES  |
     +----------------------+

@@ -77,7 +77,7 @@ defp do_spoof_uploaded() do
           uploads_search_spoofs_local_dir(Config.get!([Pleroma.Uploaders.Local, :uploads]))
 
         _ ->
-          IO.puts("""
+          shell_info("""
           NOTE:
             Not using local uploader; thus not affected by this exploit.
             It's impossible to check for files, but in case local uploader was used before

@@ -98,13 +98,13 @@ defp do_spoof_uploaded() do
 
     orphaned_attachs = upload_search_orphaned_attachments(not_orphaned_urls)
 
-    IO.puts("\nSearch concluded; here are the results:")
+    shell_info("\nSearch concluded; here are the results:")
     pretty_print_list_with_title(emoji, "Emoji")
     pretty_print_list_with_title(files, "Uploaded Files")
     pretty_print_list_with_title(post_attachs, "(Not Deleted) Post Attachments")
     pretty_print_list_with_title(orphaned_attachs, "Orphaned Uploads")
 
-    IO.puts("""
+    shell_info("""
     In total found
       #{length(emoji)} emoji
       #{length(files)} uploads

@@ -116,7 +116,7 @@ defp do_spoof_uploaded() do
   defp uploads_search_spoofs_local_dir(dir) do
     local_dir = String.replace_suffix(dir, "/", "")
 
-    IO.puts("Searching for suspicious files in #{local_dir}...")
+    shell_info("Searching for suspicious files in #{local_dir}...")
 
     glob_ext = "{" <> Enum.join(@activity_exts, ",") <> "}"
 

@@ -128,7 +128,7 @@ defp uploads_search_spoofs_local_dir(dir) do
   end
 
   defp uploads_search_spoofs_notes() do
-    IO.puts("Now querying DB for posts with spoofing attachments. This might take a while...")
+    shell_info("Now querying DB for posts with spoofing attachments. This might take a while...")
 
     patterns = [local_id_pattern() | activity_ext_url_patterns()]
 

@@ -153,7 +153,7 @@ defp uploads_search_spoofs_notes() do
   end
 
   defp upload_search_orphaned_attachments(not_orphaned_urls) do
-    IO.puts("""
+    shell_info("""
     Now querying DB for orphaned spoofing attachment (i.e. their post was deleted,
     but if :cleanup_attachments was not enabled traces remain in the database)
     This might take a bit...

@@ -184,7 +184,7 @@ defp upload_search_orphaned_attachments(not_orphaned_urls) do
   # |  S P O O F - I N S E R T E D  |
   # +-------------------------------+
   defp do_spoof_inserted() do
-    IO.puts("""
+    shell_info("""
     Searching for local posts whose Create activity has no ActivityPub id...
     This is a pretty good indicator, but only for spoofs of local actors
     and only if the spoofing happened after around late 2021.

@@ -194,9 +194,9 @@ defp do_spoof_inserted() do
       search_local_notes_without_create_id()
       |> Enum.sort()
 
-    IO.puts("Done.\n")
+    shell_info("Done.\n")
 
-    IO.puts("""
+    shell_info("""
     Now trying to weed out other poorly hidden spoofs.
     This can't detect all and may have some false positives.
     """)

@@ -207,9 +207,9 @@ defp do_spoof_inserted() do
       search_sus_notes_by_id_patterns()
       |> Enum.filter(fn r -> !(r in likely_spoofed_posts_set) end)
 
-    IO.puts("Done.\n")
+    shell_info("Done.\n")
 
-    IO.puts("""
+    shell_info("""
     Finally, searching for spoofed, local user accounts.
     (It's impossible to detect spoofed remote users)
     """)

@@ -220,7 +220,7 @@ defp do_spoof_inserted() do
     pretty_print_list_with_title(idless_create, "Likely Spoofed Posts")
     pretty_print_list_with_title(spoofed_users, "Spoofed local user accounts")
 
-    IO.puts("""
+    shell_info("""
     In total found:
       #{length(spoofed_users)} bogus users
       #{length(idless_create)} likely spoofed posts

@@ -289,27 +289,27 @@ defp search_bogus_local_users() do
   defp pretty_print_list_with_title(list, title) do
     title_len = String.length(title)
     title_underline = String.duplicate("=", title_len)
-    IO.puts(title)
-    IO.puts(title_underline)
+    shell_info(title)
+    shell_info(title_underline)
     pretty_print_list(list)
   end
 
-  defp pretty_print_list([]), do: IO.puts("")
+  defp pretty_print_list([]), do: shell_info("")
 
   defp pretty_print_list([{a, o} | rest])
        when (is_binary(a) or is_number(a)) and is_binary(o) do
-    IO.puts("  {#{a}, #{o}}")
+    shell_info("  {#{a}, #{o}}")
     pretty_print_list(rest)
   end
 
   defp pretty_print_list([{u, a, o} | rest])
        when is_binary(a) and is_binary(u) and is_binary(o) do
-    IO.puts("  {#{u}, #{a}, #{o}}")
+    shell_info("  {#{u}, #{a}, #{o}}")
     pretty_print_list(rest)
   end
 
   defp pretty_print_list([e | rest]) when is_binary(e) do
-    IO.puts("  #{e}")
+    shell_info("  #{e}")
     pretty_print_list(rest)
   end

@@ -114,7 +114,7 @@ def run(["reset_password", nickname]) do
          {:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
       shell_info("Generated password reset token for #{user.nickname}")
 
-      IO.puts("URL: #{~p[/api/v1/pleroma/password_reset/#{token.token}]}")
+      shell_info("URL: #{~p[/api/v1/pleroma/password_reset/#{token.token}]}")
     else
       _ ->
         shell_error("No local user #{nickname}")

@@ -301,7 +301,7 @@ def run(["invite" | rest]) do
       shell_info("Generated user invite token " <> String.replace(invite.invite_type, "_", " "))
 
       url = url(~p[/registration/#{invite.token}])
-      IO.puts(url)
+      shell_info(url)
     else
       error ->
         shell_error("Could not create invite token: #{inspect(error)}")

@@ -373,7 +373,7 @@ def run(["show", nickname]) do
       nickname
       |> User.get_cached_by_nickname()
 
-    shell_info("#{inspect(user)}")
+    shell_info(user)
   end
 
   def run(["send_confirmation", nickname]) do

@@ -457,7 +457,7 @@ def run(["blocking", nickname]) do
 
     with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
       blocks = User.following_ap_ids(user)
-      IO.puts("#{inspect(blocks)}")
+      shell_info(blocks)
     end
   end
 

@@ -516,12 +516,12 @@ def run(["fix_follow_state", local_user, remote_user]) do
          {:follow_data, Pleroma.Web.ActivityPub.Utils.fetch_latest_follow(local, remote)} do
       calculated_state = User.following?(local, remote)
 
-      IO.puts(
+      shell_info(
         "Request state is #{request_state}, vs calculated state of following=#{calculated_state}"
       )
 
       if calculated_state == false && request_state == "accept" do
-        IO.puts("Discrepancy found, fixing")
+        shell_info("Discrepancy found, fixing")
         Pleroma.Web.CommonAPI.reject_follow_request(local, remote)
         shell_info("Relationship fixed")
       else

@@ -551,14 +551,14 @@ defp refetch_public_keys(query) do
     |> Stream.each(fn users ->
       users
       |> Enum.each(fn user ->
-        IO.puts("Re-Resolving: #{user.ap_id}")
+        shell_info("Re-Resolving: #{user.ap_id}")
 
         with {:ok, user} <- Pleroma.User.fetch_by_ap_id(user.ap_id),
              changeset <- Pleroma.User.update_changeset(user),
              {:ok, _user} <- Pleroma.User.update_and_set_cache(changeset) do
           :ok
         else
-          error -> IO.puts("Could not resolve: #{user.ap_id}, #{inspect(error)}")
+          error -> shell_info("Could not resolve: #{user.ap_id}, #{inspect(error)}")
         end
       end)
     end)

@@ -24,7 +24,6 @@ defp reboot_time_keys,
   defp reboot_time_subkeys,
     do: [
       {:pleroma, Pleroma.Captcha, [:seconds_valid]},
-      {:pleroma, Pleroma.Upload, [:proxy_remote]},
      {:pleroma, :instance, [:upload_limit]},
      {:pleroma, :http, [:pool_size]},
      {:pleroma, :http, [:proxy_url]}

@@ -233,7 +233,7 @@ def config_descriptions(policies) do
     if function_exported?(policy, :config_description, 0) do
       description =
         @default_description
-        |> Map.merge(policy.config_description)
+        |> Map.merge(policy.config_description())
         |> Map.put(:group, :pleroma)
         |> Map.put(:tab, :mrf)
         |> Map.put(:type, :group)
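Why the added parentheses matter: when the module is held in a variable, Elixir treats `policy.config_description` as ambiguous with map-field access, and recent versions deprecate the parens-less zero-arity call. An illustrative sketch (standard-library names only):

```elixir
mod = DateTime

_now = mod.utc_now()  # unambiguous zero-arity function call

# `mod.utc_now` (no parentheses) still works but warns in recent Elixir,
# because the same dot syntax also means field access on maps/structs:
user = %{name: "amy"}
"amy" = user.name
```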
@@ -18,6 +18,8 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
   @timeout :timer.seconds(60)
   # Hibernate every X messages
   @hibernate_every 100
+  # Tune garbage collection for the long-lived websocket process
+  @fullsweep_after 20

   def init(%{qs: qs} = req, state) do
     with params <- Enum.into(:cow_qs.parse_qs(qs), %{}),
@@ -59,6 +61,10 @@ def websocket_init(state) do
       "#{__MODULE__} accepted websocket connection for user #{(state.user || %{id: "anonymous"}).id}, topic #{state.topic}"
     )

+    # The process is long-lived and can sometimes accumulate stale data in such a way it's
+    # not freed by young garbage cycles, thus make full collection sweeps more frequent
+    :erlang.process_flag(:fullsweep_after, @fullsweep_after)
+
     Streamer.add_socket(state.topic, state.oauth_token)
     {:ok, %{state | timer: timer()}}
   end

@@ -44,6 +44,26 @@ def route_aliases(%{path_info: ["objects", id], query_string: query_string}) do

   def route_aliases(_), do: []

+  def maybe_put_created_psudoheader(conn) do
+    case HTTPSignatures.signature_for_conn(conn) do
+      %{"created" => created} ->
+        put_req_header(conn, "(created)", created)
+
+      _ ->
+        conn
+    end
+  end
+
+  def maybe_put_expires_psudoheader(conn) do
+    case HTTPSignatures.signature_for_conn(conn) do
+      %{"expires" => expires} ->
+        put_req_header(conn, "(expires)", expires)
+
+      _ ->
+        conn
+    end
+  end
+
   defp assign_valid_signature_on_route_aliases(conn, []), do: conn

   defp assign_valid_signature_on_route_aliases(%{assigns: %{valid_signature: true}} = conn, _),
@@ -55,6 +75,8 @@ defp assign_valid_signature_on_route_aliases(conn, [path | rest]) do
     conn =
       conn
       |> put_req_header("(request-target)", request_target)
+      |> maybe_put_created_psudoheader()
+      |> maybe_put_expires_psudoheader()
       |> case do
         %{assigns: %{digest: digest}} = conn -> put_req_header(conn, "digest", digest)
         conn -> conn
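Background on the `(created)`/`(expires)` plumbing: cavage-style HTTP signatures may carry `created` and `expires` parameters, and when those are part of the signed header list they are verified as the pseudo-headers `(created)` and `(expires)` rather than as real request headers, so the plug copies them from the parsed signature into the request headers where the verifier can find them. A rough sketch of the same idea under those assumptions (module and function names here are ours, not Pleroma's):

```elixir
defmodule SignaturePseudoheaders do
  import Plug.Conn

  # Copy signed "created"/"expires" parameters into "(created)"/"(expires)"
  # request pseudo-headers so a verifier that signs over them can find them.
  # `signature_params` stands in for whatever the signature parser returned.
  def put_pseudoheaders(conn, %{} = signature_params) do
    Enum.reduce(["created", "expires"], conn, fn key, acc ->
      case signature_params do
        %{^key => value} -> put_req_header(acc, "(#{key})", value)
        _ -> acc
      end
    end)
  end
end
```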
@@ -9,13 +9,13 @@
       xmlns:ostatus="http://ostatus.org/schema/1.0"
       xmlns:statusnet="http://status.net/schema/api/1/">

-  <id><%= '#{url(~p"/tags/#{@tag}")}.rss' %></id>
+  <id><%= "#{url(~p"/tags/#{@tag}")}.rss" %></id>
   <title>#<%= @tag %></title>

   <subtitle><%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %></subtitle>
   <logo><%= feed_logo() %></logo>
   <updated><%= most_recent_update(@activities) %></updated>
-  <link rel="self" href="<%= '#{url(~p"/tags/#{@tag}")}.atom' %>" type="application/atom+xml"/>
+  <link rel="self" href="<%= "#{url(~p"/tags/#{@tag}")}.atom" %>" type="application/atom+xml"/>
   <%= for activity <- @activities do %>
     <%= render @view_module, "_tag_activity.atom", Map.merge(assigns, prepare_activity(activity, actor: true)) %>
   <% end %>
@@ -5,7 +5,7 @@

   <title>#<%= @tag %></title>
   <description><%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %></description>
-  <link><%= '#{url(~p"/tags/#{@tag}")}.rss' %></link>
+  <link><%= "#{url(~p"/tags/#{@tag}")}.rss" %></link>
   <webfeeds:logo><%= feed_logo() %></webfeeds:logo>
   <webfeeds:accentColor>2b90d9</webfeeds:accentColor>
   <%= for activity <- @activities do %>
@@ -10,12 +10,12 @@
   <title><%= @user.nickname <> "'s timeline" %></title>
   <updated><%= most_recent_update(@activities, @user) %></updated>
   <logo><%= logo(@user) %></logo>
-  <link rel="self" href="<%= '#{url(~p"/users/#{@user.nickname}/feed")}.atom' %>" type="application/atom+xml"/>
+  <link rel="self" href="<%= "#{url(~p"/users/#{@user.nickname}/feed")}.atom" %>" type="application/atom+xml"/>

   <%= render @view_module, "_author.atom", assigns %>

   <%= if last_activity(@activities) do %>
-    <link rel="next" href="<%= '#{url(~p"/users/#{@user.nickname}/feed")}.atom?max_id=#{last_activity(@activities).id}' %>" type="application/atom+xml"/>
+    <link rel="next" href="<%= "#{url(~p"/users/#{@user.nickname}/feed")}.atom?max_id=#{last_activity(@activities).id}" %>" type="application/atom+xml"/>
   <% end %>

   <%= for activity <- @activities do %>
@@ -5,12 +5,12 @@
   <title><%= @user.nickname <> "'s timeline" %></title>
   <updated><%= most_recent_update(@activities, @user) %></updated>
   <image><%= logo(@user) %></image>
-  <link><%= '#{url(~p"/users/#{@user.nickname}/feed")}.rss' %></link>
+  <link><%= "#{url(~p"/users/#{@user.nickname}/feed")}.rss" %></link>

   <%= render @view_module, "_author.rss", assigns %>

   <%= if last_activity(@activities) do %>
-    <link rel="next"><%= '#{url(~p"/users/#{@user.nickname}/feed")}.rss?max_id=#{last_activity(@activities).id}' %></link>
+    <link rel="next"><%= "#{url(~p"/users/#{@user.nickname}/feed")}.rss?max_id=#{last_activity(@activities).id}" %></link>
   <% end %>

   <%= for activity <- @activities do %>
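All the template hunks above make the same change: single-quoted charlist literals become double-quoted strings. An illustrative contrast:

```elixir
# Illustrative only: charlists vs. strings in Elixir.
charlist = ~c"abc"   # the modern spelling of 'abc' — a list of codepoints
string = "abc"       # a UTF-8 binary

true = is_list(charlist)
true = is_binary(string)

# Single-quoted charlist literals are soft-deprecated in recent Elixir,
# and XML/RSS rendering expects binaries, so the feed templates now build
# "#{...}.rss" strings instead of '#{...}.rss' charlists.
```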
mix.exs

@@ -160,7 +160,9 @@ defp deps do
       {:timex, "~> 3.7"},
       {:ueberauth, "== 0.10.5"},
       {:linkify, "~> 0.5.3"},
-      {:http_signatures, "~> 0.1.2"},
+      {:http_signatures,
+       git: "https://akkoma.dev/AkkomaGang/http_signatures.git",
+       ref: "d44c43d66758c6a73eaa4da9cffdbee0c5da44ae"},
       {:telemetry, "~> 1.2"},
       {:telemetry_poller, "~> 1.0"},
       {:telemetry_metrics, "~> 0.6"},
mix.lock

@@ -57,7 +57,7 @@
   "hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
   "hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
   "html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"},
-  "http_signatures": {:hex, :http_signatures, "0.1.2", "ed1cc7043abcf5bb4f30d68fb7bad9d618ec1a45c4ff6c023664e78b67d9c406", [:mix], [], "hexpm", "f08aa9ac121829dae109d608d83c84b940ef2f183ae50f2dd1e9a8bc619d8be7"},
+  "http_signatures": {:git, "https://akkoma.dev/AkkomaGang/http_signatures.git", "d44c43d66758c6a73eaa4da9cffdbee0c5da44ae", [ref: "d44c43d66758c6a73eaa4da9cffdbee0c5da44ae"]},
   "httpoison": {:hex, :httpoison, "1.8.2", "9eb9c63ae289296a544842ef816a85d881d4a31f518a0fec089aaa744beae290", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "2bb350d26972e30c96e2ca74a1aaf8293d61d0742ff17f01e0279fef11599921"},
   "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
   "inet_cidr": {:hex, :inet_cidr, "1.0.8", "d26bb7bdbdf21ae401ead2092bf2bb4bf57fe44a62f5eaa5025280720ace8a40", [:mix], [], "hexpm", "d5b26da66603bb56c933c65214c72152f0de9a6ea53618b56d63302a68f6a90e"},
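One practical note on the dependency change above: moving `http_signatures` from a Hex requirement to a pinned git `ref` means version resolution no longer applies; mix.lock records the commit hash instead of package checksums, and upgrades mean editing the ref by hand. The general shape, shown as a sketch (the git URL and ref are from this diff; comments are ours):

```elixir
defp deps do
  [
    # Hex dependency: resolved by version requirement, checksummed in mix.lock.
    {:telemetry, "~> 1.2"},
    # Git dependency: pinned to an exact commit; `mix deps.get` records the
    # ref in mix.lock, and upgrading means bumping the ref here.
    {:http_signatures,
     git: "https://akkoma.dev/AkkomaGang/http_signatures.git",
     ref: "d44c43d66758c6a73eaa4da9cffdbee0c5da44ae"}
  ]
end
```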
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2022-07-28 09:35+0000\n"
 "PO-Revision-Date: 2023-08-04 14:19+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Catalan <http://translate.akkoma.dev/projects/akkoma/"
-"akkoma-backend-config-descriptions/ca/>\n"
+"Language-Team: Catalan <http://translate.akkoma.dev/projects/akkoma/akkoma-"
+"backend-config-descriptions/ca/>\n"
 "Language: ca\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3296,18 +3296,6 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-msgstr ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5798,12 +5786,6 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy remote"
-msgstr "Proxy remote"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
@@ -2602,12 +2602,6 @@ msgctxt "config description at :pleroma-Pleroma.Upload > :link_name"
 msgid "If enabled, a name parameter will be added to the URL of the upload. For example `https://instance.tld/media/imagehash.png?name=realname.png`."
 msgstr ""

-#: lib/pleroma/docs/translator.ex:5
-#, elixir-autogen, elixir-format
-msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy requests to the remote uploader.\n\nUseful if media upload endpoint is not internet accessible.\n"
-msgstr ""
-
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -4888,12 +4882,6 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr ""

-#: lib/pleroma/docs/translator.ex:5
-#, elixir-autogen, elixir-format
-msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy remote"
-msgstr ""
-
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
@@ -2603,12 +2603,6 @@ msgctxt "config description at :pleroma-Pleroma.Upload > :link_name"
 msgid "If enabled, a name parameter will be added to the URL of the upload. For example `https://instance.tld/media/imagehash.png?name=realname.png`."
 msgstr ""

-#: lib/pleroma/docs/translator.ex:5
-#, elixir-autogen, elixir-format
-msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy requests to the remote uploader.\n\nUseful if media upload endpoint is not internet accessible.\n"
-msgstr ""
-
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -4889,12 +4883,6 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr ""

-#: lib/pleroma/docs/translator.ex:5
-#, elixir-autogen, elixir-format
-msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy remote"
-msgstr ""
-
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
File diff suppressed because it is too large
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2022-08-06 22:23+0000\n"
 "PO-Revision-Date: 2023-08-04 14:19+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/"
-"akkoma-backend-config-descriptions/es/>\n"
+"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/akkoma-"
+"backend-config-descriptions/es/>\n"
 "Language: es\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3314,18 +3314,6 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-msgstr ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5816,12 +5804,6 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy remote"
-msgstr "Proxy remote"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2022-08-06 21:54+0000\n"
 "PO-Revision-Date: 2023-08-04 14:26+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Dutch <http://translate.akkoma.dev/projects/akkoma/"
-"akkoma-backend-config-descriptions/nl/>\n"
+"Language-Team: Dutch <http://translate.akkoma.dev/projects/akkoma/akkoma-"
+"backend-config-descriptions/nl/>\n"
 "Language: nl\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3316,18 +3316,6 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-msgstr ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5818,12 +5806,6 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy remote"
-msgstr "Proxy remote"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2023-07-07 18:47+0000\n"
 "PO-Revision-Date: 2023-08-04 14:19+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Thai <http://translate.akkoma.dev/projects/akkoma/"
-"akkoma-backend-config-descriptions/th/>\n"
+"Language-Team: Thai <http://translate.akkoma.dev/projects/akkoma/akkoma-"
+"backend-config-descriptions/th/>\n"
 "Language: th\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3325,18 +3325,6 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-msgstr ""
-"Proxy requests to the remote uploader.\n"
-"\n"
-"Useful if media upload endpoint is not internet accessible.\n"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5827,12 +5815,6 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"

-#: lib/pleroma/docs/translator.ex:5
-#, fuzzy
-msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
-msgid "Proxy remote"
-msgstr "Proxy remote"
-
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,12 +1,10 @@
 defmodule Pleroma.Repo.Migrations.AddContextIndex do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def change do
     create(
       index(:activities, ["(data->>'type')", "(data->>'context')"],
-        name: :activities_context_index,
-        concurrently: true
+        name: :activities_context_index
       )
     )
   end
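A note on this batch of migration changes: `concurrently: true` makes Ecto emit `CREATE INDEX CONCURRENTLY`, which Postgres refuses to run inside a transaction, hence the paired `@disable_ddl_transaction true`. Dropping both, as these hunks do, makes the index build transactional again. A minimal sketch of the two variants, using a hypothetical table and index name:

```elixir
defmodule MyApp.Repo.Migrations.ExampleIndex do
  use Ecto.Migration

  # Variant A (what these migrations now do): a plain, transactional
  # index build, fine when the table is empty or small.
  def change do
    create(index(:posts, [:inserted_at], name: :posts_inserted_at_index))
  end

  # Variant B (what they did before), for reference: a concurrent build
  # must opt out of the migration's DDL transaction, since
  # CREATE INDEX CONCURRENTLY cannot run inside one.
  #
  #   @disable_ddl_transaction true
  #
  #   def change do
  #     create(
  #       index(:posts, [:inserted_at],
  #         name: :posts_inserted_at_index,
  #         concurrently: true
  #       )
  #     )
  #   end
end
```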
@@ -1,11 +1,9 @@
 defmodule Pleroma.Repo.Migrations.AddFTSIndexToActivities do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def change do
     create(
       index(:activities, ["(to_tsvector('english', data->'object'->>'content'))"],
-        concurrently: true,
         using: :gin,
         name: :activities_fts
       )
@@ -1,12 +1,9 @@
 defmodule Pleroma.Repo.Migrations.AddTagIndex do
   use Ecto.Migration

-  @disable_ddl_transaction true
-
   def change do
     create(
       index(:activities, ["(data #> '{\"object\",\"tag\"}')"],
-        concurrently: true,
         using: :gin,
         name: :activities_tags
       )
@@ -1,8 +1,6 @@
 defmodule Pleroma.Repo.Migrations.AddSecondObjectIndexToActivty do
   use Ecto.Migration

-  @disable_ddl_transaction true
-
   def change do
     drop_if_exists(
       index(:activities, ["(data->'object'->>'id')", "(data->>'type')"],
@@ -12,8 +10,7 @@ def change do

     create(
       index(:activities, ["(coalesce(data->'object'->>'id', data->>'object'))"],
-        name: :activities_create_objects_index,
-        concurrently: true
+        name: :activities_create_objects_index
       )
     )
   end
@@ -1,14 +1,7 @@
 defmodule Pleroma.Repo.Migrations.AddObjectActorIndex do
   use Ecto.Migration

-  @disable_ddl_transaction true
-
   def change do
-    create(
-      index(:objects, ["(data->>'actor')", "(data->>'type')"],
-        concurrently: true,
-        name: :objects_actor_type
-      )
-    )
+    create(index(:objects, ["(data->>'actor')", "(data->>'type')"], name: :objects_actor_type))
   end
 end
@@ -1,14 +1,12 @@
 defmodule Pleroma.Repo.Migrations.AddActorToActivity do
   use Ecto.Migration

-  @disable_ddl_transaction true
-
   def up do
     alter table(:activities) do
       add(:actor, :string)
     end

-    create(index(:activities, [:actor, "id DESC NULLS LAST"], concurrently: true))
+    create(index(:activities, [:actor, "id DESC NULLS LAST"]))
   end

   def down do
@@ -1,8 +1,7 @@
 defmodule Pleroma.Repo.Migrations.AddSortIndexToActivities do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def change do
-    create(index(:activities, ["id desc nulls last"], concurrently: true))
+    create(index(:activities, ["id desc nulls last"]))
   end
 end
@@ -1,9 +1,8 @@
 defmodule Pleroma.Repo.Migrations.AddFollowerAddressIndexToUsers do
   use Ecto.Migration

-  @disable_ddl_transaction true
   def change do
-    create(index(:users, [:follower_address], concurrently: true))
-    create(index(:users, [:following], concurrently: true, using: :gin))
+    create(index(:users, [:follower_address]))
+    create(index(:users, [:following], using: :gin))
   end
 end
@@ -1,9 +1,8 @@
 defmodule Pleroma.Repo.Migrations.ModifyActivityIndex do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def change do
-    create(index(:activities, ["id desc nulls last", "local"], concurrently: true))
+    create(index(:activities, ["id desc nulls last", "local"]))
     drop_if_exists(index(:activities, ["id desc nulls last"]))
   end
 end
@@ -1,13 +1,7 @@
 defmodule Pleroma.Repo.Migrations.CreateApidHostExtractionIndex do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def change do
-    create(
-      index(:activities, ["(split_part(actor, '/', 3))"],
-        concurrently: true,
-        name: :activities_hosts
-      )
-    )
+    create(index(:activities, ["(split_part(actor, '/', 3))"], name: :activities_hosts))
   end
 end
@@ -1,13 +1,7 @@
 defmodule Pleroma.Repo.Migrations.CreateActivitiesInReplyToIndex do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def change do
-    create(
-      index(:activities, ["(data->'object'->>'inReplyTo')"],
-        concurrently: true,
-        name: :activities_in_reply_to
-      )
-    )
+    create(index(:activities, ["(data->'object'->>'inReplyTo')"], name: :activities_in_reply_to))
   end
 end
@@ -1,6 +1,5 @@
 defmodule Pleroma.Repo.Migrations.AddVisibilityFunction do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def up do
     definition = """
@@ -30,8 +29,7 @@ def up do

     create(
       index(:activities, ["activity_visibility(actor, recipients, data)"],
-        name: :activities_visibility_index,
-        concurrently: true
+        name: :activities_visibility_index
       )
     )
   end
@@ -1,11 +1,9 @@
 defmodule Pleroma.Repo.Migrations.AddActivitiesLikesIndex do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def change do
     create(
       index(:activities, ["((data #> '{\"object\",\"likes\"}'))"],
-        concurrently: true,
         name: :activities_likes,
         using: :gin
       )
@@ -1,6 +1,5 @@
 defmodule Pleroma.Repo.Migrations.AddCorrectDMIndex do
   use Ecto.Migration
-  @disable_ddl_transaction true

   def up do
     drop_if_exists(
@@ -12,7 +11,6 @@ def up do
     create(
       index(:activities, ["activity_visibility(actor, recipients, data)", "id DESC NULLS LAST"],
         name: :activities_visibility_index,
-        concurrently: true,
         where: "data->>'type' = 'Create'"
       )
     )
@@ -22,7 +20,6 @@ def down do
     drop_if_exists(
       index(:activities, ["activity_visibility(actor, recipients, data)", "id DESC"],
         name: :activities_visibility_index,
-        concurrently: true,
         where: "data->>'type' = 'Create'"
       )
     )
@@ -1,13 +1,11 @@
 defmodule Pleroma.Repo.Migrations.AddIndexOnSubscribers do
   use Ecto.Migration

-  @disable_ddl_transaction true
   def change do
     create(
       index(:users, ["(info->'subscribers')"],
         name: :users_subscribers_index,
-        using: :gin,
-        concurrently: true
+        using: :gin
       )
     )
   end
@@ -1,8 +1,7 @@
 defmodule Pleroma.Repo.Migrations.AddFollowingAddressIndexToUser do
   use Ecto.Migration

-  @disable_ddl_transaction true
   def change do
-    create(index(:users, [:following_address], concurrently: true))
+    create(index(:users, [:following_address]))
   end
 end
@@ -100,7 +100,7 @@ def update_follower_count(%{id: user_id} = user) do

     "users"
     |> where(id: ^user_id)
-    |> join(:inner, [u], s in subquery(follower_count_query))
+    |> join(:inner, [u], s in subquery(follower_count_query), on: true)
     |> update([u, s],
       set: [follower_count: s.count]
     )
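On the `on: true` added above: newer Ecto versions require an explicit `:on` clause for most joins rather than silently defaulting to a cross-join condition, so pairing each row with a single-row aggregate subquery now has to spell it out. A minimal sketch of the pattern, with hypothetical table names:

```elixir
import Ecto.Query

# Hypothetical: a one-row subquery computing an aggregate.
count_query = from(f in "follows", select: %{count: count(f.id)})

# Join every matched user row against that single aggregate row; the
# explicit `on: true` is what newer Ecto demands for this cross-join shape.
update_query =
  from(u in "users",
    join: s in subquery(count_query),
    on: true,
    update: [set: [follower_count: s.count]]
  )

# Repo.update_all(update_query, []) would then apply it (Repo is assumed).
```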
@@ -471,7 +471,7 @@ test "it prunes orphaned activities with the --prune-orphaned-activities" do
     assert length(activities) == 4
   end

-  test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do
+  test "it prunes orphaned activities with prune_orphaned_activities when the objects are referenced from an array" do
     %Object{} |> Map.merge(%{data: %{"id" => "existing_object"}}) |> Repo.insert()
     %User{} |> Map.merge(%{ap_id: "existing_actor"}) |> Repo.insert()

@@ -479,6 +479,7 @@ test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do
       |> Map.merge(%{
         local: false,
         data: %{
+          "type" => "Flag",
           "id" => "remote_activity_existing_object",
           "object" => ["non_ existing_object", "existing_object"]
         }
@@ -489,6 +490,7 @@ test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do
       |> Map.merge(%{
         local: false,
         data: %{
+          "type" => "Flag",
           "id" => "remote_activity_existing_actor",
           "object" => ["non_ existing_object", "existing_actor"]
         }
@@ -499,6 +501,7 @@ test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do
       |> Map.merge(%{
         local: false,
         data: %{
+          "type" => "Flag",
           "id" => "remote_activity_existing_activity",
           "object" => ["non_ existing_object", "remote_activity_existing_actor"]
         }
@@ -509,6 +512,7 @@ test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do
       |> Map.merge(%{
         local: false,
         data: %{
+          "type" => "Flag",
           "id" => "remote_activity_without_existing_referenced_object",
           "object" => ["owo", "whats_this"]
         }
@@ -518,7 +522,7 @@ test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do
     assert length(Repo.all(Activity)) == 4
     Mix.Tasks.Pleroma.Database.run(["prune_objects"])
     assert length(Repo.all(Activity)) == 4
-    Mix.Tasks.Pleroma.Database.run(["prune_objects", "--prune-orphaned-activities"])
+    Mix.Tasks.Pleroma.Database.run(["prune_orphaned_activities"])
     activities = Repo.all(Activity)
     assert length(activities) == 3

@@ -280,12 +280,13 @@ test "no user to set status" do
   test "password reset token is generated" do
     user = insert(:user)

-    assert capture_io(fn ->
-             Mix.Tasks.Pleroma.User.run(["reset_password", user.nickname])
-           end) =~ "URL:"
+    Mix.Tasks.Pleroma.User.run(["reset_password", user.nickname])

     assert_receive {:mix_shell, :info, [message]}
     assert message =~ "Generated"
+
+    assert_receive {:mix_shell, :info, [url]}
+    assert url =~ "URL:"
   end

   test "no user to reset password" do
@@ -327,12 +328,13 @@ test "no user to reset MFA" do

   describe "running invite" do
     test "invite token is generated" do
-      assert capture_io(fn ->
-               Mix.Tasks.Pleroma.User.run(["invite"])
-             end) =~ "http"
+      Mix.Tasks.Pleroma.User.run(["invite"])

       assert_receive {:mix_shell, :info, [message]}
       assert message =~ "Generated user invite token one time"
+
+      assert_receive {:mix_shell, :info, [invite_token]}
+      assert invite_token =~ "http"
     end

     test "token is generated with expires_at" do
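The two test rewrites above follow directly from the `IO.puts` → `shell_info` migration earlier in this diff: once output goes through `Mix.shell()`, tests stop scraping stdout with `capture_io` and instead receive `{:mix_shell, :info, [...]}` messages from `Mix.Shell.Process`. A condensed sketch of the pattern (the test module and messages are hypothetical):

```elixir
defmodule MyTaskTest do
  use ExUnit.Case

  setup do
    # Route shell output to the test process as messages.
    Mix.shell(Mix.Shell.Process)
    on_exit(fn -> Mix.shell(Mix.Shell.IO) end)
    :ok
  end

  test "output arrives as messages, in order" do
    Mix.shell().info("Generated token")
    Mix.shell().info("URL: https://example.test/registration/abc")

    # Each info/1 call delivers one {:mix_shell, :info, [msg]} message,
    # so multi-line output can be asserted line by line.
    assert_receive {:mix_shell, :info, [first]}
    assert first =~ "Generated"

    assert_receive {:mix_shell, :info, [second]}
    assert second =~ "URL:"
  end
end
```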
@@ -389,7 +389,6 @@ test "complex keyword with nested mixed childs" do
           %{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]},
           %{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
           %{"tuple" => [":link_name", true]},
-          %{"tuple" => [":proxy_remote", false]},
           %{"tuple" => [":common_map", %{":key" => "value"}]},
           %{
             "tuple" => [
@@ -413,7 +412,6 @@ test "complex keyword with nested mixed childs" do
         uploader: Pleroma.Uploaders.Local,
         filters: [Pleroma.Upload.Filter.Dedupe],
         link_name: true,
-        proxy_remote: false,
         common_map: %{key: "value"},
         proxy_opts: [
           redirect_on_failure: false,
@@ -18,7 +18,11 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlugTest do
       {HTTPSignatures, [],
        [
          signature_for_conn: fn _ ->
-           %{"keyId" => "http://mastodon.example.org/users/admin#main-key"}
+           %{
+             "keyId" => "http://mastodon.example.org/users/admin#main-key",
+             "created" => "1234567890",
+             "expires" => "1234567890"
+           }
         end,
         validate_conn: fn conn ->
           Map.get(conn.assigns, :valid_signature, true)
@@ -141,4 +145,18 @@ test "aliases redirected /object endpoints", _ do
     assert ["/notice/#{act.id}", "/notice/#{act.id}?actor=someparam"] ==
              HTTPSignaturePlug.route_aliases(conn)
   end
+
+  test "(created) psudoheader", _ do
+    conn = build_conn(:get, "/doesntmattter")
+    conn = HTTPSignaturePlug.maybe_put_created_psudoheader(conn)
+    created_header = List.keyfind(conn.req_headers, "(created)", 0)
+    assert {_, "1234567890"} = created_header
+  end
+
+  test "(expires) psudoheader", _ do
+    conn = build_conn(:get, "/doesntmattter")
+    conn = HTTPSignaturePlug.maybe_put_expires_psudoheader(conn)
+    expires_header = List.keyfind(conn.req_headers, "(expires)", 0)
+    assert {_, "1234567890"} = expires_header
+  end
 end