author | Alexander Strizhakov <alex.strizhakov@gmail.com> | 2020-07-16 19:57:27 +0300
---|---|---
committer | Alexander Strizhakov <alex.strizhakov@gmail.com> | 2021-05-11 18:12:33 +0300
commit | 2538c741c0d1bf9c2d9c8e02953d3d6e63220e8f | (patch)
tree | 85139b44fc541482a2d923985bff58f1c2ccbdb7 | /lib
parent | 745375bdcf2679ff803dd4ebc4a8313a7b5fb157 | (diff)
download | pleroma-feature/config-versioning.tar.gz |
config versioning (feature/config-versioning)
- added a DynamicSupervisor, which starts Pleroma deps and restarts config-dependent deps
- added versioning for the in-database config. A new version is created from
the changes passed to the config update/delete endpoint, and every version
stores a backup of the full config as it was after that update. Versioning
supports rollbacks by N steps; on rollback, every version newer than the
version rolled back to is deleted.
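
The new `Pleroma.Config.Versioning` module (added later in this diff) is the single entry point for config writes and rollbacks, used by the mix task and, per the message above, by the config update/delete endpoint. A minimal sketch of how it is driven, assuming a running instance with `configurable_from_database` enabled; the group/key/values are illustrative only:

```elixir
alias Pleroma.Config.Versioning

# Each accepted change (or list of changes) produces a new version together
# with a full backup of the config table.
{:ok, _} =
  Versioning.new_version(%{group: :pleroma, key: :instance, value: [name: "My instance"]})

# Deletions go through the same entry point.
{:ok, _} = Versioning.new_version(%{group: :pleroma, key: :instance, delete: true})

# Roll back two versions; everything newer than the target version is dropped.
case Versioning.rollback(2) do
  {:ok, _} -> :ok
  {:error, :no_current_version} -> :nothing_to_roll_back
  {:error, :rollback_not_possible} -> :not_enough_versions
  other -> other
end
```

From the command line the same flows go through the reworked task, e.g. `mix pleroma.config rollback -s 2` or `mix pleroma.config migrate_to_db --config config/prod.secret.exs`.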
Diffstat (limited to 'lib')
28 files changed, 1777 insertions, 1019 deletions
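
Much of the diff below replaces `Pleroma.Config.TransferTask` with `Pleroma.Application.ConfigDependentDeps`, a GenServer that maps config paths (e.g. `{:pleroma, Oban}`) to the processes that depend on them and restarts only those under the application's DynamicSupervisor. A rough sketch of that flow, assuming the server is running under `Pleroma.Supervisor`; the change values are illustrative:

```elixir
alias Pleroma.Application.ConfigDependentDeps

# A change to the Oban group matches the {{:pleroma, Oban}, Oban} relation, so
# its path is queued for a later restart rather than applied immediately.
changes = [%{group: :pleroma, key: Oban, value: [queues: [background: 5]]}]
ConfigDependentDeps.save_config_paths_for_restart(changes)

ConfigDependentDeps.need_reboot?()
#=> true

# Terminates and restarts the affected children; returns :ok or the first
# start error.
ConfigDependentDeps.restart_dependencies()
```

Whole-application restarts remain only for non-Pleroma groups such as `:ex_aws` or `:eshhd`.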
diff --git a/lib/mix/pleroma.ex b/lib/mix/pleroma.ex index 2b6c7d6bb..f7a52d16c 100644 --- a/lib/mix/pleroma.ex +++ b/lib/mix/pleroma.ex @@ -4,7 +4,6 @@ defmodule Mix.Pleroma do @apps [ - :restarter, :ecto, :ecto_sql, :postgrex, @@ -16,11 +15,14 @@ defmodule Mix.Pleroma do :fast_html, :oban ] + @cachex_children ["object", "user", "scrubber", "web_resp"] + @doc "Common functions to be reused in mix tasks" + @spec start_pleroma() :: {:ok, pid()} def start_pleroma do Pleroma.Config.Holder.save_default() - Pleroma.Config.Oban.warn() + Pleroma.Config.DeprecationWarnings.check_oban_config() Pleroma.Application.limiters_setup() Application.put_env(:phoenix, :serve_endpoints, false, persistent: true) @@ -47,37 +49,27 @@ defmodule Mix.Pleroma do plugins: [] ] - children = - [ - Pleroma.Repo, - Pleroma.Emoji, - {Pleroma.Config.TransferTask, false}, - Pleroma.Web.Endpoint, - {Oban, oban_config}, - {Majic.Pool, - [name: Pleroma.MajicPool, pool_size: Pleroma.Config.get([:majic_pool, :size], 2)]} - ] ++ - http_children(adapter) + children = [ + Pleroma.Repo, + Supervisor.child_spec({Task, &Pleroma.Application.Environment.load_from_db_and_update/0}, + id: :update_env + ), + Pleroma.Web.Endpoint, + Pleroma.Emoji, + {Oban, oban_config}, + {Majic.Pool, + [name: Pleroma.MajicPool, pool_size: Pleroma.Config.get([:majic_pool, :size], 2)]} + ] + + children = [Pleroma.Application.StartUpDependencies.adapter_module() | children] - cachex_children = Enum.map(@cachex_children, &Pleroma.Application.build_cachex(&1, [])) + cachex_children = + Enum.map(@cachex_children, &Pleroma.Application.StartUpDependencies.cachex_spec({&1, []})) Supervisor.start_link(children ++ cachex_children, strategy: :one_for_one, name: Pleroma.Supervisor ) - - if Pleroma.Config.get(:env) not in [:test, :benchmark] do - pleroma_rebooted?() - end - end - - defp pleroma_rebooted? 
do - if Restarter.Pleroma.rebooted?() do - :ok - else - Process.sleep(10) - pleroma_rebooted?() - end end def load_pleroma do @@ -129,11 +121,4 @@ defmodule Mix.Pleroma do def escape_sh_path(path) do ~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(') end - - defp http_children(Tesla.Adapter.Gun) do - Pleroma.Gun.ConnectionPool.children() ++ - [{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}] - end - - defp http_children(_), do: [] end diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex index 22502a522..41b287163 100644 --- a/lib/mix/tasks/pleroma/config.ex +++ b/lib/mix/tasks/pleroma/config.ex @@ -14,10 +14,13 @@ defmodule Mix.Tasks.Pleroma.Config do @shortdoc "Manages the location of the config" @moduledoc File.read!("docs/administration/CLI_tasks/config.md") - def run(["migrate_to_db"]) do + def run(["migrate_to_db" | options]) do check_configdb(fn -> start_pleroma() - migrate_to_db() + + {opts, _} = OptionParser.parse!(options, strict: [config: :string]) + + migrate_to_db(opts) end) end @@ -39,15 +42,13 @@ defmodule Mix.Tasks.Pleroma.Config do check_configdb(fn -> start_pleroma() - header = config_header() - settings = ConfigDB |> Repo.all() |> Enum.sort() unless settings == [] do - shell_info("#{header}") + shell_info("#{Pleroma.Config.Loader.config_header()}") Enum.each(settings, &dump(&1)) else @@ -73,9 +74,10 @@ defmodule Mix.Tasks.Pleroma.Config do check_configdb(fn -> start_pleroma() - group = maybe_atomize(group) - - dump_group(group) + group + |> maybe_atomize() + |> ConfigDB.get_all_by_group() + |> Enum.each(&dump/1) end) end @@ -97,17 +99,11 @@ defmodule Mix.Tasks.Pleroma.Config do end) end - def run(["reset", "--force"]) do + def run(["reset" | opts]) do check_configdb(fn -> start_pleroma() - truncatedb() - shell_info("The ConfigDB settings have been removed from the database.") - end) - end - def run(["reset"]) do - check_configdb(fn -> - start_pleroma() + {opts, []} = OptionParser.parse!(opts, strict: [force: :boolean]) shell_info("The following settings will be permanently removed:") @@ -118,8 +114,8 @@ defmodule Mix.Tasks.Pleroma.Config do shell_error("\nTHIS CANNOT BE UNDONE!") - if shell_prompt("Are you sure you want to continue?", "n") in ~w(Yn Y y) do - truncatedb() + if opts[:force] or shell_prompt("Are you sure you want to continue?", "n") in ~w(Yn Y y) do + Pleroma.Config.Versioning.reset() shell_info("The ConfigDB settings have been removed from the database.") else @@ -128,55 +124,65 @@ defmodule Mix.Tasks.Pleroma.Config do end) end - def run(["delete", "--force", group, key]) do - start_pleroma() - - group = maybe_atomize(group) - key = maybe_atomize(key) + def run(["delete", group]), do: delete(group, force: false) + def run(["delete", "--force", group]), do: delete(group, force: true) - with true <- key_exists?(group, key) do - shell_info("The following settings will be removed from ConfigDB:\n") + def run(["delete", group, key]), do: delete(group, key, force: false) + def run(["delete", "--force", group, key]), do: delete(group, key, force: true) - group - |> ConfigDB.get_by_group_and_key(key) - |> dump() + def run(["rollback" | options]) do + check_configdb(fn -> + start_pleroma() + {opts, _} = OptionParser.parse!(options, strict: [steps: :integer], aliases: [s: :steps]) - delete_key(group, key) - else - _ -> - shell_error("No settings in ConfigDB for #{inspect(group)}, #{inspect(key)}. 
Aborting.") - end + do_rollback(opts) + end) end - def run(["delete", "--force", group]) do + defp delete(group, opts) do start_pleroma() group = maybe_atomize(group) - with true <- group_exists?(group) do + configs = ConfigDB.get_all_by_group(group) + + if configs != [] do shell_info("The following settings will be removed from ConfigDB:\n") - dump_group(group) - delete_group(group) + Enum.each(configs, &dump/1) + + if opts[:force] or shell_prompt("Are you sure you want to continue?", "n") in ~w(Yn Y y) do + Enum.each(configs, fn config -> + Pleroma.Config.Versioning.new_version(%{ + group: config.group, + key: config.key, + delete: true + }) + end) + else + shell_error("No changes made.") + end else - _ -> shell_error("No settings in ConfigDB for #{inspect(group)}. Aborting.") + shell_error("No settings in ConfigDB for #{inspect(group)}. Aborting.") end end - def run(["delete", group, key]) do + defp delete(group, key, opts) do start_pleroma() group = maybe_atomize(group) key = maybe_atomize(key) - with true <- key_exists?(group, key) do + with %ConfigDB{} = config <- ConfigDB.get_by_group_and_key(group, key) do shell_info("The following settings will be removed from ConfigDB:\n") - group - |> ConfigDB.get_by_group_and_key(key) - |> dump() + dump(config) - if shell_prompt("Are you sure you want to continue?", "n") in ~w(Yn Y y) do - delete_key(group, key) + if opts[:force] or shell_prompt("Are you sure you want to continue?", "n") in ~w(Yn Y y) do + Pleroma.Config.Versioning.new_version(%{ + group: config.group, + key: config.key, + delete: true + }) else shell_error("No changes made.") end @@ -186,40 +192,36 @@ defmodule Mix.Tasks.Pleroma.Config do end end - def run(["delete", group]) do - start_pleroma() + defp do_rollback(opts) do + steps = opts[:steps] || 1 - group = maybe_atomize(group) + case Pleroma.Config.Versioning.rollback(steps) do + {:ok, _} -> + shell_info("Success rollback") - with true <- group_exists?(group) do - shell_info("The following settings will be removed from ConfigDB:\n") - dump_group(group) + {:error, :no_current_version} -> + shell_error("No version to rollback") - if shell_prompt("Are you sure you want to continue?", "n") in ~w(Yn Y y) do - delete_group(group) - else - shell_error("No changes made.") - end - else - _ -> shell_error("No settings in ConfigDB for #{inspect(group)}. Aborting.") + {:error, :rollback_not_possible} -> + shell_error("Rollback not possible. Incorrect steps value.") + + {:error, _, _, _} -> + shell_error("Problem with backup. 
Rollback not possible.") + + error -> + shell_error("error occuried: #{inspect(error)}") end end - @spec migrate_to_db(Path.t() | nil) :: any() - def migrate_to_db(file_path \\ nil) do + defp migrate_to_db(opts) do with :ok <- Pleroma.Config.DeprecationWarnings.warn() do - config_file = - if file_path do - file_path - else - if Pleroma.Config.get(:release) do - Pleroma.Config.get(:config_path) - else - "config/#{Pleroma.Config.get(:env)}.secret.exs" - end - end + config_file = opts[:config] || Pleroma.Application.config_path() - do_migrate_to_db(config_file) + if File.exists?(config_file) do + do_migrate_to_db(config_file) + else + shell_info("To migrate settings, you must define custom settings in #{config_file}.") + end else _ -> shell_error("Migration is not allowed until all deprecation warnings have been resolved.") @@ -227,33 +229,9 @@ defmodule Mix.Tasks.Pleroma.Config do end defp do_migrate_to_db(config_file) do - if File.exists?(config_file) do - shell_info("Migrating settings from file: #{Path.expand(config_file)}") - truncatedb() - - custom_config = - config_file - |> read_file() - |> elem(0) - - custom_config - |> Keyword.keys() - |> Enum.each(&create(&1, custom_config)) - else - shell_info("To migrate settings, you must define custom settings in #{config_file}.") - end - end - - defp create(group, settings) do - group - |> Pleroma.Config.Loader.filter_group(settings) - |> Enum.each(fn {key, value} -> - {:ok, _} = ConfigDB.update_or_create(%{group: group, key: key, value: value}) - - shell_info("Settings for key #{key} migrated.") - end) - - shell_info("Settings for group #{inspect(group)} migrated.") + shell_info("Migrating settings from file: #{Path.expand(config_file)}") + {:ok, _} = Pleroma.Config.Versioning.migrate(config_file) + shell_info("Settings migrated.") end defp migrate_from_db(opts) do @@ -296,48 +274,47 @@ defmodule Mix.Tasks.Pleroma.Config do end defp write_config(file, path, opts) do - IO.write(file, config_header()) - - ConfigDB - |> Repo.all() - |> Enum.each(&write_and_delete(&1, file, opts[:delete])) + IO.write(file, Pleroma.Config.Loader.config_header()) - :ok = File.close(file) - System.cmd("mix", ["format", path]) - end - - if Code.ensure_loaded?(Config.Reader) do - defp config_header, do: "import Config\r\n\r\n" - defp read_file(config_file), do: Config.Reader.read_imports!(config_file) - else - defp config_header, do: "use Mix.Config\r\n\r\n" - defp read_file(config_file), do: Mix.Config.eval!(config_file) - end - - defp write_and_delete(config, file, delete?) do - config - |> write(file) - |> delete(delete?) - end + changes = + ConfigDB + |> Repo.all() + |> Enum.reduce([], fn %{group: group} = config, acc -> + group_str = inspect(group) + value = inspect(config.value, limit: :infinity) + + msg = + if group in ConfigDB.groups_without_keys() do + IO.write(file, "config #{group_str}, #{value}\r\n\r\n") + "config #{group_str} was deleted." + else + key_str = inspect(config.key) + IO.write(file, "config #{group_str}, #{key_str}, #{value}\r\n\r\n") + "config #{group_str}, #{key_str} was deleted." 
+ end - defp write(config, file) do - value = inspect(config.value, limit: :infinity) + if opts[:delete] do + shell_info(msg) - IO.write(file, "config #{inspect(config.group)}, #{inspect(config.key)}, #{value}\r\n\r\n") + change = + config + |> Map.take([:group, :key]) + |> Map.put(:delete, true) - config - end + [change | acc] + else + acc + end + end) - defp delete(config, true) do - {:ok, _} = Repo.delete(config) + if Keyword.get(opts, :delete, false) and changes != [] do + Pleroma.Config.Versioning.new_version(changes) + end - shell_info( - "config #{inspect(config.group)}, #{inspect(config.key)} was deleted from the ConfigDB." - ) + :ok = File.close(file) + System.cmd("mix", ["format", path]) end - defp delete(_config, _), do: :ok - defp dump(%ConfigDB{} = config) do value = inspect(config.value, limit: :infinity) @@ -346,31 +323,12 @@ defmodule Mix.Tasks.Pleroma.Config do defp dump(_), do: :noop - defp dump_group(group) when is_atom(group) do - group - |> ConfigDB.get_all_by_group() - |> Enum.each(&dump/1) - end - - defp group_exists?(group) do - group - |> ConfigDB.get_all_by_group() - |> Enum.any?() - end - - defp key_exists?(group, key) do - group - |> ConfigDB.get_by_group_and_key(key) - |> is_nil - |> Kernel.!() - end - defp maybe_atomize(arg) when is_atom(arg), do: arg defp maybe_atomize(":" <> arg), do: maybe_atomize(arg) defp maybe_atomize(arg) when is_binary(arg) do - if ConfigDB.module_name?(arg) do + if Pleroma.Config.Converter.module_name?(arg) do String.to_existing_atom("Elixir." <> arg) else String.to_atom(arg) @@ -387,23 +345,4 @@ defmodule Mix.Tasks.Pleroma.Config do ) end end - - defp delete_key(group, key) do - check_configdb(fn -> - ConfigDB.delete(%{group: group, key: key}) - end) - end - - defp delete_group(group) do - check_configdb(fn -> - group - |> ConfigDB.get_all_by_group() - |> Enum.each(&ConfigDB.delete/1) - end) - end - - defp truncatedb do - Ecto.Adapters.SQL.query!(Repo, "TRUNCATE config;") - Ecto.Adapters.SQL.query!(Repo, "ALTER SEQUENCE config_id_seq RESTART;") - end end diff --git a/lib/mix/tasks/pleroma/docs.ex b/lib/mix/tasks/pleroma/docs.ex index 45cca1c74..e0e9834f4 100644 --- a/lib/mix/tasks/pleroma/docs.ex +++ b/lib/mix/tasks/pleroma/docs.ex @@ -32,7 +32,7 @@ defmodule Mix.Tasks.Pleroma.Docs do defp do_run(implementation) do start_pleroma() - with descriptions <- Pleroma.Config.Loader.read("config/description.exs"), + with descriptions <- Pleroma.Config.Loader.read!("config/description.exs"), {:ok, file_path} <- Pleroma.Docs.Generator.process( implementation, diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index 06d399b2e..afe605f01 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -5,8 +5,6 @@ defmodule Pleroma.Application do use Application - import Cachex.Spec - alias Pleroma.Config require Logger @@ -15,12 +13,17 @@ defmodule Pleroma.Application do @version Mix.Project.config()[:version] @repository Mix.Project.config()[:source_url] @mix_env Mix.env() + @dynamic_supervisor Pleroma.Application.Supervisor + + @type env() :: :test | :benchmark | :dev | :prod def name, do: @name def version, do: @version def named_version, do: @name <> " " <> @version def repository, do: @repository + def dynamic_supervisor, do: @dynamic_supervisor + @spec user_agent() :: String.t() def user_agent do if Process.whereis(Pleroma.Web.Endpoint) do case Config.get([:http, :user_agent], :default) do @@ -37,9 +40,43 @@ defmodule Pleroma.Application do end end - # See 
http://elixir-lang.org/docs/stable/elixir/Application.html - # for more information on OTP Applications + @spec config_path() :: Path.t() + def config_path do + if Config.get(:release) do + Config.get(:config_path) + else + Config.get(:config_path_in_test) || "config/#{@mix_env}.secret.exs" + end + end + + @doc """ + Under main supervisor is started DynamicSupervisor, which later starts pleroma startup dependencies. + Pleroma start is splitted into three `phases`: + - running prestart requirements (runtime compilation, warnings, deprecations, monitoring, etc.) + - loading and updating environment (if database config is used and enabled) + - starting dependencies + """ + @impl true def start(_type, _args) do + children = [ + {DynamicSupervisor, strategy: :one_for_one, name: @dynamic_supervisor}, + {Pleroma.Application.ConfigDependentDeps, [dynamic_supervisor: @dynamic_supervisor]}, + Pleroma.Repo + ] + + {:ok, main_supervisor} = + Supervisor.start_link(children, strategy: :one_for_one, name: Pleroma.Supervisor) + + run_prestart_requirements() + + Pleroma.Application.Environment.load_from_db_and_update(pleroma_start: true) + + Pleroma.Application.StartUpDependencies.start_all(@mix_env) + + {:ok, main_supervisor} + end + + defp run_prestart_requirements do # Scrubbers are compiled at runtime and therefore will cause a conflict # every time the application is restarted, so we disable module # conflicts at runtime @@ -47,72 +84,26 @@ defmodule Pleroma.Application do # Disable warnings_as_errors at runtime, it breaks Phoenix live reload # due to protocol consolidation warnings Code.compiler_options(warnings_as_errors: false) + + # compilation in runtime + Pleroma.HTML.compile_scrubbers() + compile_custom_modules() + Pleroma.Docs.JSON.compile() + + # telemetry and prometheus Pleroma.Telemetry.Logger.attach() + setup_instrumenters() + Config.Holder.save_default() - Pleroma.HTML.compile_scrubbers() - Pleroma.Config.Oban.warn() + Config.DeprecationWarnings.warn() Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled() - Pleroma.ApplicationRequirements.verify!() - setup_instrumenters() - load_custom_modules() - Pleroma.Docs.JSON.compile() - limiters_setup() - adapter = Application.get_env(:tesla, :adapter) - - if adapter == Tesla.Adapter.Gun do - if version = Pleroma.OTPVersion.version() do - [major, minor] = - version - |> String.split(".") - |> Enum.map(&String.to_integer/1) - |> Enum.take(2) - - if (major == 22 and minor < 2) or major < 22 do - raise " - !!!OTP VERSION WARNING!!! - You are using gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificates chains. Please update your Erlang/OTP to at least 22.2. - " - end - else - raise " - !!!OTP VERSION WARNING!!! - To support correct handling of unordered certificates chains - OTP version must be > 22.2. 
- " - end - end - - # Define workers and child supervisors to be supervised - children = - [ - Pleroma.Repo, - Config.TransferTask, - Pleroma.Emoji, - Pleroma.Web.Plugs.RateLimiter.Supervisor - ] ++ - cachex_children() ++ - http_children(adapter, @mix_env) ++ - [ - Pleroma.Stats, - Pleroma.JobQueueMonitor, - {Majic.Pool, [name: Pleroma.MajicPool, pool_size: Config.get([:majic_pool, :size], 2)]}, - {Oban, Config.get(Oban)}, - Pleroma.Web.Endpoint - ] ++ - task_children(@mix_env) ++ - dont_run_in_test(@mix_env) ++ - chat_child(chat_enabled?()) ++ - [Pleroma.Gopher.Server] - - # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html - # for other strategies and supported options - opts = [strategy: :one_for_one, name: Pleroma.Supervisor] - result = Supervisor.start_link(children, opts) + limiters_setup() set_postgres_server_version() - result + Pleroma.Application.Requirements.verify!() end defp set_postgres_server_version do @@ -132,7 +123,7 @@ defmodule Pleroma.Application do :persistent_term.put({Pleroma.Repo, :postgres_version}, version) end - def load_custom_modules do + defp compile_custom_modules do dir = Config.get([:modules, :runtime_dir]) if dir && File.exists?(dir) do @@ -177,128 +168,6 @@ defmodule Pleroma.Application do PrometheusPhx.setup() end - defp cachex_children do - [ - build_cachex("used_captcha", ttl_interval: seconds_valid_interval()), - build_cachex("user", default_ttl: 25_000, ttl_interval: 1000, limit: 2500), - build_cachex("object", default_ttl: 25_000, ttl_interval: 1000, limit: 2500), - build_cachex("rich_media", default_ttl: :timer.minutes(120), limit: 5000), - build_cachex("scrubber", limit: 2500), - build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500), - build_cachex("web_resp", limit: 2500), - build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10), - build_cachex("failed_proxy_url", limit: 2500), - build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000), - build_cachex("chat_message_id_idempotency_key", - expiration: chat_message_id_idempotency_key_expiration(), - limit: 500_000 - ) - ] - end - - defp emoji_packs_expiration, - do: expiration(default: :timer.seconds(5 * 60), interval: :timer.seconds(60)) - - defp idempotency_expiration, - do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60)) - - defp chat_message_id_idempotency_key_expiration, - do: expiration(default: :timer.minutes(2), interval: :timer.seconds(60)) - - defp seconds_valid_interval, - do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid])) - - @spec build_cachex(String.t(), keyword()) :: map() - def build_cachex(type, opts), - do: %{ - id: String.to_atom("cachex_" <> type), - start: {Cachex, :start_link, [String.to_atom(type <> "_cache"), opts]}, - type: :worker - } - - defp chat_enabled?, do: Config.get([:chat, :enabled]) - - defp dont_run_in_test(env) when env in [:test, :benchmark], do: [] - - defp dont_run_in_test(_) do - [ - {Registry, - [ - name: Pleroma.Web.Streamer.registry(), - keys: :duplicate, - partitions: System.schedulers_online() - ]} - ] ++ background_migrators() - end - - defp background_migrators do - [ - Pleroma.Migrators.HashtagsTableMigrator - ] - end - - defp chat_child(true) do - [ - Pleroma.Web.ChatChannel.ChatChannelState, - {Phoenix.PubSub, [name: Pleroma.PubSub, adapter: Phoenix.PubSub.PG2]} - ] - end - - defp chat_child(_), do: [] - - defp task_children(:test) do - [ - %{ - id: :web_push_init, - start: {Task, :start_link, [&Pleroma.Web.Push.init/0]}, - 
restart: :temporary - } - ] - end - - defp task_children(_) do - [ - %{ - id: :web_push_init, - start: {Task, :start_link, [&Pleroma.Web.Push.init/0]}, - restart: :temporary - }, - %{ - id: :internal_fetch_init, - start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]}, - restart: :temporary - } - ] - end - - # start hackney and gun pools in tests - defp http_children(_, :test) do - http_children(Tesla.Adapter.Hackney, nil) ++ http_children(Tesla.Adapter.Gun, nil) - end - - defp http_children(Tesla.Adapter.Hackney, _) do - pools = [:federation, :media] - - pools = - if Config.get([Pleroma.Upload, :proxy_remote]) do - [:upload | pools] - else - pools - end - - for pool <- pools do - options = Config.get([:hackney_pools, pool]) - :hackney_pool.child_spec(pool, options) - end - end - - defp http_children(Tesla.Adapter.Gun, _) do - Pleroma.Gun.ConnectionPool.children() ++ - [{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}] - end - - defp http_children(_, _), do: [] - @spec limiters_setup() :: :ok def limiters_setup do config = Config.get(ConcurrentLimiter, []) diff --git a/lib/pleroma/application/chat_supervisor.ex b/lib/pleroma/application/chat_supervisor.ex new file mode 100644 index 000000000..4b6f0e740 --- /dev/null +++ b/lib/pleroma/application/chat_supervisor.ex @@ -0,0 +1,19 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Application.ChatSupervisor do + use Supervisor + + def start_link(_) do + Supervisor.start_link(__MODULE__, :no_args) + end + + def init(_) do + [ + Pleroma.Web.ChatChannel.ChatChannelState, + {Phoenix.PubSub, [name: Pleroma.PubSub, adapter: Phoenix.PubSub.PG2]} + ] + |> Supervisor.init(strategy: :one_for_one) + end +end diff --git a/lib/pleroma/application/config_dependent_deps.ex b/lib/pleroma/application/config_dependent_deps.ex new file mode 100644 index 000000000..c6b26affd --- /dev/null +++ b/lib/pleroma/application/config_dependent_deps.ex @@ -0,0 +1,244 @@ +# # Pleroma: A lightweight social networking server +# # Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# # SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Application.ConfigDependentDeps do + use GenServer + + require Logger + + @config_path_mods_relation [ + {{:pleroma, :chat}, Pleroma.Application.ChatSupervisor}, + {{:pleroma, Oban}, Oban}, + {{:pleroma, :rate_limit}, Pleroma.Web.Plugs.RateLimiter.Supervisor}, + {{:pleroma, :streamer}, Pleroma.Web.Streamer.registry()}, + {{:pleroma, :pools}, Pleroma.Gun.GunSupervisor}, + {{:pleroma, :connections_pool}, Pleroma.Gun.GunSupervisor}, + {{:pleroma, :hackney_pools}, Pleroma.HTTP.HackneySupervisor}, + {{:pleroma, :gopher}, Pleroma.Gopher.Server}, + {{:pleroma, Pleroma.Captcha, [:seconds_valid]}, Pleroma.Web.Endpoint}, + {{:pleroma, Pleroma.Upload, [:proxy_remote]}, + Pleroma.Application.StartUpDependencies.adapter_module()}, + {{:pleroma, :instance, [:upload_limit]}, Pleroma.Web.Endpoint}, + {{:pleroma, :fed_sockets, [:enabled]}, Pleroma.Web.Endpoint}, + {:eshhd, :eshhd}, + {:ex_aws, :ex_aws} + ] + + def start_link(opts) do + opts = Keyword.put_new(opts, :relations, @config_path_mods_relation) + + GenServer.start_link(__MODULE__, opts, name: opts[:name] || __MODULE__) + end + + @impl true + def init(opts) do + init_state = %{ + dynamic_supervisor: opts[:dynamic_supervisor], + relations: opts[:relations], + reboot_paths: [], + pids: %{} + } + + {:ok, init_state} + end 
+ + def start_dependency(module, server \\ __MODULE__) do + GenServer.call(server, {:start_dependency, module}) + end + + def need_reboot?(server \\ __MODULE__) do + GenServer.call(server, :need_reboot?) + end + + def restart_dependencies(server \\ __MODULE__) do + GenServer.call(server, :restart_dependencies) + end + + def clear_state(server \\ __MODULE__) do + GenServer.call(server, :clear_state) + end + + def save_config_paths_for_restart(changes, server \\ __MODULE__) do + GenServer.call(server, {:save_config_paths, changes}) + end + + @impl true + def handle_call({:start_dependency, module}, _, state) do + {result, state} = + with {pid, state} when is_pid(pid) <- start_module(module, state) do + {{:ok, pid}, state} + else + error -> {error, state} + end + + {:reply, result, state} + end + + @impl true + def handle_call(:need_reboot?, _, state) do + {:reply, state[:reboot_paths] != [], state} + end + + @impl true + def handle_call(:restart_dependencies, _, state) do + {paths, state} = Map.get_and_update(state, :reboot_paths, &{&1, []}) + started_apps = Application.started_applications() + + {result, state} = + Enum.reduce_while(paths, {:ok, state}, fn + path, {:ok, acc} when is_tuple(path) -> + case restart(path, acc, acc[:pids][path], with_terminate: true) do + {pid, state} when is_pid(pid) -> + {:cont, {:ok, state}} + + :ignore -> + Logger.info("path #{inspect(path)} is ignored.") + {:cont, {:ok, acc}} + + error -> + {:halt, {error, acc}} + end + + app, {:ok, acc} + when is_atom(app) and app not in [:logger, :quack, :pleroma, :prometheus, :postgrex] -> + restart_app(app, started_apps) + {:cont, {:ok, acc}} + end) + + {:reply, result, state} + end + + @impl true + def handle_call(:clear_state, _, state) do + state = + state + |> Map.put(:reboot_paths, []) + |> Map.put(:pids, %{}) + + {:reply, :ok, state} + end + + @impl true + def handle_call({:save_config_paths, changes}, _, state) do + paths = + Enum.reduce(changes, state[:reboot_paths], fn + %{group: group, key: key, value: value}, acc -> + with {path, _} <- find_relation(state[:relations], group, key, value) do + if path not in acc do + [path | acc] + else + acc + end + else + _ -> + acc + end + end) + + {:reply, paths, put_in(state[:reboot_paths], paths)} + end + + @impl true + def handle_info({:DOWN, _ref, :process, pid, _reason}, state) do + updated_state = + with {path, ^pid} <- + Enum.find(state[:pids], fn {_, registered_pid} -> registered_pid == pid end) do + {_new_pid, new_state} = restart(path, state, pid) + new_state + else + _ -> state + end + + {:noreply, updated_state} + end + + defp start_module(module, state) do + with {:ok, relations} <- find_relations(state[:relations], module) do + start_module(module, relations, state) + end + end + + defp start_module(module, relations, state) do + spec = + module + |> Pleroma.Application.StartUpDependencies.spec() + |> Supervisor.child_spec(restart: :temporary) + + with {:ok, pid} <- + DynamicSupervisor.start_child( + state[:dynamic_supervisor], + spec + ) do + pids = Map.new(relations, fn {path, _} -> {path, pid} end) + Process.monitor(pid) + {pid, put_in(state[:pids], Map.merge(state[:pids], pids))} + end + end + + defp restart(path, state, pid, opts \\ []) + + defp restart(path, state, nil, _) do + with {_, module} <- find_relation(state[:relations], path) do + start_module(module, state) + end + end + + defp restart(path, state, pid, opts) when is_pid(pid) do + with {_, module} <- find_relation(state[:relations], path), + {:ok, relations} <- 
find_relations(state[:relations], module) do + if opts[:with_terminate] do + :ok = DynamicSupervisor.terminate_child(state[:dynamic_supervisor], pid) + end + + paths_for_remove = Enum.map(relations, fn {path, _} -> path end) + state = put_in(state[:pids], Map.drop(state[:pids], paths_for_remove)) + + start_module(module, relations, state) + end + end + + defp restart_app(app, started_applications) do + with {^app, _, _} <- List.keyfind(started_applications, app, 0) do + :ok = Application.stop(app) + :ok = Application.start(app) + else + nil -> + Logger.info("#{app} is not started.") + + error -> + error + |> inspect() + |> Logger.error() + end + end + + defp find_relations(relations, module) do + case Enum.filter(relations, fn {_, mod} -> mod == module end) do + [] -> + {:error, :relations_not_found} + + relations -> + {:ok, relations} + end + end + + defp find_relation(relations, group, key, value) do + Enum.find(relations, fn + {g, _} when is_atom(g) -> + g == group + + {{g, k}, _} -> + g == group and k == key + + {{g, k, subkeys}, _} -> + g == group and k == key and Enum.any?(Keyword.keys(value), &(&1 in subkeys)) + end) + end + + def find_relation(relations, path) do + with nil <- Enum.find(relations, fn {key, _} -> key == path end) do + {:error, :relation_not_found} + end + end +end diff --git a/lib/pleroma/application/environment.ex b/lib/pleroma/application/environment.ex new file mode 100644 index 000000000..589be4726 --- /dev/null +++ b/lib/pleroma/application/environment.ex @@ -0,0 +1,103 @@ +# # Pleroma: A lightweight social networking server +# # Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# # SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Application.Environment do + @moduledoc """ + Overwrites environment config with settings from config file or database. + """ + + require Logger + + @doc """ + Method is called on pleroma start. + Config dependent parts don't require restart, because are not started yet. + But started apps need restart. 
+ """ + @spec load_from_db_and_update(keyword()) :: :ok + def load_from_db_and_update(opts \\ []) do + Pleroma.ConfigDB.all() + |> update(opts) + end + + @spec update([Pleroma.ConfigDB.t()], keyword()) :: :ok + def update(changes, opts \\ []) when is_list(changes) do + if Pleroma.Config.get(:configurable_from_database) do + defaults = Pleroma.Config.Holder.default_config() + + changes + |> filter_logger() + |> prepare_logger_changes(defaults) + |> Enum.each(&configure_logger/1) + + changes + |> Pleroma.ConfigDB.merge_changes_with_defaults(defaults) + |> Enum.each(&update_env(&1)) + + cond do + opts[:pleroma_start] -> + # restart only apps on pleroma start + changes + |> Enum.filter(fn %{group: group} -> + group not in [:logger, :quack, :pleroma, :prometheus, :postgrex] + end) + |> Pleroma.Application.ConfigDependentDeps.save_config_paths_for_restart() + + Pleroma.Application.ConfigDependentDeps.restart_dependencies() + + opts[:only_update] -> + Pleroma.Application.ConfigDependentDeps.save_config_paths_for_restart(changes) + + true -> + nil + end + end + + :ok + end + + defp filter_logger(changes) do + Enum.filter(changes, fn %{group: group} -> group in [:logger, :quack] end) + end + + defp prepare_logger_changes(changes, defaults) do + Enum.map(changes, fn %{group: group} = change -> + {change, Pleroma.ConfigDB.merge_change_value_with_default(change, defaults[group])} + end) + end + + defp configure_logger({%{group: :quack}, merged_value}) do + Logger.configure_backend(Quack.Logger, merged_value) + end + + defp configure_logger({%{group: :logger} = change, merged_value}) do + if change.value[:backends] do + Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1) + + Enum.each(merged_value[:backends], &Logger.add_backend/1) + end + + if change.value[:console] do + console = merged_value[:console] + console = put_in(console[:format], console[:format] <> "\n") + + Logger.configure_backend(:console, console) + end + + if change.value[:ex_syslogger] do + Logger.configure_backend({ExSyslogger, :ex_syslogger}, merged_value[:ex_syslogger]) + end + + Logger.configure(merged_value) + end + + defp update_env(%{group: group, key: key, value: nil}), do: Application.delete_env(group, key) + + defp update_env(%{group: group, value: config} = change) do + if group in Pleroma.ConfigDB.groups_without_keys() do + Application.put_all_env([{group, config}]) + else + Application.put_env(group, change.key, config) + end + end +end diff --git a/lib/pleroma/application_requirements.ex b/lib/pleroma/application/requirements.ex index c412dec5e..644769557 100644 --- a/lib/pleroma/application_requirements.ex +++ b/lib/pleroma/application/requirements.ex @@ -2,7 +2,7 @@ # Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> # SPDX-License-Identifier: AGPL-3.0-only -defmodule Pleroma.ApplicationRequirements do +defmodule Pleroma.Application.Requirements do @moduledoc """ The module represents the collection of validations to runs before start server. """ @@ -18,6 +18,8 @@ defmodule Pleroma.ApplicationRequirements do @spec verify!() :: :ok | VerifyError.t() def verify! 
do + adapter = Application.get_env(:tesla, :adapter) + :ok |> check_system_commands!() |> check_confirmation_accounts!() @@ -25,11 +27,12 @@ defmodule Pleroma.ApplicationRequirements do |> check_welcome_message_config!() |> check_rum!() |> check_repo_pool_size!() - |> handle_result() + |> check_otp_version!(adapter) + |> handle_result!() end - defp handle_result(:ok), do: :ok - defp handle_result({:error, message}), do: raise(VerifyError, message: message) + defp handle_result!(:ok), do: :ok + defp handle_result!({:error, message}), do: raise(VerifyError, message: message) defp check_welcome_message_config!(:ok) do if Pleroma.Config.get([:welcome, :email, :enabled], false) and @@ -164,9 +167,9 @@ defmodule Pleroma.ApplicationRequirements do defp check_system_commands!(:ok) do filter_commands_statuses = [ - check_filter(Pleroma.Upload.Filters.Exiftool, "exiftool"), - check_filter(Pleroma.Upload.Filters.Mogrify, "mogrify"), - check_filter(Pleroma.Upload.Filters.Mogrifun, "mogrify") + check_filter!(Pleroma.Upload.Filters.Exiftool, "exiftool"), + check_filter!(Pleroma.Upload.Filters.Mogrify, "mogrify"), + check_filter!(Pleroma.Upload.Filters.Mogrifun, "mogrify") ] preview_proxy_commands_status = @@ -217,7 +220,7 @@ defmodule Pleroma.ApplicationRequirements do defp check_repo_pool_size!(result), do: result - defp check_filter(filter, command_required) do + defp check_filter!(filter, command_required) do filters = Config.get([Pleroma.Upload, :filters]) if filter in filters and not Pleroma.Utils.command_available?(command_required) do @@ -231,4 +234,32 @@ defmodule Pleroma.ApplicationRequirements do true end end + + defp check_otp_version!(:ok, Tesla.Adapter.Gun) do + if version = Pleroma.OTPVersion.version() do + [major, minor] = + version + |> String.split(".") + |> Enum.map(&String.to_integer/1) + |> Enum.take(2) + + if (major == 22 and minor < 2) or major < 22 do + Logger.error(" + !!!OTP VERSION ERROR!!! + You are using gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificates chains. Please update your Erlang/OTP to at least 22.2. + ") + {:error, "OTP version error"} + else + :ok + end + else + Logger.error(" + !!!OTP VERSION ERROR!!! + To support correct handling of unordered certificates chains - OTP version must be > 22.2. 
+ ") + {:error, "OTP version error"} + end + end + + defp check_otp_version!(result, _), do: result end diff --git a/lib/pleroma/application/start_up_dependencies.ex b/lib/pleroma/application/start_up_dependencies.ex new file mode 100644 index 000000000..ce554a0fc --- /dev/null +++ b/lib/pleroma/application/start_up_dependencies.ex @@ -0,0 +1,182 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Application.StartUpDependencies do + alias Pleroma.Config + alias Pleroma.Web.Endpoint + + require Cachex.Spec + require Logger + + @type config_path() :: {atom(), atom()} | {atom(), atom(), [atom()]} + @type relation() :: {config_path(), module()} + + @spec start_all(Pleroma.Application.env()) :: + :ok | {:error, {:already_started, pid()} | :max_children | term()} + def start_all(env) do + with :ok <- start_common_deps(env), + :ok <- start_config_dependent_deps(env) do + :ok + end + end + + @spec adapter_module() :: module() + def adapter_module do + if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do + Pleroma.Gun.GunSupervisor + else + Pleroma.HTTP.HackneySupervisor + end + end + + @spec spec(module()) :: module() | {module(), keyword()} + def spec(Oban), do: {Oban, Config.get(Oban)} + + def spec(Pleroma.Web.StreamerRegistry) do + {Registry, + [ + name: Pleroma.Web.Streamer.registry(), + keys: :duplicate, + partitions: System.schedulers_online() + ]} + end + + def spec(child), do: child + + @spec cachex_spec({String.t(), keyword()}) :: :supervisor.child_spec() + def cachex_spec({type, opts}) do + %{ + id: String.to_atom("cachex_" <> type), + start: {Cachex, :start_link, [String.to_atom(type <> "_cache"), opts]}, + type: :worker + } + end + + defp start_common_deps(env) do + fun = fn child -> + DynamicSupervisor.start_child(Pleroma.Application.dynamic_supervisor(), spec(child)) + end + + [ + Pleroma.Emoji, + Pleroma.Stats, + Pleroma.JobQueueMonitor, + {Majic.Pool, [name: Pleroma.MajicPool, pool_size: Config.get([:majic_pool, :size], 2)]}, + %{ + id: :web_push_init, + start: {Task, :start_link, [&Pleroma.Web.Push.init/0]}, + restart: :temporary + } + ] + |> add_cachex_deps() + |> maybe_add_init_internal_fetch_actor_task(env) + |> maybe_add_background_migrator(env) + |> start_while(fun) + end + + defp start_config_dependent_deps(env) do + fun = fn child -> Pleroma.Application.ConfigDependentDeps.start_dependency(child) end + + [ + Pleroma.Web.Plugs.RateLimiter.Supervisor, + Oban, + Endpoint, + Pleroma.Gopher.Server + ] + |> add_http_children(env) + |> maybe_add(:streamer, env) + |> maybe_add_chat_child() + |> start_while(fun) + end + + defp start_while(deps, fun) do + Enum.reduce_while(deps, :ok, fn child, acc -> + case fun.(child) do + {:ok, _} -> + {:cont, acc} + + # consider this behavior is normal + :ignore -> + Logger.info("#{inspect(child)} is ignored.") + {:cont, acc} + + error -> + Logger.error("Child #{inspect(child)} can't be started. 
#{inspect(error)}") + {:halt, error} + end + end) + end + + @spec cachex_deps() :: [tuple()] + def cachex_deps do + captcha_clean_up_interval = + [Pleroma.Captcha, :seconds_valid] + |> Config.get!() + |> :timer.seconds() + + [ + {"used_captcha", expiration: Cachex.Spec.expiration(interval: captcha_clean_up_interval)}, + {"user", expiration: cachex_expiration(25_000, 1000), limit: 2500}, + {"object", expiration: cachex_expiration(25_000, 1000), limit: 2500}, + {"rich_media", + expiration: Cachex.Spec.expiration(default: :timer.minutes(120)), limit: 5000}, + {"scrubber", limit: 2500}, + {"idempotency", expiration: cachex_expiration(21_600, 60), limit: 2500}, + {"web_resp", limit: 2500}, + {"emoji_packs", expiration: cachex_expiration(300, 60), limit: 10}, + {"failed_proxy_url", limit: 2500}, + {"banned_urls", + expiration: Cachex.Spec.expiration(default: :timer.hours(24 * 30)), limit: 5_000}, + {"chat_message_id_idempotency_key", + expiration: cachex_expiration(:timer.minutes(2), :timer.seconds(60)), limit: 500_000} + ] + end + + defp add_cachex_deps(application_deps) do + cachex_deps() + |> Enum.reduce(application_deps, fn cachex_init_args, acc -> + [cachex_spec(cachex_init_args) | acc] + end) + end + + defp cachex_expiration(default, interval) do + Cachex.Spec.expiration(default: :timer.seconds(default), interval: :timer.seconds(interval)) + end + + defp maybe_add_init_internal_fetch_actor_task(children, :test), do: children + + defp maybe_add_init_internal_fetch_actor_task(children, _) do + [ + %{ + id: :internal_fetch_init, + start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]}, + restart: :temporary + } + | children + ] + end + + defp maybe_add_background_migrator(children, env) when env in [:test, :benchmark], do: children + + defp maybe_add_background_migrator(children, _) do + [Pleroma.Migrators.HashtagsTableMigrator | children] + end + + defp maybe_add(children, _, env) when env in [:test, :benchmark], do: children + defp maybe_add(children, :streamer, _), do: [Pleroma.Web.Streamer.registry() | children] + + defp add_http_children(children, :test) do + [Pleroma.HTTP.HackneySupervisor, Pleroma.Gun.GunSupervisor | children] + end + + defp add_http_children(children, _), do: [adapter_module() | children] + + defp maybe_add_chat_child(children) do + if Config.get([:chat, :enabled]) do + [Pleroma.Application.ChatSupervisor | children] + else + children + end + end +end diff --git a/lib/pleroma/config/converter.ex b/lib/pleroma/config/converter.ex new file mode 100644 index 000000000..86d7ea8e2 --- /dev/null +++ b/lib/pleroma/config/converter.ex @@ -0,0 +1,195 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Config.Converter do + @moduledoc """ + Converts json structures into elixir structures and types and vice versa. 
+ """ + @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term() + def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do + arguments = + Enum.map(args, fn arg -> + if String.contains?(arg, ["{", "}"]) do + {elem, []} = Code.eval_string(arg) + elem + else + to_elixir_types(arg) + end + end) + + {:args, arguments} + end + + def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do + {:proxy_url, {string_to_elixir_types!(type), parse_host(host), port}} + end + + def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do + {partial_chain, []} = + entity + |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "") + |> Code.eval_string() + + {:partial_chain, partial_chain} + end + + def to_elixir_types(%{"tuple" => entity}) do + Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1))) + end + + def to_elixir_types(entity) when is_map(entity) do + Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end) + end + + def to_elixir_types(entity) when is_list(entity) do + Enum.map(entity, &to_elixir_types/1) + end + + def to_elixir_types(entity) when is_binary(entity) do + entity + |> String.trim() + |> string_to_elixir_types!() + end + + def to_elixir_types(entity), do: entity + + defp parse_host("localhost"), do: :localhost + + defp parse_host(host) do + charlist = to_charlist(host) + + case :inet.parse_address(charlist) do + {:error, :einval} -> + charlist + + {:ok, ip} -> + ip + end + end + + @spec string_to_elixir_types!(String.t()) :: + atom() | Regex.t() | module() | String.t() | no_return() + def string_to_elixir_types!("~r" <> _pattern = regex) do + pattern = + ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u + + delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}] + + with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <- + Regex.named_captures(pattern, regex), + {:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter), + {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do + result + end + end + + def string_to_elixir_types!(":" <> atom), do: String.to_atom(atom) + + def string_to_elixir_types!(value) do + if module_name?(value) do + String.to_existing_atom("Elixir." 
<> value) + else + value + end + end + + defp find_valid_delimiter([], _string, _) do + raise(ArgumentError, message: "valid delimiter for Regex expression not found") + end + + defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter) + when is_tuple(delimiter) do + if String.contains?(pattern, closing) do + find_valid_delimiter(others, pattern, regex_delimiter) + else + {:ok, {leading, closing}} + end + end + + defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do + if String.contains?(pattern, delimiter) do + find_valid_delimiter(others, pattern, regex_delimiter) + else + {:ok, {delimiter, delimiter}} + end + end + + @spec module_name?(String.t()) :: boolean() + def module_name?(string) do + Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or + string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"] + end + + @spec to_json_types(term()) :: map() | list() | boolean() | String.t() | integer() + def to_json_types(entity) when is_list(entity) do + Enum.map(entity, &to_json_types/1) + end + + def to_json_types(%Regex{} = entity), do: inspect(entity) + + def to_json_types(entity) when is_map(entity) do + Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end) + end + + def to_json_types({:args, args}) when is_list(args) do + arguments = + Enum.map(args, fn + arg when is_tuple(arg) -> inspect(arg) + arg -> to_json_types(arg) + end) + + %{"tuple" => [":args", arguments]} + end + + def to_json_types({:proxy_url, {type, :localhost, port}}) do + %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]} + end + + def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do + ip = + host + |> :inet_parse.ntoa() + |> to_string() + + %{ + "tuple" => [ + ":proxy_url", + %{"tuple" => [to_json_types(type), ip, port]} + ] + } + end + + def to_json_types({:proxy_url, {type, host, port}}) do + %{ + "tuple" => [ + ":proxy_url", + %{"tuple" => [to_json_types(type), to_string(host), port]} + ] + } + end + + def to_json_types({:partial_chain, entity}), + do: %{"tuple" => [":partial_chain", inspect(entity)]} + + def to_json_types(entity) when is_tuple(entity) do + value = + entity + |> Tuple.to_list() + |> to_json_types() + + %{"tuple" => value} + end + + def to_json_types(entity) when is_binary(entity), do: entity + + def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do + entity + end + + def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do + ":#{entity}" + end + + def to_json_types(entity) when is_atom(entity), do: inspect(entity) +end diff --git a/lib/pleroma/config/deprecation_warnings.ex b/lib/pleroma/config/deprecation_warnings.ex index 24aa5993b..19868d174 100644 --- a/lib/pleroma/config/deprecation_warnings.ex +++ b/lib/pleroma/config/deprecation_warnings.ex @@ -41,7 +41,8 @@ defmodule Pleroma.Config.DeprecationWarnings do :ok <- check_gun_pool_options(), :ok <- check_activity_expiration_config(), :ok <- check_remote_ip_plug_name(), - :ok <- check_uploders_s3_public_endpoint() do + :ok <- check_uploders_s3_public_endpoint(), + :ok <- check_oban_config() do :ok else _ -> @@ -79,7 +80,7 @@ defmodule Pleroma.Config.DeprecationWarnings do move_namespace_and_warn(@mrf_config_map, warning_preface) end - @spec move_namespace_and_warn([config_map()], String.t()) :: :ok | nil + @spec move_namespace_and_warn([config_map()], String.t()) :: :ok | :error def move_namespace_and_warn(config_map, 
warning_preface) do warning = Enum.reduce(config_map, "", fn @@ -102,7 +103,7 @@ defmodule Pleroma.Config.DeprecationWarnings do end end - @spec check_media_proxy_whitelist_config() :: :ok | nil + @spec check_media_proxy_whitelist_config() :: :ok | :error def check_media_proxy_whitelist_config do whitelist = Config.get([:media_proxy, :whitelist]) @@ -163,7 +164,7 @@ defmodule Pleroma.Config.DeprecationWarnings do end end - @spec check_activity_expiration_config() :: :ok | nil + @spec check_activity_expiration_config() :: :ok | :error def check_activity_expiration_config do warning_preface = """ !!!DEPRECATION WARNING!!! @@ -215,4 +216,41 @@ defmodule Pleroma.Config.DeprecationWarnings do :ok end end + + @spec check_oban_config() :: :ok | :error + def check_oban_config do + oban_config = Config.get(Oban) + + {crontab, changed?} = + [ + Pleroma.Workers.Cron.StatsWorker, + Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker, + Pleroma.Workers.Cron.ClearOauthTokenWorker + ] + |> Enum.reduce({oban_config[:crontab], false}, fn removed_worker, {acc, changed?} -> + with acc when is_list(acc) <- acc, + setting when is_tuple(setting) <- + Enum.find(acc, fn {_, worker} -> worker == removed_worker end) do + """ + !!!OBAN CONFIG WARNING!!! + You are using old workers in Oban crontab settings, which were removed. + Please, remove setting from crontab in your config file (prod.secret.exs): #{ + inspect(setting) + } + """ + |> Logger.warn() + + {List.delete(acc, setting), true} + else + _ -> {acc, changed?} + end + end) + + if changed? do + Config.put(Oban, Keyword.put(oban_config, :crontab, crontab)) + :error + else + :ok + end + end end diff --git a/lib/pleroma/config/loader.ex b/lib/pleroma/config/loader.ex index b64d06707..69fd458c0 100644 --- a/lib/pleroma/config/loader.ex +++ b/lib/pleroma/config/loader.ex @@ -3,57 +3,73 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Config.Loader do + @reject_groups [ + :postgrex, + :tesla, + :phoenix, + :tzdata, + :http_signatures, + :web_push_encryption, + :floki, + :pbkdf2_elixir + ] + @reject_keys [ Pleroma.Repo, Pleroma.Web.Endpoint, :env, :configurable_from_database, :database, - :swarm - ] - - @reject_groups [ - :postgrex, - :tesla + :ecto_repos, + Pleroma.Gun, + Pleroma.ReverseProxy.Client, + Pleroma.Web.Auth.Authenticator ] if Code.ensure_loaded?(Config.Reader) do @reader Config.Reader - - def read(path), do: @reader.read!(path) + @config_header "import Config\r\n\r\n" else # support for Elixir less than 1.9 @reader Mix.Config - def read(path) do - path - |> @reader.eval!() - |> elem(0) - end + @config_header "use Mix.Config\r\n\r\n" end - @spec read(Path.t()) :: keyword() + @spec read!(Path.t()) :: keyword() + def read!(path), do: @reader.read!(path) @spec merge(keyword(), keyword()) :: keyword() def merge(c1, c2), do: @reader.merge(c1, c2) + @spec config_header() :: String.t() + def config_header, do: @config_header + @spec default_config() :: keyword() def default_config do - "config/config.exs" - |> read() - |> filter() - end + config = + "config/config.exs" + |> read!() + |> filter() + + logger_config = + :logger + |> Application.get_all_env() + |> Enum.filter(fn {key, _} -> key in [:backends, :console, :ex_syslogger] end) - defp filter(configs) do - configs - |> Keyword.keys() - |> Enum.reduce([], &Keyword.put(&2, &1, filter_group(&1, configs))) + merge(config, logger: logger_config) end - @spec filter_group(atom(), keyword()) :: keyword() - def filter_group(group, configs) do - Enum.reject(configs[group], fn {key, _v} -> - key in 
@reject_keys or group in @reject_groups or - (group == :phoenix and key == :serve_endpoints) + @spec filter(keyword()) :: keyword() + def filter(configs) do + Enum.reduce(configs, [], fn + {group, _settings}, group_acc when group in @reject_groups -> + group_acc + + {group, settings}, group_acc -> + Enum.reduce(settings, group_acc, fn + {key, _value}, acc when key in @reject_keys -> acc + setting, acc -> Keyword.update(acc, group, [setting], &Keyword.merge(&1, [setting])) + end) end) end end diff --git a/lib/pleroma/config/oban.ex b/lib/pleroma/config/oban.ex deleted file mode 100644 index 3e63bca40..000000000 --- a/lib/pleroma/config/oban.ex +++ /dev/null @@ -1,38 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Config.Oban do - require Logger - - def warn do - oban_config = Pleroma.Config.get(Oban) - - crontab = - [ - Pleroma.Workers.Cron.StatsWorker, - Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker, - Pleroma.Workers.Cron.ClearOauthTokenWorker - ] - |> Enum.reduce(oban_config[:crontab], fn removed_worker, acc -> - with acc when is_list(acc) <- acc, - setting when is_tuple(setting) <- - Enum.find(acc, fn {_, worker} -> worker == removed_worker end) do - """ - !!!OBAN CONFIG WARNING!!! - You are using old workers in Oban crontab settings, which were removed. - Please, remove setting from crontab in your config file (prod.secret.exs): #{ - inspect(setting) - } - """ - |> Logger.warn() - - List.delete(acc, setting) - else - _ -> acc - end - end) - - Pleroma.Config.put(Oban, Keyword.put(oban_config, :crontab, crontab)) - end -end diff --git a/lib/pleroma/config/transfer_task.ex b/lib/pleroma/config/transfer_task.ex deleted file mode 100644 index aad45aab8..000000000 --- a/lib/pleroma/config/transfer_task.ex +++ /dev/null @@ -1,201 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Config.TransferTask do - use Task - - alias Pleroma.Config - alias Pleroma.ConfigDB - alias Pleroma.Repo - - require Logger - - @type env() :: :test | :benchmark | :dev | :prod - - @reboot_time_keys [ - {:pleroma, :hackney_pools}, - {:pleroma, :chat}, - {:pleroma, Oban}, - {:pleroma, :rate_limit}, - {:pleroma, :markup}, - {:pleroma, :streamer}, - {:pleroma, :pools}, - {:pleroma, :connections_pool} - ] - - @reboot_time_subkeys [ - {:pleroma, Pleroma.Captcha, [:seconds_valid]}, - {:pleroma, Pleroma.Upload, [:proxy_remote]}, - {:pleroma, :instance, [:upload_limit]}, - {:pleroma, :gopher, [:enabled]} - ] - - def start_link(restart_pleroma? \\ true) do - load_and_update_env([], restart_pleroma?) - if Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo) - :ignore - end - - @spec load_and_update_env([ConfigDB.t()], boolean()) :: :ok - def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do - with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do - # We need to restart applications for loaded settings take effect - - {logger, other} = - (Repo.all(ConfigDB) ++ deleted_settings) - |> Enum.map(&merge_with_default/1) - |> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end) - - logger - |> Enum.sort() - |> Enum.each(&configure/1) - - started_applications = Application.started_applications() - - # TODO: some problem with prometheus after restart! 
- reject = [nil, :prometheus, :postgrex] - - reject = - if restart_pleroma? do - reject - else - [:pleroma | reject] - end - - other - |> Enum.map(&update/1) - |> Enum.uniq() - |> Enum.reject(&(&1 in reject)) - |> maybe_set_pleroma_last() - |> Enum.each(&restart(started_applications, &1, Config.get(:env))) - - :ok - else - {:configurable, false} -> Restarter.Pleroma.rebooted() - end - end - - defp maybe_set_pleroma_last(apps) do - # to be ensured that pleroma will be restarted last - if :pleroma in apps do - apps - |> List.delete(:pleroma) - |> List.insert_at(-1, :pleroma) - else - Restarter.Pleroma.rebooted() - apps - end - end - - defp merge_with_default(%{group: group, key: key, value: value} = setting) do - default = Config.Holder.default_config(group, key) - - merged = - cond do - Ecto.get_meta(setting, :state) == :deleted -> default - can_be_merged?(default, value) -> ConfigDB.merge_group(group, key, default, value) - true -> value - end - - {group, key, value, merged} - end - - # change logger configuration in runtime, without restart - defp configure({:quack, key, _, merged}) do - Logger.configure_backend(Quack.Logger, [{key, merged}]) - :ok = update_env(:quack, key, merged) - end - - defp configure({_, :backends, _, merged}) do - # removing current backends - Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1) - - Enum.each(merged, &Logger.add_backend/1) - - :ok = update_env(:logger, :backends, merged) - end - - defp configure({_, key, _, merged}) when key in [:console, :ex_syslogger] do - merged = - if key == :console do - put_in(merged[:format], merged[:format] <> "\n") - else - merged - end - - backend = - if key == :ex_syslogger, - do: {ExSyslogger, :ex_syslogger}, - else: key - - Logger.configure_backend(backend, merged) - :ok = update_env(:logger, key, merged) - end - - defp configure({_, key, _, merged}) do - Logger.configure([{key, merged}]) - :ok = update_env(:logger, key, merged) - end - - defp update({group, key, value, merged}) do - try do - :ok = update_env(group, key, merged) - - if group != :pleroma or pleroma_need_restart?(group, key, value), do: group - rescue - error -> - error_msg = - "updating env causes error, group: #{inspect(group)}, key: #{inspect(key)}, value: #{ - inspect(value) - } error: #{inspect(error)}" - - Logger.warn(error_msg) - - nil - end - end - - defp update_env(group, key, nil), do: Application.delete_env(group, key) - defp update_env(group, key, value), do: Application.put_env(group, key, value) - - @spec pleroma_need_restart?(atom(), atom(), any()) :: boolean() - def pleroma_need_restart?(group, key, value) do - group_and_key_need_reboot?(group, key) or group_and_subkey_need_reboot?(group, key, value) - end - - defp group_and_key_need_reboot?(group, key) do - Enum.any?(@reboot_time_keys, fn {g, k} -> g == group and k == key end) - end - - defp group_and_subkey_need_reboot?(group, key, value) do - Keyword.keyword?(value) and - Enum.any?(@reboot_time_subkeys, fn {g, k, subkeys} -> - g == group and k == key and - Enum.any?(Keyword.keys(value), &(&1 in subkeys)) - end) - end - - defp restart(_, :pleroma, env), do: Restarter.Pleroma.restart_after_boot(env) - - defp restart(started_applications, app, _) do - with {^app, _, _} <- List.keyfind(started_applications, app, 0), - :ok <- Application.stop(app) do - :ok = Application.start(app) - else - nil -> - Logger.warn("#{app} is not started.") - - error -> - error - |> inspect() - |> Logger.warn() - end - end - - defp can_be_merged?(val1, val2) when is_list(val1) and 
is_list(val2) do - Keyword.keyword?(val1) and Keyword.keyword?(val2) - end - - defp can_be_merged?(_val1, _val2), do: false -end diff --git a/lib/pleroma/config/version.ex b/lib/pleroma/config/version.ex new file mode 100644 index 000000000..2f66cc039 --- /dev/null +++ b/lib/pleroma/config/version.ex @@ -0,0 +1,25 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Config.Version do + @moduledoc """ + IMPORTANT!!! + Before modifying records in the database directly, please read "Config versioning" in `docs/development/config_versioning.md`. + """ + + use Ecto.Schema + + import Ecto.Query, only: [from: 2] + + schema "config_versions" do + field(:backup, Pleroma.EctoType.Config.BinaryValue) + field(:current, :boolean, default: true) + + timestamps() + end + + def all do + from(v in __MODULE__, order_by: [desc: v.id]) |> Pleroma.Repo.all() + end +end diff --git a/lib/pleroma/config/versioning.ex b/lib/pleroma/config/versioning.ex new file mode 100644 index 000000000..b997da1db --- /dev/null +++ b/lib/pleroma/config/versioning.ex @@ -0,0 +1,292 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Config.Versioning do + @moduledoc """ + Module that manages versions of database configs. + """ + + import Ecto.Query, only: [from: 2] + + alias Ecto.Multi + alias Pleroma.Config.Version + alias Pleroma.ConfigDB + alias Pleroma.Repo + + @type change :: %{ + optional(:delete) => boolean(), + optional(:value) => any(), + group: atom(), + key: atom() | nil + } + + @doc """ + Creates new config version: + - convert changes to elixir types + - splits changes by type and processes them in `config` table + - sets all pointers to false + - gets all rows from `config` table and inserts them as keyword in `backup` field + """ + @spec new_version([change()] | change()) :: + {:ok, map()} | {:error, :no_changes} | {:error, atom() | tuple(), any(), any()} + def new_version([]), do: {:error, :empty_changes} + def new_version(change) when is_map(change), do: new_version([change]) + + def new_version(changes) when is_list(changes) do + changes + |> Enum.reduce(Multi.new(), fn + %{delete: true} = deletion, acc -> + Multi.run(acc, {:delete_or_update, deletion[:group], deletion[:key]}, fn _, _ -> + ConfigDB.delete_or_update(deletion) + end) + + operation, acc -> + {name, fun} = + if Keyword.keyword?(operation[:value]) or + (operation[:group] == :pleroma and + operation[:key] in ConfigDB.pleroma_not_keyword_values()) do + {:insert_or_update, + fn _, _ -> + ConfigDB.update_or_create(operation) + end} + else + {:error, + fn _, _ -> + {:error, {:value_must_be_keyword, operation}} + end} + end + + Multi.run(acc, {name, operation[:group], operation[:key]}, fun) + end) + |> set_current_flag_false_for_all_versions() + |> insert_new_version() + |> Repo.transaction() + end + + def new_version(_), do: {:error, :bad_format} + + defp set_current_flag_false_for_all_versions(multi) do + Multi.update_all(multi, :update_all_versions, Version, set: [current: false]) + end + + defp insert_new_version(multi) do + Multi.run(multi, :insert_version, fn repo, _ -> + %Version{ + backup: ConfigDB.all_as_keyword() + } + |> repo.insert() + end) + end + + @doc """ + Rollbacks config version by N steps: + - checks possibility for rollback + - truncates config table and restarts pk + - inserts 
config settings from backup + - sets all pointers to false + - sets current pointer to true for rollback version + - deletes versions after current + """ + @spec rollback(pos_integer()) :: + {:ok, map()} + | {:error, atom() | tuple(), any(), any()} + | {:error, :steps_format} + | {:error, :no_current_version} + | {:error, :rollback_not_possible} + def rollback(steps \\ 1) + + def rollback(steps) when is_integer(steps) and steps > 0 do + with version_id when is_integer(version_id) <- get_current_version_id(), + %Version{} = version <- get_version_by_steps(steps) do + do_rollback(version) + end + end + + def rollback(_), do: {:error, :steps_format} + + @doc """ + Same as `rollback/1`, but rollbacks for a given version id. + """ + @spec rollback_by_id(pos_integer()) :: + {:ok, map()} + | {:error, atom() | tuple(), any(), any()} + | {:error, :not_found} + | {:error, :version_is_already_current} + def rollback_by_id(id) when is_integer(id) do + with %Version{current: false} = version <- get_version_by_id(id) do + do_rollback(version) + else + %Version{current: true} -> {:error, :version_is_already_current} + error -> error + end + end + + defp get_current_version_id do + query = from(v in Version, where: v.current == true) + + with nil <- Repo.aggregate(query, :max, :id) do + {:error, :no_current_version} + end + end + + defp get_version_by_id(id) do + with nil <- Repo.get(Version, id) do + {:error, :not_found} + end + end + + defp get_version_by_steps(steps) do + query = from(v in Version, order_by: [desc: v.id], limit: 1, offset: ^steps) + + with nil <- Repo.one(query) do + {:error, :rollback_not_possible} + end + end + + defp do_rollback(version) do + multi = + truncate_config_table() + |> reset_pk_in_config_table() + + version.backup + |> ConfigDB.from_keyword_to_maps() + |> add_insert_commands(multi) + |> set_current_flag_false_for_all_versions() + |> Multi.update(:move_current_pointer, Ecto.Changeset.change(version, current: true)) + |> Multi.delete_all( + :delete_next_versions, + from(v in Version, where: v.id > ^version.id) + ) + |> Repo.transaction() + end + + defp truncate_config_table(multi \\ Multi.new()) do + Multi.run(multi, :truncate_config_table, fn repo, _ -> + repo.query("TRUNCATE config;") + end) + end + + defp reset_pk_in_config_table(multi) do + Multi.run(multi, :reset_pk, fn repo, _ -> + repo.query("ALTER SEQUENCE config_id_seq RESTART;") + end) + end + + defp add_insert_commands(changes, multi) do + Enum.reduce(changes, multi, fn change, acc -> + Multi.run(acc, {:insert, change[:group], change[:key]}, fn _, _ -> + ConfigDB.update_or_create(change) + end) + end) + end + + @doc """ + Resets config table and creates new empty version. 
+ """ + @spec reset() :: {:ok, map()} | {:error, atom() | tuple(), any(), any()} + def reset do + truncate_config_table() + |> reset_pk_in_config_table() + |> set_current_flag_false_for_all_versions() + |> insert_new_version() + |> Repo.transaction() + end + + @doc """ + Migrates settings from config file into database: + - truncates config table and restarts pk + - inserts settings from config file + - sets all pointers to false + - gets all rows from `config` table and inserts them as keyword in `backup` field + """ + @spec migrate(Path.t()) :: {:ok, map()} | {:error, atom() | tuple(), any(), any()} + def migrate(config_path) do + multi = + truncate_config_table() + |> reset_pk_in_config_table() + + config_path + |> Pleroma.Config.Loader.read!() + |> Pleroma.Config.Loader.filter() + |> ConfigDB.from_keyword_to_maps() + |> add_insert_commands(multi) + |> set_current_flag_false_for_all_versions() + |> insert_new_version() + |> Repo.transaction() + end + + @doc """ + Common function to migrate old config namespace to the new one keeping the old value. + """ + @spec migrate_namespace({atom(), atom()}, {atom(), atom()}) :: + {:ok, map()} | {:error, atom() | tuple(), any(), any()} + def migrate_namespace({o_group, o_key}, {n_group, n_key}) do + config = ConfigDB.get_by_params(%{group: o_group, key: o_key}) + + configs_changes_fun = + if config do + fn -> + config + |> Ecto.Changeset.change(group: n_group, key: n_key) + |> Repo.update() + end + else + fn -> {:ok, nil} end + end + + versions_changes_fun = fn %{backup: backup} = version -> + with {value, rest} when not is_nil(value) <- pop_in(backup[o_group][o_key]) do + rest = + if rest[o_group] == [] do + Keyword.delete(rest, o_group) + else + rest + end + + updated_backup = + if Keyword.has_key?(rest, n_group) do + put_in(rest[n_group][n_key], value) + else + Keyword.put(rest, n_group, [{n_key, value}]) + end + + version + |> Ecto.Changeset.change(backup: updated_backup) + |> Repo.update() + else + _ -> {:ok, nil} + end + end + + migrate_configs_and_versions(configs_changes_fun, versions_changes_fun) + end + + @doc """ + Abstract function for config migrations to keep changes in config table and changes in versions backups in transaction. 
+ Accepts two functions: + - first function makes changes to the configs + - second function makes changes to the backups in versions + """ + @spec migrate_configs_and_versions(function(), function()) :: + {:ok, map()} | {:error, atom() | tuple(), any(), any()} + def migrate_configs_and_versions(configs_changes_fun, version_change_fun) + when is_function(configs_changes_fun, 0) and + is_function(version_change_fun, 1) do + versions = Repo.all(Version) + + multi = + Multi.new() + |> Multi.run(:configs_changes, fn _, _ -> + configs_changes_fun.() + end) + + versions + |> Enum.reduce(multi, fn version, acc -> + Multi.run(acc, {:version_change, version.id}, fn _, _ -> + version_change_fun.(version) + end) + end) + |> Repo.transaction() + end +end diff --git a/lib/pleroma/config_db.ex b/lib/pleroma/config_db.ex index cb57673e3..7a29096c5 100644 --- a/lib/pleroma/config_db.ex +++ b/lib/pleroma/config_db.ex @@ -6,8 +6,7 @@ defmodule Pleroma.ConfigDB do use Ecto.Schema import Ecto.Changeset - import Ecto.Query, only: [select: 3, from: 2] - import Pleroma.Web.Gettext + import Ecto.Query, only: [from: 2] alias __MODULE__ alias Pleroma.Repo @@ -22,6 +21,10 @@ defmodule Pleroma.ConfigDB do {:pleroma, :mrf_keyword, :replace} ] + @groups_without_keys [:quack, :mime, :cors_plug, :esshd, :ex_aws, :joken, :logger, :swoosh] + + @pleroma_not_keyword_values [Pleroma.Web.Auth.Authenticator, :admin_token] + schema "config" do field(:key, Pleroma.EctoType.Config.Atom) field(:group, Pleroma.EctoType.Config.Atom) @@ -31,13 +34,35 @@ defmodule Pleroma.ConfigDB do timestamps() end - @spec get_all_as_keyword() :: keyword() - def get_all_as_keyword do - ConfigDB - |> select([c], {c.group, c.key, c.value}) - |> Repo.all() - |> Enum.reduce([], fn {group, key, value}, acc -> - Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}])) + @spec all() :: [t()] + def all, do: Repo.all(ConfigDB) + + @spec all_with_db() :: [t()] + def all_with_db do + all() + |> Enum.map(fn + %{group: :pleroma, key: key} = change when key in @pleroma_not_keyword_values -> + %{change | db: [change.key]} + + %{value: value} = change -> + %{change | db: Keyword.keys(value)} + end) + end + + @spec all_as_keyword() :: keyword() + def all_as_keyword do + all() + |> as_keyword() + end + + @spec as_keyword([t()]) :: keyword() + def as_keyword(changes) do + Enum.reduce(changes, [], fn + %{group: group, key: nil, value: value}, acc -> + Keyword.update(acc, group, value, &Keyword.merge(&1, value)) + + %{group: group, key: key, value: value}, acc -> + Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}])) end) end @@ -52,14 +77,22 @@ defmodule Pleroma.ConfigDB do end @spec get_by_params(map()) :: ConfigDB.t() | nil - def get_by_params(%{group: _, key: _} = params), do: Repo.get_by(ConfigDB, params) + def get_by_params(%{group: group, key: key} = params) + when not is_nil(key) and not is_nil(group) do + Repo.get_by(ConfigDB, params) + end + + def get_by_params(%{group: group}) do + from(c in ConfigDB, where: c.group == ^group and is_nil(c.key)) |> Repo.one() + end @spec changeset(ConfigDB.t(), map()) :: Changeset.t() def changeset(config, params \\ %{}) do config |> cast(params, [:key, :group, :value]) - |> validate_required([:key, :group, :value]) + |> validate_required([:group, :value]) |> unique_constraint(:key, name: :config_group_key_index) + |> unique_constraint(:key, name: :config_group__key_is_null_index) end defp create(params) do @@ -74,319 +107,214 @@ defmodule Pleroma.ConfigDB do |> Repo.update() end 
- @spec get_db_keys(keyword(), any()) :: [String.t()] - def get_db_keys(value, key) do - keys = - if Keyword.keyword?(value) do - Keyword.keys(value) - else - [key] - end - - Enum.map(keys, &to_json_types(&1)) - end - - @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword() - def merge_group(group, key, old_value, new_value) do - new_keys = to_mapset(new_value) - - intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list() - - merged_value = ConfigDB.merge(old_value, new_value) - - @full_subkey_update - |> Enum.map(fn - {g, k, subkey} when g == group and k == key -> - if subkey in intersect_keys, do: subkey, else: [] - - _ -> - [] - end) - |> List.flatten() - |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1])) - end - - defp to_mapset(keyword) do - keyword - |> Keyword.keys() - |> MapSet.new() - end - - @spec sub_key_full_update?(atom(), atom(), [Keyword.key()]) :: boolean() - def sub_key_full_update?(group, key, subkeys) do - Enum.any?(@full_subkey_update, fn {g, k, subkey} -> - g == group and k == key and subkey in subkeys - end) - end - - @spec merge(keyword(), keyword()) :: keyword() - def merge(config1, config2) when is_list(config1) and is_list(config2) do - Keyword.merge(config1, config2, fn _, app1, app2 -> - if Keyword.keyword?(app1) and Keyword.keyword?(app2) do - Keyword.merge(app1, app2, &deep_merge/3) - else - app2 - end - end) - end - - defp deep_merge(_key, value1, value2) do - if Keyword.keyword?(value1) and Keyword.keyword?(value2) do - Keyword.merge(value1, value2, &deep_merge/3) - else - value2 - end - end - + @doc """ + IMPORTANT!!! + Before modifying records in the database directly, please read "Config versioning" in `docs/development/config_versioning.md`. + """ @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} def update_or_create(params) do - params = Map.put(params, :value, to_elixir_types(params[:value])) search_opts = Map.take(params, [:group, :key]) - with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), - {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config}, - {_, true, config} <- - {:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do + with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts) do new_value = merge_group(config.group, config.key, config.value, params[:value]) + update(config, %{value: new_value}) else - {reason, false, config} when reason in [:partial_update, :can_be_merged] -> - update(config, params) - nil -> create(params) end end - defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config) - - defp only_full_update?(%ConfigDB{group: group, key: key}) do - full_key_update = [ - {:pleroma, :ecto_repos}, - {:quack, :meta}, - {:mime, :types}, - {:cors_plug, [:max_age, :methods, :expose, :headers]}, - {:swarm, :node_blacklist}, - {:logger, :backends} - ] - - Enum.any?(full_key_update, fn - {s_group, s_key} -> - group == s_group and ((is_list(s_key) and key in s_key) or key == s_key) - end) - end - + @doc """ + IMPORTANT!!! + Before modifying records in the database directly, please read "Config versioning" in `docs/development/config_versioning.md`. + """ @spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} def delete(%ConfigDB{} = config), do: Repo.delete(config) - def delete(params) do - search_opts = Map.delete(params, :subkeys) + @doc """ + IMPORTANT!!! 
+ Before modifying records in the database directly, please read "Config versioning" in `docs/development/config_versioning.md`. + """ + @spec delete_or_update(map()) :: {:ok, t()} | {:ok, nil} | {:error, Changeset.t()} + def delete_or_update(%{group: _, key: key} = params) when not is_nil(key) do + search_opts = Map.take(params, [:group, :key]) - with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), - {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]}, - keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)), - {_, config, new_value} when new_value != [] <- - {:partial_remove, config, Keyword.drop(config.value, keys)} do - update(config, %{value: new_value}) + with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts) do + do_delete_or_update(config, params[:subkeys]) else - {:partial_remove, config, []} -> - Repo.delete(config) - - {config, nil} -> - Repo.delete(config) + _ -> {:ok, nil} + end + end - nil -> - err = - dgettext("errors", "Config with params %{params} not found", params: inspect(params)) + def delete_or_update(%{group: group}) do + query = from(c in ConfigDB, where: c.group == ^group) - {:error, err} + with {num, _} <- Repo.delete_all(query) do + {:ok, num} end end - @spec to_json_types(term()) :: map() | list() | boolean() | String.t() - def to_json_types(entity) when is_list(entity) do - Enum.map(entity, &to_json_types/1) + defp do_delete_or_update(%ConfigDB{} = config, subkeys) + when is_list(subkeys) and subkeys != [] do + new_value = Keyword.drop(config.value, subkeys) + + if new_value == [] do + delete(config) + else + update(config, %{value: new_value}) + end end - def to_json_types(%Regex{} = entity), do: inspect(entity) + defp do_delete_or_update(%ConfigDB{} = config, _), do: delete(config) - def to_json_types(entity) when is_map(entity) do - Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end) - end + defp merge_group(group, key, old_value, new_value) + when is_list(old_value) and is_list(new_value) do + new_keys = to_mapset(new_value) - def to_json_types({:args, args}) when is_list(args) do - arguments = - Enum.map(args, fn - arg when is_tuple(arg) -> inspect(arg) - arg -> to_json_types(arg) - end) + intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list() - %{"tuple" => [":args", arguments]} - end + merged_value = deep_merge(old_value, new_value) - def to_json_types({:proxy_url, {type, :localhost, port}}) do - %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]} + @full_subkey_update + |> Enum.reduce([], fn + {g, k, subkey}, acc when g == group and k == key -> + if subkey in intersect_keys do + [subkey | acc] + else + acc + end + + _, acc -> + acc + end) + |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1])) end - def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do - ip = - host - |> :inet_parse.ntoa() - |> to_string() + defp merge_group(_group, _key, _old_value, new_value) when is_list(new_value), do: new_value - %{ - "tuple" => [ - ":proxy_url", - %{"tuple" => [to_json_types(type), ip, port]} - ] - } + defp merge_group(:pleroma, key, _old_value, new_value) + when key in @pleroma_not_keyword_values do + new_value end - def to_json_types({:proxy_url, {type, host, port}}) do - %{ - "tuple" => [ - ":proxy_url", - %{"tuple" => [to_json_types(type), to_string(host), port]} - ] - } + defp to_mapset(keyword) when is_list(keyword) do + keyword + |> Keyword.keys() + |> MapSet.new() end - def 
to_json_types({:partial_chain, entity}), - do: %{"tuple" => [":partial_chain", inspect(entity)]} - - def to_json_types(entity) when is_tuple(entity) do - value = - entity - |> Tuple.to_list() - |> to_json_types() - - %{"tuple" => value} + defp deep_merge(config1, config2) when is_list(config1) and is_list(config2) do + Keyword.merge(config1, config2, fn _, app1, app2 -> + if Keyword.keyword?(app1) and Keyword.keyword?(app2) do + Keyword.merge(app1, app2, &deep_merge/3) + else + app2 + end + end) end - def to_json_types(entity) when is_binary(entity), do: entity + defp deep_merge(_key, value1, value2) do + if Keyword.keyword?(value1) and Keyword.keyword?(value2) do + Keyword.merge(value1, value2, &deep_merge/3) + else + value2 + end + end - def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do - entity + @spec reduce_defaults_and_merge_with_changes([t()], keyword()) :: {[t()], keyword()} + def reduce_defaults_and_merge_with_changes(changes, defaults) do + Enum.reduce(changes, {[], defaults}, &reduce_default_and_merge_with_change/2) end - def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do - ":#{entity}" + defp reduce_default_and_merge_with_change(%{group: group} = change, {acc, defaults}) + when group in @groups_without_keys do + {default, remaining_defaults} = Keyword.pop(defaults, group) + + change = merge_change_with_default(change, default) + {[change | acc], remaining_defaults} end - def to_json_types(entity) when is_atom(entity), do: inspect(entity) + defp reduce_default_and_merge_with_change(%{group: group, key: key} = change, {acc, defaults}) do + if defaults[group] do + {default, remaining_group_defaults} = Keyword.pop(defaults[group], key) - @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term() - def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do - arguments = - Enum.map(args, fn arg -> - if String.contains?(arg, ["{", "}"]) do - {elem, []} = Code.eval_string(arg) - elem + remaining_defaults = + if remaining_group_defaults == [] do + Keyword.delete(defaults, group) else - to_elixir_types(arg) + Keyword.put(defaults, group, remaining_group_defaults) end - end) - - {:args, arguments} - end - def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do - {:proxy_url, {string_to_elixir_types(type), parse_host(host), port}} - end - - def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do - {partial_chain, []} = - entity - |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "") - |> Code.eval_string() + change = merge_change_with_default(change, default) - {:partial_chain, partial_chain} + {[change | acc], remaining_defaults} + else + {[change | acc], defaults} + end end - def to_elixir_types(%{"tuple" => entity}) do - Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1))) + @spec from_keyword_to_structs(keyword(), [] | [t()]) :: [t()] + def from_keyword_to_structs(keyword, initial_acc \\ []) do + Enum.reduce(keyword, initial_acc, &reduce_to_structs/2) end - def to_elixir_types(entity) when is_map(entity) do - Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end) + defp reduce_to_structs({group, config}, group_acc) when group in @groups_without_keys do + [struct(%ConfigDB{}, to_map(group, config)) | group_acc] end - def to_elixir_types(entity) when is_list(entity) do - Enum.map(entity, &to_elixir_types/1) + defp reduce_to_structs({group, config}, group_acc) do + Enum.reduce(config, group_acc, fn {key, 
value}, acc -> + [struct(%ConfigDB{}, to_map(group, key, value)) | acc] + end) end - def to_elixir_types(entity) when is_binary(entity) do - entity - |> String.trim() - |> string_to_elixir_types() + @spec from_keyword_to_maps(keyword(), [] | [map()]) :: [map()] + def from_keyword_to_maps(keyword, initial_acc \\ []) do + Enum.reduce(keyword, initial_acc, &reduce_to_maps/2) end - def to_elixir_types(entity), do: entity - - @spec string_to_elixir_types(String.t()) :: - atom() | Regex.t() | module() | String.t() | no_return() - def string_to_elixir_types("~r" <> _pattern = regex) do - pattern = - ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u - - delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}] - - with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <- - Regex.named_captures(pattern, regex), - {:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter), - {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do - result - end + defp reduce_to_maps({group, config}, group_acc) when group in @groups_without_keys do + [to_map(group, config) | group_acc] end - def string_to_elixir_types(":" <> atom), do: String.to_atom(atom) - - def string_to_elixir_types(value) do - if module_name?(value) do - String.to_existing_atom("Elixir." <> value) - else - value - end + defp reduce_to_maps({group, config}, group_acc) do + Enum.reduce(config, group_acc, fn {key, value}, acc -> + [to_map(group, key, value) | acc] + end) end - defp parse_host("localhost"), do: :localhost + defp to_map(group, config), do: %{group: group, value: config} - defp parse_host(host) do - charlist = to_charlist(host) + defp to_map(group, key, value), do: %{group: group, key: key, value: value} - case :inet.parse_address(charlist) do - {:error, :einval} -> - charlist + @spec merge_changes_with_defaults([t()], keyword()) :: [t()] + def merge_changes_with_defaults(changes, defaults) when is_list(changes) do + Enum.map(changes, fn + %{group: group} = change when group in @groups_without_keys -> + merge_change_with_default(change, defaults[group]) - {:ok, ip} -> - ip - end + %{group: group, key: key} = change -> + merge_change_with_default(change, defaults[group][key]) + end) end - defp find_valid_delimiter([], _string, _) do - raise(ArgumentError, message: "valid delimiter for Regex expression not found") + defp merge_change_with_default(change, default) do + %{change | value: merge_change_value_with_default(change, default)} end - defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter) - when is_tuple(delimiter) do - if String.contains?(pattern, closing) do - find_valid_delimiter(others, pattern, regex_delimiter) + @spec merge_change_value_with_default(t(), keyword()) :: keyword() + def merge_change_value_with_default(change, default) do + if Ecto.get_meta(change, :state) == :deleted do + default else - {:ok, {leading, closing}} + merge_group(change.group, change.key, default, change.value) end end - defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do - if String.contains?(pattern, delimiter) do - find_valid_delimiter(others, pattern, regex_delimiter) - else - {:ok, {delimiter, delimiter}} - end - end + @spec groups_without_keys() :: [atom()] + def groups_without_keys, do: @groups_without_keys - @spec module_name?(String.t()) :: boolean() - def module_name?(string) do - 
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or - string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"] - end + @spec pleroma_not_keyword_values() :: [atom()] + def pleroma_not_keyword_values, do: @pleroma_not_keyword_values end diff --git a/lib/pleroma/docs/json.ex b/lib/pleroma/docs/json.ex index f22432ea4..42ea15549 100644 --- a/lib/pleroma/docs/json.ex +++ b/lib/pleroma/docs/json.ex @@ -5,7 +5,7 @@ defmodule Pleroma.Docs.JSON do @behaviour Pleroma.Docs.Generator @external_resource "config/description.exs" - @raw_config Pleroma.Config.Loader.read("config/description.exs") + @raw_config Pleroma.Config.Loader.read!("config/description.exs") @raw_descriptions @raw_config[:pleroma][:config_description] @term __MODULE__.Compiled diff --git a/lib/pleroma/ecto_type/config/atom.ex b/lib/pleroma/ecto_type/config/atom.ex index 3bf0bca5b..35b459c06 100644 --- a/lib/pleroma/ecto_type/config/atom.ex +++ b/lib/pleroma/ecto_type/config/atom.ex @@ -12,13 +12,13 @@ defmodule Pleroma.EctoType.Config.Atom do end def cast(key) when is_binary(key) do - {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)} + {:ok, Pleroma.Config.Converter.string_to_elixir_types!(key)} end def cast(_), do: :error def load(key) do - {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)} + {:ok, Pleroma.Config.Converter.string_to_elixir_types!(key)} end def dump(key) when is_atom(key), do: {:ok, inspect(key)} diff --git a/lib/pleroma/ecto_type/config/binary_value.ex b/lib/pleroma/ecto_type/config/binary_value.ex index 908220a65..19c5cda83 100644 --- a/lib/pleroma/ecto_type/config/binary_value.ex +++ b/lib/pleroma/ecto_type/config/binary_value.ex @@ -15,6 +15,10 @@ defmodule Pleroma.EctoType.Config.BinaryValue do end end + def cast(value) when is_map(value) or is_list(value) do + {:ok, Pleroma.Config.Converter.to_elixir_types(value)} + end + def cast(value), do: {:ok, value} def load(value) when is_binary(value) do diff --git a/lib/pleroma/gopher/server.ex b/lib/pleroma/gopher/server.ex index 1b85c49f5..2fa85ef66 100644 --- a/lib/pleroma/gopher/server.ex +++ b/lib/pleroma/gopher/server.ex @@ -12,13 +12,14 @@ defmodule Pleroma.Gopher.Server do port = Keyword.get(config, :port, 1234) if Keyword.get(config, :enabled, false) do - GenServer.start_link(__MODULE__, [ip, port], []) + GenServer.start_link(__MODULE__, [ip, port]) else Logger.info("Gopher server disabled") :ignore end end + @impl true def init([ip, port]) do Logger.info("Starting gopher server on #{port}") @@ -31,8 +32,14 @@ defmodule Pleroma.Gopher.Server do [] ) + Process.flag(:trap_exit, true) {:ok, %{ip: ip, port: port}} end + + @impl true + def terminate(_reason, _state) do + :ranch.stop_listener(:gopher) + end end defmodule Pleroma.Gopher.Server.ProtocolHandler do diff --git a/lib/pleroma/gun/gun_supervisor.ex b/lib/pleroma/gun/gun_supervisor.ex new file mode 100644 index 000000000..c72dfdd24 --- /dev/null +++ b/lib/pleroma/gun/gun_supervisor.ex @@ -0,0 +1,19 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Gun.GunSupervisor do + use Supervisor + + def start_link(_) do + Supervisor.start_link(__MODULE__, :no_args) + end + + def init(_) do + children = + Pleroma.Gun.ConnectionPool.children() ++ + [{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}] + + Supervisor.init(children, strategy: :one_for_one) + end +end diff --git a/lib/pleroma/http/hackney_supervisor.ex 
b/lib/pleroma/http/hackney_supervisor.ex new file mode 100644 index 000000000..0e36f0273 --- /dev/null +++ b/lib/pleroma/http/hackney_supervisor.ex @@ -0,0 +1,30 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.HTTP.HackneySupervisor do + use Supervisor + + def start_link(_) do + Supervisor.start_link(__MODULE__, :no_arg) + end + + def init(_) do + pools = [:federation, :media] + + pools = + if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do + [:upload | pools] + else + pools + end + + children = + for pool <- pools do + options = Pleroma.Config.get([:hackney_pools, pool]) + :hackney_pool.child_spec(pool, options) + end + + Supervisor.init(children, strategy: :one_for_one) + end +end diff --git a/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex b/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex index 839ac1a8d..daa4c451c 100644 --- a/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex +++ b/lib/pleroma/web/admin_api/controllers/admin_api_controller.ex @@ -392,14 +392,14 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do def restart(conn, _params) do with :ok <- configurable_from_database() do - Restarter.Pleroma.restart(Config.get(:env), 50) + Task.start(Pleroma.Application.ConfigDependentDeps, :restart_dependencies, []) json(conn, %{}) end end def need_reboot(conn, _params) do - json(conn, %{need_reboot: Restarter.Pleroma.need_reboot?()}) + json(conn, %{need_reboot: Pleroma.Application.ConfigDependentDeps.need_reboot?()}) end defp configurable_from_database do diff --git a/lib/pleroma/web/admin_api/controllers/config_controller.ex b/lib/pleroma/web/admin_api/controllers/config_controller.ex index a718d7b8d..88ae9ed99 100644 --- a/lib/pleroma/web/admin_api/controllers/config_controller.ex +++ b/lib/pleroma/web/admin_api/controllers/config_controller.ex @@ -5,19 +5,24 @@ defmodule Pleroma.Web.AdminAPI.ConfigController do use Pleroma.Web, :controller + import Pleroma.Web.ControllerHelper, only: [json_response: 3] + + alias Pleroma.Application alias Pleroma.Config alias Pleroma.ConfigDB alias Pleroma.Web.Plugs.OAuthScopesPlug plug(Pleroma.Web.ApiSpec.CastAndValidate) - plug(OAuthScopesPlug, %{scopes: ["admin:write"]} when action == :update) + plug(OAuthScopesPlug, %{scopes: ["admin:write"]} when action in [:update, :rollback]) plug( OAuthScopesPlug, %{scopes: ["admin:read"]} - when action in [:show, :descriptions] + when action in [:show, :descriptions, :versions] ) + plug(:check_possibility_configuration_from_database when action != :descriptions) + action_fallback(Pleroma.Web.AdminAPI.FallbackController) defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.ConfigOperation @@ -29,100 +34,110 @@ defmodule Pleroma.Web.AdminAPI.ConfigController do end def show(conn, %{only_db: true}) do - with :ok <- configurable_from_database() do - configs = Pleroma.Repo.all(ConfigDB) + configs = ConfigDB.all_with_db() - render(conn, "index.json", %{ - configs: configs, - need_reboot: Restarter.Pleroma.need_reboot?() - }) - end + render(conn, "index.json", %{ + configs: configs, + need_reboot: Application.ConfigDependentDeps.need_reboot?() + }) end def show(conn, _params) do - with :ok <- configurable_from_database() do - configs = ConfigDB.get_all_as_keyword() - - merged = - Config.Holder.default_config() - |> ConfigDB.merge(configs) - |> Enum.map(fn {group, values} -> - Enum.map(values, fn {key, value} -> - db 
= - if configs[group][key] do - ConfigDB.get_db_keys(configs[group][key], key) - end + defaults = Config.Holder.default_config() + changes = ConfigDB.all_with_db() + + {changes_values_merged_with_defaults, remaining_defaults} = + ConfigDB.reduce_defaults_and_merge_with_changes(changes, defaults) - db_value = configs[group][key] + changes_merged_with_defaults = + ConfigDB.from_keyword_to_structs(remaining_defaults, changes_values_merged_with_defaults) - merged_value = - if not is_nil(db_value) and Keyword.keyword?(db_value) and - ConfigDB.sub_key_full_update?(group, key, Keyword.keys(db_value)) do - ConfigDB.merge_group(group, key, value, db_value) + render(conn, "index.json", %{ + configs: changes_merged_with_defaults, + need_reboot: Application.ConfigDependentDeps.need_reboot?() + }) + end + + def update(%{body_params: %{configs: configs}} = conn, _) do + result = + configs + |> Enum.filter(&whitelisted_config?/1) + |> Enum.map(&Config.Converter.to_elixir_types/1) + |> Config.Versioning.new_version() + + case result do + {:ok, changes} -> + inserts_and_deletions = + Enum.reduce(changes, [], fn + {{operation, _, _}, %ConfigDB{} = change}, acc + when operation in [:insert_or_update, :delete_or_update] -> + if Ecto.get_meta(change, :state) == :deleted do + [change | acc] else - value + if change.group == :pleroma and + change.key in ConfigDB.pleroma_not_keyword_values() do + [%{change | db: [change.key]} | acc] + else + [%{change | db: Keyword.keys(change.value)} | acc] + end end - %ConfigDB{ - group: group, - key: key, - value: merged_value - } - |> Pleroma.Maps.put_if_present(:db, db) + _, acc -> + acc end) - end) - |> List.flatten() - render(conn, "index.json", %{ - configs: merged, - need_reboot: Restarter.Pleroma.need_reboot?() - }) + Application.Environment.update(inserts_and_deletions, only_update: true) + + render(conn, "index.json", %{ + configs: Enum.reject(inserts_and_deletions, &(Ecto.get_meta(&1, :state) == :deleted)), + need_reboot: Application.ConfigDependentDeps.need_reboot?() + }) + + {:error, error} -> + {:error, "Updating config failed: #{inspect(error)}"} + + {:error, _, {error, operation}, _} -> + {:error, + "Updating config failed: #{inspect(error)}, group: #{operation[:group]}, key: #{ + operation[:key] + }, value: #{inspect(operation[:value])}"} end end - def update(%{body_params: %{configs: configs}} = conn, _) do - with :ok <- configurable_from_database() do - results = - configs - |> Enum.filter(&whitelisted_config?/1) - |> Enum.map(fn - %{group: group, key: key, delete: true} = params -> - ConfigDB.delete(%{group: group, key: key, subkeys: params[:subkeys]}) - - %{group: group, key: key, value: value} -> - ConfigDB.update_or_create(%{group: group, key: key, value: value}) - end) - |> Enum.reject(fn {result, _} -> result == :error end) - - {deleted, updated} = - results - |> Enum.map(fn {:ok, %{key: key, value: value} = config} -> - Map.put(config, :db, ConfigDB.get_db_keys(value, key)) - end) - |> Enum.split_with(&(Ecto.get_meta(&1, :state) == :deleted)) - - Config.TransferTask.load_and_update_env(deleted, false) - - if not Restarter.Pleroma.need_reboot?() do - changed_reboot_settings? 
= - (updated ++ deleted) - |> Enum.any?(&Config.TransferTask.pleroma_need_restart?(&1.group, &1.key, &1.value)) - - if changed_reboot_settings?, do: Restarter.Pleroma.need_reboot() - end - - render(conn, "index.json", %{ - configs: updated, - need_reboot: Restarter.Pleroma.need_reboot?() - }) + def rollback(conn, %{id: id}) do + case Config.Versioning.rollback_by_id(id) do + {:ok, _} -> + json_response(conn, :no_content, "") + + {:error, :not_found} -> + {:error, :not_found} + + {:error, error} -> + {:error, "Rollback is not possible: #{inspect(error)}"} + + {:error, _, {error, operation}, _} -> + {:error, + "Rollback is not possible, backup restore error: #{inspect(error)}, operation error: #{ + inspect(operation) + }"} end end - defp configurable_from_database do + def versions(conn, _) do + versions = Pleroma.Config.Version.all() + + render(conn, "index.json", %{versions: versions}) + end + + defp check_possibility_configuration_from_database(conn, _) do if Config.get(:configurable_from_database) do - :ok + conn else - {:error, "You must enable configurable_from_database in your config file."} + Pleroma.Web.AdminAPI.FallbackController.call( + conn, + {:error, "You must enable configurable_from_database in your config file."} + ) + |> halt() end end diff --git a/lib/pleroma/web/admin_api/views/config_view.ex b/lib/pleroma/web/admin_api/views/config_view.ex index d29b4963d..6115c3405 100644 --- a/lib/pleroma/web/admin_api/views/config_view.ex +++ b/lib/pleroma/web/admin_api/views/config_view.ex @@ -5,8 +5,6 @@ defmodule Pleroma.Web.AdminAPI.ConfigView do use Pleroma.Web, :view - alias Pleroma.ConfigDB - def render("index.json", %{configs: configs} = params) do %{ configs: render_many(configs, __MODULE__, "show.json", as: :config), @@ -14,17 +12,23 @@ defmodule Pleroma.Web.AdminAPI.ConfigView do } end - def render("show.json", %{config: config}) do - map = %{ - key: ConfigDB.to_json_types(config.key), - group: ConfigDB.to_json_types(config.group), - value: ConfigDB.to_json_types(config.value) + def render("index.json", %{versions: versions}) do + %{ + versions: render_many(versions, __MODULE__, "show.json", as: :version) } + end + + def render("show.json", %{config: config}) do + config + |> Map.take([:group, :key, :value, :db]) + |> Map.new(fn + {k, v} -> {k, Pleroma.Config.Converter.to_json_types(v)} + end) + end - if config.db != [] do - Map.put(map, :db, config.db) - else - map - end + def render("show.json", %{version: version}) do + version + |> Map.take([:id, :current]) + |> Map.put(:inserted_at, Pleroma.Web.CommonAPI.Utils.to_masto_date(version.inserted_at)) end end diff --git a/lib/pleroma/web/api_spec/operations/admin/config_operation.ex b/lib/pleroma/web/api_spec/operations/admin/config_operation.ex index 30c3433b7..6d22191f1 100644 --- a/lib/pleroma/web/api_spec/operations/admin/config_operation.ex +++ b/lib/pleroma/web/api_spec/operations/admin/config_operation.ex @@ -53,7 +53,7 @@ defmodule Pleroma.Web.ApiSpec.Admin.ConfigOperation do type: :object, properties: %{ group: %Schema{type: :string}, - key: %Schema{type: :string}, + key: %Schema{type: :string, nullable: true}, value: any(), delete: %Schema{type: :boolean}, subkeys: %Schema{type: :array, items: %Schema{type: :string}} @@ -107,6 +107,56 @@ defmodule Pleroma.Web.ApiSpec.Admin.ConfigOperation do } end + def rollback_operation do + %Operation{ + tags: ["Admin", "Config"], + summary: "Rollback config changes.", + operationId: "AdminAPI.ConfigController.rollback", + security: [%{"oAuth" => ["write"]}], + parameters: [ + 
Operation.parameter(:id, :path, %Schema{type: :integer}, "Version id to rollback", + required: true + ) + | admin_api_params() + ], + responses: %{ + 204 => no_content_response(), + 400 => Operation.response("Bad Request", "application/json", ApiError), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def versions_operation do + %Operation{ + tags: ["Admin", "Config"], + summary: "Get list with config versions.", + operationId: "AdminAPI.ConfigController.versions", + security: [%{"oAuth" => ["read"]}], + parameters: admin_api_params(), + responses: %{ + 200 => + Operation.response("Config Version", "application/json", %Schema{ + type: :object, + properties: %{ + versions: %Schema{ + type: :array, + items: %Schema{ + type: :object, + properties: %{ + id: %Schema{type: :integer}, + current: %Schema{type: :boolean}, + inserted_at: %Schema{type: :string, format: :"date-time"} + } + } + } + } + }), + 400 => Operation.response("Bad Request", "application/json", ApiError) + } + } + end + defp any do %Schema{ oneOf: [ @@ -129,7 +179,7 @@ defmodule Pleroma.Web.ApiSpec.Admin.ConfigOperation do type: :object, properties: %{ group: %Schema{type: :string}, - key: %Schema{type: :string}, + key: %Schema{type: :string, nullable: true}, value: any() } } diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index ccf2ef796..101c9fdff 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -232,6 +232,8 @@ defmodule Pleroma.Web.Router do get("/config", ConfigController, :show) post("/config", ConfigController, :update) get("/config/descriptions", ConfigController, :descriptions) + get("/config/versions", ConfigController, :versions) + get("/config/versions/rollback/:id", ConfigController, :rollback) get("/need_reboot", AdminAPIController, :need_reboot) get("/restart", AdminAPIController, :restart) |
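
Reviewer note: below is a minimal usage sketch of the new config versioning flow, assuming a running instance with configurable_from_database enabled. The group/key/value payloads are illustrative examples, not values taken from this patch; the function names are the ones added above in Pleroma.Config.Versioning and Pleroma.Config.Version.

    alias Pleroma.Config.Versioning

    # Create a new version from a list of changes. Each change is a map with
    # :group, :key and a keyword-list :value, or :delete => true for removals.
    Versioning.new_version([
      %{group: :pleroma, key: :instance, value: [name: "My instance"]},
      %{group: :pleroma, key: :gopher, delete: true}
    ])
    # => {:ok, %{...}} on success, {:error, ...} otherwise

    # List stored versions, newest first (backed by the config_versions table).
    versions = Pleroma.Config.Version.all()

    # Roll back one step; versions created after the target are deleted.
    Versioning.rollback(1)

    # ...or roll back to a specific, non-current version id.
    with [_current, older | _] <- versions do
      Versioning.rollback_by_id(older.id)
    end

The same operations are reachable over the admin API added to the router in this patch: GET /config/versions lists the stored versions and GET /config/versions/rollback/:id performs a rollback to the given version.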