Diffstat (limited to 'lib/pleroma/application')
-rw-r--r--  lib/pleroma/application/chat_supervisor.ex        |  19
-rw-r--r--  lib/pleroma/application/config_dependent_deps.ex  | 244
-rw-r--r--  lib/pleroma/application/environment.ex            | 103
-rw-r--r--  lib/pleroma/application/requirements.ex           | 265
-rw-r--r--  lib/pleroma/application/start_up_dependencies.ex  | 182
5 files changed, 813 insertions, 0 deletions
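For orientation, a rough sketch of how these modules appear to fit together, based only on the calls visible in this diff. The matching changes to Pleroma.Application itself are outside this diffstat, so the supervisor name and the exact wiring below are assumptions, not part of this commit:

    # Hypothetical wiring (names are placeholders): Pleroma.Application presumably
    # starts a DynamicSupervisor plus the ConfigDependentDeps GenServer, runs the
    # start-up checks, applies DB config, and then starts everything else.
    children = [
      {DynamicSupervisor, strategy: :one_for_one, name: Pleroma.DynamicSupervisor},
      {Pleroma.Application.ConfigDependentDeps, dynamic_supervisor: Pleroma.DynamicSupervisor}
    ]

    {:ok, _sup} = Supervisor.start_link(children, strategy: :one_for_one)

    Pleroma.Application.Requirements.verify!()
    Pleroma.Application.Environment.load_from_db_and_update(pleroma_start: true)
    Pleroma.Application.StartUpDependencies.start_all(:prod)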
diff --git a/lib/pleroma/application/chat_supervisor.ex b/lib/pleroma/application/chat_supervisor.ex
new file mode 100644
index 000000000..4b6f0e740
--- /dev/null
+++ b/lib/pleroma/application/chat_supervisor.ex
@@ -0,0 +1,19 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Application.ChatSupervisor do
+  use Supervisor
+
+  def start_link(_) do
+    Supervisor.start_link(__MODULE__, :no_args)
+  end
+
+  def init(_) do
+    [
+      Pleroma.Web.ChatChannel.ChatChannelState,
+      {Phoenix.PubSub, [name: Pleroma.PubSub, adapter: Phoenix.PubSub.PG2]}
+    ]
+    |> Supervisor.init(strategy: :one_for_one)
+  end
+end
diff --git a/lib/pleroma/application/config_dependent_deps.ex b/lib/pleroma/application/config_dependent_deps.ex
new file mode 100644
index 000000000..c6b26affd
--- /dev/null
+++ b/lib/pleroma/application/config_dependent_deps.ex
@@ -0,0 +1,244 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Application.ConfigDependentDeps do
+  use GenServer
+
+  require Logger
+
+  @config_path_mods_relation [
+    {{:pleroma, :chat}, Pleroma.Application.ChatSupervisor},
+    {{:pleroma, Oban}, Oban},
+    {{:pleroma, :rate_limit}, Pleroma.Web.Plugs.RateLimiter.Supervisor},
+    {{:pleroma, :streamer}, Pleroma.Web.Streamer.registry()},
+    {{:pleroma, :pools}, Pleroma.Gun.GunSupervisor},
+    {{:pleroma, :connections_pool}, Pleroma.Gun.GunSupervisor},
+    {{:pleroma, :hackney_pools}, Pleroma.HTTP.HackneySupervisor},
+    {{:pleroma, :gopher}, Pleroma.Gopher.Server},
+    {{:pleroma, Pleroma.Captcha, [:seconds_valid]}, Pleroma.Web.Endpoint},
+    {{:pleroma, Pleroma.Upload, [:proxy_remote]},
+     Pleroma.Application.StartUpDependencies.adapter_module()},
+    {{:pleroma, :instance, [:upload_limit]}, Pleroma.Web.Endpoint},
+    {{:pleroma, :fed_sockets, [:enabled]}, Pleroma.Web.Endpoint},
+    {:eshhd, :eshhd},
+    {:ex_aws, :ex_aws}
+  ]
+
+  def start_link(opts) do
+    opts = Keyword.put_new(opts, :relations, @config_path_mods_relation)
+
+    GenServer.start_link(__MODULE__, opts, name: opts[:name] || __MODULE__)
+  end
+
+  @impl true
+  def init(opts) do
+    init_state = %{
+      dynamic_supervisor: opts[:dynamic_supervisor],
+      relations: opts[:relations],
+      reboot_paths: [],
+      pids: %{}
+    }
+
+    {:ok, init_state}
+  end
+
+  def start_dependency(module, server \\ __MODULE__) do
+    GenServer.call(server, {:start_dependency, module})
+  end
+
+  def need_reboot?(server \\ __MODULE__) do
+    GenServer.call(server, :need_reboot?)
+  end
+
+  def restart_dependencies(server \\ __MODULE__) do
+    GenServer.call(server, :restart_dependencies)
+  end
+
+  def clear_state(server \\ __MODULE__) do
+    GenServer.call(server, :clear_state)
+  end
+
+  def save_config_paths_for_restart(changes, server \\ __MODULE__) do
+    GenServer.call(server, {:save_config_paths, changes})
+  end
+
+  @impl true
+  def handle_call({:start_dependency, module}, _, state) do
+    {result, state} =
+      with {pid, state} when is_pid(pid) <- start_module(module, state) do
+        {{:ok, pid}, state}
+      else
+        error -> {error, state}
+      end
+
+    {:reply, result, state}
+  end
+
+  @impl true
+  def handle_call(:need_reboot?, _, state) do
+    {:reply, state[:reboot_paths] != [], state}
+  end
+
+  @impl true
+  def handle_call(:restart_dependencies, _, state) do
+    {paths, state} = Map.get_and_update(state, :reboot_paths, &{&1, []})
+    started_apps = Application.started_applications()
+
+    {result, state} =
+      Enum.reduce_while(paths, {:ok, state}, fn
+        path, {:ok, acc} when is_tuple(path) ->
+          case restart(path, acc, acc[:pids][path], with_terminate: true) do
+            {pid, state} when is_pid(pid) ->
+              {:cont, {:ok, state}}
+
+            :ignore ->
+              Logger.info("path #{inspect(path)} is ignored.")
+              {:cont, {:ok, acc}}
+
+            error ->
+              {:halt, {error, acc}}
+          end
+
+        app, {:ok, acc}
+        when is_atom(app) and app not in [:logger, :quack, :pleroma, :prometheus, :postgrex] ->
+          restart_app(app, started_apps)
+          {:cont, {:ok, acc}}
+      end)
+
+    {:reply, result, state}
+  end
+
+  @impl true
+  def handle_call(:clear_state, _, state) do
+    state =
+      state
+      |> Map.put(:reboot_paths, [])
+      |> Map.put(:pids, %{})
+
+    {:reply, :ok, state}
+  end
+
+  @impl true
+  def handle_call({:save_config_paths, changes}, _, state) do
+    paths =
+      Enum.reduce(changes, state[:reboot_paths], fn
+        %{group: group, key: key, value: value}, acc ->
+          with {path, _} <- find_relation(state[:relations], group, key, value) do
+            if path not in acc do
+              [path | acc]
+            else
+              acc
+            end
+          else
+            _ ->
+              acc
+          end
+      end)
+
+    {:reply, paths, put_in(state[:reboot_paths], paths)}
+  end
+
+  @impl true
+  def handle_info({:DOWN, _ref, :process, pid, _reason}, state) do
+    updated_state =
+      with {path, ^pid} <-
+             Enum.find(state[:pids], fn {_, registered_pid} -> registered_pid == pid end) do
+        {_new_pid, new_state} = restart(path, state, pid)
+        new_state
+      else
+        _ -> state
+      end
+
+    {:noreply, updated_state}
+  end
+
+  defp start_module(module, state) do
+    with {:ok, relations} <- find_relations(state[:relations], module) do
+      start_module(module, relations, state)
+    end
+  end
+
+  defp start_module(module, relations, state) do
+    spec =
+      module
+      |> Pleroma.Application.StartUpDependencies.spec()
+      |> Supervisor.child_spec(restart: :temporary)
+
+    with {:ok, pid} <-
+           DynamicSupervisor.start_child(
+             state[:dynamic_supervisor],
+             spec
+           ) do
+      pids = Map.new(relations, fn {path, _} -> {path, pid} end)
+      Process.monitor(pid)
+      {pid, put_in(state[:pids], Map.merge(state[:pids], pids))}
+    end
+  end
+
+  defp restart(path, state, pid, opts \\ [])
+
+  defp restart(path, state, nil, _) do
+    with {_, module} <- find_relation(state[:relations], path) do
+      start_module(module, state)
+    end
+  end
+
+  defp restart(path, state, pid, opts) when is_pid(pid) do
+    with {_, module} <- find_relation(state[:relations], path),
+         {:ok, relations} <- find_relations(state[:relations], module) do
+      if opts[:with_terminate] do
+        :ok = DynamicSupervisor.terminate_child(state[:dynamic_supervisor], pid)
+      end
+
+      paths_for_remove = Enum.map(relations, fn {path, _} -> path end)
+      state = put_in(state[:pids], Map.drop(state[:pids], paths_for_remove))
+
+      start_module(module, relations, state)
+    end
+  end
+
+  defp restart_app(app, started_applications) do
+    with {^app, _, _} <- List.keyfind(started_applications, app, 0) do
+      :ok = Application.stop(app)
+      :ok = Application.start(app)
+    else
+      nil ->
+        Logger.info("#{app} is not started.")
+
+      error ->
+        error
+        |> inspect()
+        |> Logger.error()
+    end
+  end
+
+  defp find_relations(relations, module) do
+    case Enum.filter(relations, fn {_, mod} -> mod == module end) do
+      [] ->
+        {:error, :relations_not_found}
+
+      relations ->
+        {:ok, relations}
+    end
+  end
+
+  defp find_relation(relations, group, key, value) do
+    Enum.find(relations, fn
+      {g, _} when is_atom(g) ->
+        g == group
+
+      {{g, k}, _} ->
+        g == group and k == key
+
+      {{g, k, subkeys}, _} ->
+        g == group and k == key and Enum.any?(Keyword.keys(value), &(&1 in subkeys))
+    end)
+  end
+
+  def find_relation(relations, path) do
+    with nil <- Enum.find(relations, fn {key, _} -> key == path end) do
+      {:error, :relation_not_found}
+    end
+  end
+end
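A short usage sketch of the relation mechanism above, assuming the GenServer is running under its default name and was given a :dynamic_supervisor on start. The %{group: _, key: _, value: _} shape mirrors how Pleroma.ConfigDB changes are consumed in Environment below:

    # A config change whose path {:pleroma, :gopher} matches the relation pointing
    # at Pleroma.Gopher.Server, so that path gets queued for restart.
    changes = [%{group: :pleroma, key: :gopher, value: [enabled: true]}]

    Pleroma.Application.ConfigDependentDeps.save_config_paths_for_restart(changes)
    #=> [{:pleroma, :gopher}]

    Pleroma.Application.ConfigDependentDeps.need_reboot?()
    #=> true

    # Starts (or terminates and restarts) Pleroma.Gopher.Server under the dynamic
    # supervisor; bare-atom paths such as :ex_aws restart the whole OTP application.
    Pleroma.Application.ConfigDependentDeps.restart_dependencies()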
diff --git a/lib/pleroma/application/environment.ex b/lib/pleroma/application/environment.ex
new file mode 100644
index 000000000..589be4726
--- /dev/null
+++ b/lib/pleroma/application/environment.ex
@@ -0,0 +1,103 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Application.Environment do
+  @moduledoc """
+  Overwrites the environment config with settings from the config file or database.
+  """
+
+  require Logger
+
+  @doc """
+  Called on Pleroma start.
+  Config-dependent parts don't require a restart, because they are not started yet.
+  But already started apps do need a restart.
+  """
+  @spec load_from_db_and_update(keyword()) :: :ok
+  def load_from_db_and_update(opts \\ []) do
+    Pleroma.ConfigDB.all()
+    |> update(opts)
+  end
+
+  @spec update([Pleroma.ConfigDB.t()], keyword()) :: :ok
+  def update(changes, opts \\ []) when is_list(changes) do
+    if Pleroma.Config.get(:configurable_from_database) do
+      defaults = Pleroma.Config.Holder.default_config()
+
+      changes
+      |> filter_logger()
+      |> prepare_logger_changes(defaults)
+      |> Enum.each(&configure_logger/1)
+
+      changes
+      |> Pleroma.ConfigDB.merge_changes_with_defaults(defaults)
+      |> Enum.each(&update_env(&1))
+
+      cond do
+        opts[:pleroma_start] ->
+          # restart only apps on pleroma start
+          changes
+          |> Enum.filter(fn %{group: group} ->
+            group not in [:logger, :quack, :pleroma, :prometheus, :postgrex]
+          end)
+          |> Pleroma.Application.ConfigDependentDeps.save_config_paths_for_restart()
+
+          Pleroma.Application.ConfigDependentDeps.restart_dependencies()
+
+        opts[:only_update] ->
+          Pleroma.Application.ConfigDependentDeps.save_config_paths_for_restart(changes)
+
+        true ->
+          nil
+      end
+    end
+
+    :ok
+  end
+
+  defp filter_logger(changes) do
+    Enum.filter(changes, fn %{group: group} -> group in [:logger, :quack] end)
+  end
+
+  defp prepare_logger_changes(changes, defaults) do
+    Enum.map(changes, fn %{group: group} = change ->
+      {change, Pleroma.ConfigDB.merge_change_value_with_default(change, defaults[group])}
+    end)
+  end
+
+  defp configure_logger({%{group: :quack}, merged_value}) do
+    Logger.configure_backend(Quack.Logger, merged_value)
+  end
+
+  defp configure_logger({%{group: :logger} = change, merged_value}) do
+    if change.value[:backends] do
+      Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)
+
+      Enum.each(merged_value[:backends], &Logger.add_backend/1)
+    end
+
+    if change.value[:console] do
+      console = merged_value[:console]
+      console = put_in(console[:format], console[:format] <> "\n")
+
+      Logger.configure_backend(:console, console)
+    end
+
+    if change.value[:ex_syslogger] do
+      Logger.configure_backend({ExSyslogger, :ex_syslogger}, merged_value[:ex_syslogger])
+    end
+
+    Logger.configure(merged_value)
+  end
+
+  defp update_env(%{group: group, key: key, value: nil}), do: Application.delete_env(group, key)
+
+  defp update_env(%{group: group, value: config} = change) do
+    if group in Pleroma.ConfigDB.groups_without_keys() do
+      Application.put_all_env([{group, config}])
+    else
+      Application.put_env(group, change.key, config)
+    end
+  end
+end
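A minimal sketch of the update flow, assuming :configurable_from_database is enabled and using the %{group: _, key: _, value: _} fields that Environment reads from Pleroma.ConfigDB entries:

    # A database-backed setting for {:pleroma, :instance}.
    change = %Pleroma.ConfigDB{group: :pleroma, key: :instance, value: [name: "My instance"]}

    # Merges the change with the compile-time defaults, writes the result into the
    # application environment and, with only_update: true, records any matching
    # reboot paths in ConfigDependentDeps (this particular key/value matches none).
    Pleroma.Application.Environment.update([change], only_update: true)

    Application.get_env(:pleroma, :instance)[:name]
    #=> "My instance"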
+ """ + @spec load_from_db_and_update(keyword()) :: :ok + def load_from_db_and_update(opts \\ []) do + Pleroma.ConfigDB.all() + |> update(opts) + end + + @spec update([Pleroma.ConfigDB.t()], keyword()) :: :ok + def update(changes, opts \\ []) when is_list(changes) do + if Pleroma.Config.get(:configurable_from_database) do + defaults = Pleroma.Config.Holder.default_config() + + changes + |> filter_logger() + |> prepare_logger_changes(defaults) + |> Enum.each(&configure_logger/1) + + changes + |> Pleroma.ConfigDB.merge_changes_with_defaults(defaults) + |> Enum.each(&update_env(&1)) + + cond do + opts[:pleroma_start] -> + # restart only apps on pleroma start + changes + |> Enum.filter(fn %{group: group} -> + group not in [:logger, :quack, :pleroma, :prometheus, :postgrex] + end) + |> Pleroma.Application.ConfigDependentDeps.save_config_paths_for_restart() + + Pleroma.Application.ConfigDependentDeps.restart_dependencies() + + opts[:only_update] -> + Pleroma.Application.ConfigDependentDeps.save_config_paths_for_restart(changes) + + true -> + nil + end + end + + :ok + end + + defp filter_logger(changes) do + Enum.filter(changes, fn %{group: group} -> group in [:logger, :quack] end) + end + + defp prepare_logger_changes(changes, defaults) do + Enum.map(changes, fn %{group: group} = change -> + {change, Pleroma.ConfigDB.merge_change_value_with_default(change, defaults[group])} + end) + end + + defp configure_logger({%{group: :quack}, merged_value}) do + Logger.configure_backend(Quack.Logger, merged_value) + end + + defp configure_logger({%{group: :logger} = change, merged_value}) do + if change.value[:backends] do + Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1) + + Enum.each(merged_value[:backends], &Logger.add_backend/1) + end + + if change.value[:console] do + console = merged_value[:console] + console = put_in(console[:format], console[:format] <> "\n") + + Logger.configure_backend(:console, console) + end + + if change.value[:ex_syslogger] do + Logger.configure_backend({ExSyslogger, :ex_syslogger}, merged_value[:ex_syslogger]) + end + + Logger.configure(merged_value) + end + + defp update_env(%{group: group, key: key, value: nil}), do: Application.delete_env(group, key) + + defp update_env(%{group: group, value: config} = change) do + if group in Pleroma.ConfigDB.groups_without_keys() do + Application.put_all_env([{group, config}]) + else + Application.put_env(group, change.key, config) + end + end +end diff --git a/lib/pleroma/application/requirements.ex b/lib/pleroma/application/requirements.ex new file mode 100644 index 000000000..644769557 --- /dev/null +++ b/lib/pleroma/application/requirements.ex @@ -0,0 +1,265 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Application.Requirements do + @moduledoc """ + The module represents the collection of validations to runs before start server. + """ + + defmodule VerifyError, do: defexception([:message]) + + alias Pleroma.Config + alias Pleroma.Helpers.MediaHelper + + import Ecto.Query + + require Logger + + @spec verify!() :: :ok | VerifyError.t() + def verify! 
do + adapter = Application.get_env(:tesla, :adapter) + + :ok + |> check_system_commands!() + |> check_confirmation_accounts!() + |> check_migrations_applied!() + |> check_welcome_message_config!() + |> check_rum!() + |> check_repo_pool_size!() + |> check_otp_version!(adapter) + |> handle_result!() + end + + defp handle_result!(:ok), do: :ok + defp handle_result!({:error, message}), do: raise(VerifyError, message: message) + + defp check_welcome_message_config!(:ok) do + if Pleroma.Config.get([:welcome, :email, :enabled], false) and + not Pleroma.Emails.Mailer.enabled?() do + Logger.warn(""" + To send welcome emails, you need to enable the mailer. + Welcome emails will NOT be sent with the current config. + + Enable the mailer: + config :pleroma, Pleroma.Emails.Mailer, enabled: true + """) + end + + :ok + end + + defp check_welcome_message_config!(result), do: result + + # Checks account confirmation email + # + def check_confirmation_accounts!(:ok) do + if Pleroma.Config.get([:instance, :account_activation_required]) && + not Pleroma.Emails.Mailer.enabled?() do + Logger.warn(""" + Account activation is required, but the mailer is disabled. + Users will NOT be able to confirm their accounts with this config. + Either disable account activation or enable the mailer. + + Disable account activation: + config :pleroma, :instance, account_activation_required: false + + Enable the mailer: + config :pleroma, Pleroma.Emails.Mailer, enabled: true + """) + end + + :ok + end + + def check_confirmation_accounts!(result), do: result + + # Checks for pending migrations. + # + def check_migrations_applied!(:ok) do + unless Pleroma.Config.get( + [:i_am_aware_this_may_cause_data_loss, :disable_migration_check], + false + ) do + {_, res, _} = + Ecto.Migrator.with_repo(Pleroma.Repo, fn repo -> + down_migrations = + Ecto.Migrator.migrations(repo) + |> Enum.reject(fn + {:up, _, _} -> true + {:down, _, _} -> false + end) + + if length(down_migrations) > 0 do + down_migrations_text = + Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end) + + Logger.error( + "The following migrations were not applied:\n#{down_migrations_text}" <> + "If you want to start Pleroma anyway, set\n" <> + "config :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true" + ) + + {:error, "Unapplied Migrations detected"} + else + :ok + end + end) + + res + else + :ok + end + end + + def check_migrations_applied!(result), do: result + + # Checks for settings of RUM indexes. 
+ # + defp check_rum!(:ok) do + {_, res, _} = + Ecto.Migrator.with_repo(Pleroma.Repo, fn repo -> + migrate = + from(o in "columns", + where: o.table_name == "objects", + where: o.column_name == "fts_content" + ) + |> repo.exists?(prefix: "information_schema") + + setting = Pleroma.Config.get([:database, :rum_enabled], false) + + do_check_rum!(setting, migrate) + end) + + res + end + + defp check_rum!(result), do: result + + defp do_check_rum!(setting, migrate) do + case {setting, migrate} do + {true, false} -> + Logger.error( + "Use `RUM` index is enabled, but were not applied migrations for it.\n" <> + "If you want to start Pleroma anyway, set\n" <> + "config :pleroma, :database, rum_enabled: false\n" <> + "Otherwise apply the following migrations:\n" <> + "`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`" + ) + + {:error, "Unapplied RUM Migrations detected"} + + {false, true} -> + Logger.error( + "Detected applied migrations to use `RUM` index, but `RUM` isn't enable in settings.\n" <> + "If you want to use `RUM`, set\n" <> + "config :pleroma, :database, rum_enabled: true\n" <> + "Otherwise roll `RUM` migrations back.\n" <> + "`mix ecto.rollback --migrations-path priv/repo/optional_migrations/rum_indexing/`" + ) + + {:error, "RUM Migrations detected"} + + _ -> + :ok + end + end + + defp check_system_commands!(:ok) do + filter_commands_statuses = [ + check_filter!(Pleroma.Upload.Filters.Exiftool, "exiftool"), + check_filter!(Pleroma.Upload.Filters.Mogrify, "mogrify"), + check_filter!(Pleroma.Upload.Filters.Mogrifun, "mogrify") + ] + + preview_proxy_commands_status = + if !Config.get([:media_preview_proxy, :enabled]) or + MediaHelper.missing_dependencies() == [] do + true + else + Logger.error( + "The following dependencies required by Media preview proxy " <> + "(which is currently enabled) are not installed: " <> + inspect(MediaHelper.missing_dependencies()) + ) + + false + end + + if Enum.all?([preview_proxy_commands_status | filter_commands_statuses], & &1) do + :ok + else + {:error, + "System commands missing. Check logs and see `docs/installation` for more details."} + end + end + + defp check_system_commands!(result), do: result + + defp check_repo_pool_size!(:ok) do + if Pleroma.Config.get([Pleroma.Repo, :pool_size], 10) != 10 and + not Pleroma.Config.get([:dangerzone, :override_repo_pool_size], false) do + Logger.error(""" + !!!CONFIG WARNING!!! + + The database pool size has been altered from the recommended value of 10. + + Please revert or ensure your database is tuned appropriately and then set + `config :pleroma, :dangerzone, override_repo_pool_size: true`. + + If you are experiencing database timeouts, please check the "Optimizing + your PostgreSQL performance" section in the documentation. If you still + encounter issues after that, please open an issue on the tracker. 
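Every check above follows the same contract: it receives :ok, returns :ok or {:error, message}, and passes any earlier error straight through via a catch-all clause. A hypothetical extra check (check_example! is not part of this commit, shown only to illustrate the convention) would look like this:

    # Accept :ok, return :ok or {:error, message}.
    defp check_example!(:ok) do
      if Pleroma.Config.get([:instance, :name]) do
        :ok
      else
        {:error, "Instance name is not configured."}
      end
    end

    # Pass a previous error through unchanged so handle_result!/1 can raise it.
    defp check_example!(result), do: result

    # It would then be added as one more step in the verify!/0 pipeline:
    #   |> check_example!()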
+ """) + + {:error, "Repo.pool_size different than recommended value."} + else + :ok + end + end + + defp check_repo_pool_size!(result), do: result + + defp check_filter!(filter, command_required) do + filters = Config.get([Pleroma.Upload, :filters]) + + if filter in filters and not Pleroma.Utils.command_available?(command_required) do + Logger.error( + "#{filter} is specified in list of Pleroma.Upload filters, but the " <> + "#{command_required} command is not found" + ) + + false + else + true + end + end + + defp check_otp_version!(:ok, Tesla.Adapter.Gun) do + if version = Pleroma.OTPVersion.version() do + [major, minor] = + version + |> String.split(".") + |> Enum.map(&String.to_integer/1) + |> Enum.take(2) + + if (major == 22 and minor < 2) or major < 22 do + Logger.error(" + !!!OTP VERSION ERROR!!! + You are using gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificates chains. Please update your Erlang/OTP to at least 22.2. + ") + {:error, "OTP version error"} + else + :ok + end + else + Logger.error(" + !!!OTP VERSION ERROR!!! + To support correct handling of unordered certificates chains - OTP version must be > 22.2. + ") + {:error, "OTP version error"} + end + end + + defp check_otp_version!(result, _), do: result +end diff --git a/lib/pleroma/application/start_up_dependencies.ex b/lib/pleroma/application/start_up_dependencies.ex new file mode 100644 index 000000000..ce554a0fc --- /dev/null +++ b/lib/pleroma/application/start_up_dependencies.ex @@ -0,0 +1,182 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Application.StartUpDependencies do + alias Pleroma.Config + alias Pleroma.Web.Endpoint + + require Cachex.Spec + require Logger + + @type config_path() :: {atom(), atom()} | {atom(), atom(), [atom()]} + @type relation() :: {config_path(), module()} + + @spec start_all(Pleroma.Application.env()) :: + :ok | {:error, {:already_started, pid()} | :max_children | term()} + def start_all(env) do + with :ok <- start_common_deps(env), + :ok <- start_config_dependent_deps(env) do + :ok + end + end + + @spec adapter_module() :: module() + def adapter_module do + if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do + Pleroma.Gun.GunSupervisor + else + Pleroma.HTTP.HackneySupervisor + end + end + + @spec spec(module()) :: module() | {module(), keyword()} + def spec(Oban), do: {Oban, Config.get(Oban)} + + def spec(Pleroma.Web.StreamerRegistry) do + {Registry, + [ + name: Pleroma.Web.Streamer.registry(), + keys: :duplicate, + partitions: System.schedulers_online() + ]} + end + + def spec(child), do: child + + @spec cachex_spec({String.t(), keyword()}) :: :supervisor.child_spec() + def cachex_spec({type, opts}) do + %{ + id: String.to_atom("cachex_" <> type), + start: {Cachex, :start_link, [String.to_atom(type <> "_cache"), opts]}, + type: :worker + } + end + + defp start_common_deps(env) do + fun = fn child -> + DynamicSupervisor.start_child(Pleroma.Application.dynamic_supervisor(), spec(child)) + end + + [ + Pleroma.Emoji, + Pleroma.Stats, + Pleroma.JobQueueMonitor, + {Majic.Pool, [name: Pleroma.MajicPool, pool_size: Config.get([:majic_pool, :size], 2)]}, + %{ + id: :web_push_init, + start: {Task, :start_link, [&Pleroma.Web.Push.init/0]}, + restart: :temporary + } + ] + |> add_cachex_deps() + |> maybe_add_init_internal_fetch_actor_task(env) + |> maybe_add_background_migrator(env) + |> 
start_while(fun) + end + + defp start_config_dependent_deps(env) do + fun = fn child -> Pleroma.Application.ConfigDependentDeps.start_dependency(child) end + + [ + Pleroma.Web.Plugs.RateLimiter.Supervisor, + Oban, + Endpoint, + Pleroma.Gopher.Server + ] + |> add_http_children(env) + |> maybe_add(:streamer, env) + |> maybe_add_chat_child() + |> start_while(fun) + end + + defp start_while(deps, fun) do + Enum.reduce_while(deps, :ok, fn child, acc -> + case fun.(child) do + {:ok, _} -> + {:cont, acc} + + # consider this behavior is normal + :ignore -> + Logger.info("#{inspect(child)} is ignored.") + {:cont, acc} + + error -> + Logger.error("Child #{inspect(child)} can't be started. #{inspect(error)}") + {:halt, error} + end + end) + end + + @spec cachex_deps() :: [tuple()] + def cachex_deps do + captcha_clean_up_interval = + [Pleroma.Captcha, :seconds_valid] + |> Config.get!() + |> :timer.seconds() + + [ + {"used_captcha", expiration: Cachex.Spec.expiration(interval: captcha_clean_up_interval)}, + {"user", expiration: cachex_expiration(25_000, 1000), limit: 2500}, + {"object", expiration: cachex_expiration(25_000, 1000), limit: 2500}, + {"rich_media", + expiration: Cachex.Spec.expiration(default: :timer.minutes(120)), limit: 5000}, + {"scrubber", limit: 2500}, + {"idempotency", expiration: cachex_expiration(21_600, 60), limit: 2500}, + {"web_resp", limit: 2500}, + {"emoji_packs", expiration: cachex_expiration(300, 60), limit: 10}, + {"failed_proxy_url", limit: 2500}, + {"banned_urls", + expiration: Cachex.Spec.expiration(default: :timer.hours(24 * 30)), limit: 5_000}, + {"chat_message_id_idempotency_key", + expiration: cachex_expiration(:timer.minutes(2), :timer.seconds(60)), limit: 500_000} + ] + end + + defp add_cachex_deps(application_deps) do + cachex_deps() + |> Enum.reduce(application_deps, fn cachex_init_args, acc -> + [cachex_spec(cachex_init_args) | acc] + end) + end + + defp cachex_expiration(default, interval) do + Cachex.Spec.expiration(default: :timer.seconds(default), interval: :timer.seconds(interval)) + end + + defp maybe_add_init_internal_fetch_actor_task(children, :test), do: children + + defp maybe_add_init_internal_fetch_actor_task(children, _) do + [ + %{ + id: :internal_fetch_init, + start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]}, + restart: :temporary + } + | children + ] + end + + defp maybe_add_background_migrator(children, env) when env in [:test, :benchmark], do: children + + defp maybe_add_background_migrator(children, _) do + [Pleroma.Migrators.HashtagsTableMigrator | children] + end + + defp maybe_add(children, _, env) when env in [:test, :benchmark], do: children + defp maybe_add(children, :streamer, _), do: [Pleroma.Web.Streamer.registry() | children] + + defp add_http_children(children, :test) do + [Pleroma.HTTP.HackneySupervisor, Pleroma.Gun.GunSupervisor | children] + end + + defp add_http_children(children, _), do: [adapter_module() | children] + + defp maybe_add_chat_child(children) do + if Config.get([:chat, :enabled]) do + [Pleroma.Application.ChatSupervisor | children] + else + children + end + end +end |
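cachex_spec/1 turns the {name, options} tuples returned by cachex_deps/0 into plain child-spec maps for the dynamic supervisor. A quick sketch of what one entry expands to, using the "user" cache options from the list above:

    require Cachex.Spec

    opts = [
      expiration:
        Cachex.Spec.expiration(default: :timer.seconds(25_000), interval: :timer.seconds(1000)),
      limit: 2500
    ]

    Pleroma.Application.StartUpDependencies.cachex_spec({"user", opts})
    #=> %{
    #=>   id: :cachex_user,
    #=>   start: {Cachex, :start_link, [:user_cache, opts]},
    #=>   type: :worker
    #=> }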