Update configs

Mayel de Borniol 2022-06-20 14:30:53 +12:00
parent 8f317f3fa4
commit c60a80ea59
27 changed files with 170 additions and 819 deletions

View file

@ -32,12 +32,12 @@ $ git clone https://github.com/bonfire-networks/bonfire-app.git bonfire
$ cd bonfire
```
3. Specify that you want to run in production:
`export MIX_ENV=prod`
3. Specify what flavour you want to run in production:
The first thing to do is choose what flavour of Bonfire you want to deploy, as each flavour uses different Docker images and sets of configs. For example, if you want to run the `classic` flavour (you may want to use direnv or something similar to persist this):
The first thing to do is choose what flavour of Bonfire you want to deploy, as each flavour uses different Docker images and sets of configs. For example, if you want to run the `classic` flavour:
`just config classic`
- `export MIX_ENV=prod FLAVOUR=classic` (you may also want to put this in the appropriate place in your system so your choice of flavour is remembered for next time)
- `just config`
This will initialise some default config (a `.env` file which won't be checked into git).

View file

@ -36,14 +36,14 @@ Bonfire is a flexible platform that powers a variety of social networks. The fir
- `reflow` (for community economic activities)
- `haha` (for learning new things)
Note that at the current time, the core team are focusing most of
their efforts on the classic flavour and this is where we recommend you start.
Note that at the current time, the core team are focusing most of their efforts on the classic flavour and this is where we recommend you start.
You first need to install [just](https://github.com/casey/just#packages), which is a handy tool (a `make` alternative) to run commands defined in `./justfile`.
So for example if you want to run the `classic` flavour, run:
`just config classic`
- `export FLAVOUR=classic` (you may also want to put this in the appropriate place in your system so your choice of flavour is remembered for next time)
- `just config`
### Configure

View file

@ -0,0 +1,10 @@
defmodule Bonfire.Repo.Migrations.ImportSharedUser do
use Ecto.Migration
import Bonfire.Data.SharedUser.Migration
# accounts & users
def up, do: migrate_shared_user()
def down, do: migrate_shared_user()
end
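This new migration simply delegates to a helper from the shared-users data extension. As a minimal sketch, it could also be invoked programmatically through Ecto's migrator rather than `mix ecto.migrate`; the repo module is the `Bonfire.Common.Repo` configured elsewhere in this commit, and the version below is a placeholder for the timestamp in the migration's filename:
```elixir
# Placeholder version: use the timestamp prefix of the actual migration file.
Ecto.Migrator.up(Bonfire.Common.Repo, 20_220_620_000_000, Bonfire.Repo.Migrations.ImportSharedUser)
```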

View file

@ -18,7 +18,7 @@ config :activity_pub, :instance,
federation_publisher_modules: [ActivityPubWeb.Publisher],
federation_reachability_timeout_days: 7,
federating: true,
rewrite_policy: [],
rewrite_policy: [Bonfire.Federate.ActivityPub.BoundariesMRF],
handle_unknown_activities: true
config :activity_pub, :http,
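The change above switches `rewrite_policy` from an empty list to one containing `Bonfire.Federate.ActivityPub.BoundariesMRF`, so boundaries are enforced on federated activities. As a rough sketch of the shape such a policy takes, assuming the `activity_pub` library follows the usual Pleroma-style MRF convention of a `filter/1` callback returning `{:ok, activity}` or `{:reject, reason}` (the module name and matching logic here are made up for illustration):
```elixir
defmodule MyInstance.ActivityPub.RejectNoActorMRF do
  # Assumption: the library exposes an ActivityPub.MRF behaviour with filter/1.
  @behaviour ActivityPub.MRF

  @impl true
  def filter(%{"actor" => actor} = activity) when is_binary(actor), do: {:ok, activity}
  def filter(_activity), do: {:reject, "activity has no actor"}
end
```
A module like this would be enabled the same way, by appending it to the `rewrite_policy` list.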

View file

@ -2,6 +2,9 @@ import Config
#### Base configuration
verbs = ["Boost", "Create", "Delete", "Edit", "Flag", "Follow", "Like", "Mention",
"Message", "Read", "Reply", "Request", "See", "Tag"]
# Choose password hashing backend
# Note that this corresponds with our dependencies in mix.exs
hasher = if config_env() in [:dev, :test], do: Pbkdf2, else: Argon2
@ -18,6 +21,7 @@ pointable_schema_extensions = [
:bonfire_data_activity_pub,
:bonfire_data_identity,
:bonfire_data_social,
:bonfire_data_edges,
:bonfire_tag,
:bonfire_classify,
:bonfire_data_shared_users,
@ -35,8 +39,19 @@ context_and_queries_extensions = pointable_schema_extensions ++ [
:bonfire_me,
:bonfire_social,
]
config :bonfire, :query_modules_search_path, context_and_queries_extensions
extensions_with_config = context_and_queries_extensions ++ [
:bonfire_boundaries,
:bonfire_federate_activitypub,
:bonfire_search,
:bonfire_mailer,
:bonfire_geolocate
]
config :bonfire, :verb_names, verbs
config :bonfire, :context_modules_search_path, context_and_queries_extensions
config :bonfire, :query_modules_search_path, context_and_queries_extensions
config :bonfire, :config_modules_search_path, extensions_with_config
# Search these apps/extensions for Verbs to index (i.e. they contain modules with a declare_verbs/0 function)
config :bonfire_data_access_control,
@ -150,7 +165,7 @@ common = fn names ->
end
end
edge = common.([:controlled, :activities, :request])
edge = common.([:controlled, :activities, :request, :created])
edges = common.([:controlled, :activities, :request, :created, :caretaker, :activity, :feed_publishes])
# first up, pointers could have all the mixins we're using. TODO
@ -343,7 +358,8 @@ config :bonfire_data_social, Activity,
@boost_ulid "300STANN0VNCERESHARESH0VTS"
@follow_ulid "70110WTHE1EADER1EADER1EADE"
has_many :feed_publishes, unquote(FeedPublish), unquote(mixin)
# ugly workaround needed for certain queries:
has_one :seen, unquote(Edge), foreign_key: :id, references: :id
# ugly workaround needed for certain queries (TODO: check if still needed)
has_one :activity, unquote(Activity), foreign_key: :id, references: :id
# mixins linked to the object rather than the activity:
has_one :created, unquote(Created), foreign_key: :id, references: :object_id
@ -372,7 +388,7 @@ config :bonfire_data_social, APActivity,
unquote_splicing(common.([:activity, :caretaker]))
end]
config :bonfire_data_social, Edge,
config :bonfire_data_edges, Edge,
[code: quote do
unquote_splicing(edge)
# TODO: requires composite foreign keys:
@ -461,7 +477,7 @@ config :bonfire_data_social, Post,
@like_ulid "11KES11KET0BE11KEDY0VKN0WS"
@boost_ulid "300STANN0VNCERESHARESH0VTS"
# mixins
unquote_splicing(common.([:activity, :caretaker, :created, :peered, :post_content, :replied]))
unquote_splicing(common.([:activities, :activity, :caretaker, :created, :peered, :post_content, :replied]))
# multimixins
unquote_splicing(common.([:controlled, :tagged, :tags, :files, :media, :feed_publishes]))
# has
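Besides moving `Edge` to the new `bonfire_data_edges` app, this file adds a `:config_modules_search_path` list alongside the existing search paths. These end up as ordinary application environment values; a minimal check of the lists set above, using standard `Application` calls:
```elixir
# Read back the search paths configured above.
context_paths = Application.get_env(:bonfire, :context_modules_search_path, [])
config_paths = Application.get_env(:bonfire, :config_modules_search_path, [])

# The apps added in this change should appear in the relevant lists.
true = :bonfire_data_edges in context_paths
true = :bonfire_search in config_paths
```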

View file

@ -1,10 +0,0 @@
import Config
config :bonfire_me,
templates_path: "lib"
config :bonfire_me, Bonfire.Me.Identity.Mails,
confirm_email: [subject: "Confirm your email - Bonfire"],
forgot_password: [subject: "Reset your password - Bonfire"]
#### Pointer class configuration

View file

@ -1,87 +0,0 @@
import Config
config :bonfire_social,
disabled: false
alias Bonfire.Data.Social.Post
alias Bonfire.Ecto.Acts, as: Ecto
alias Bonfire.Social.Acts.{
Activity,
ActivityPub,
Boundaries,
Caretaker,
Creator,
Edges,
Feeds,
Files,
LivePush,
MeiliSearch,
Posts,
Objects,
PostContents,
Tags,
Threaded,
}
delete_object = [
# Create a changeset for deletion
{Objects.Delete, on: :object},
# mark for deletion
{Bonfire.Ecto.Acts.Delete, on: :object,
delete_extra_associations: [
:tagged,
]
},
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our deletes
Ecto.Commit,
{MeiliSearch.Queue, on: :object}, # Enqueue for un-indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :object}, # Prepare for federation and add to deletion queue (oban).
]
config :bonfire_social, Bonfire.Social.Follows, []
config :bonfire_social, Bonfire.Social.Posts,
epics: [
publish: [
# Prep: a little bit of querying and a lot of preparing changesets
Posts.Publish, # Create a changeset for insertion
PostContents, # with a sanitised body and tags extracted,
{Caretaker, on: :post}, # a caretaker,
{Creator, on: :post}, # and a creator,
{Files, on: :post}, # possibly with uploaded files,
{Threaded, on: :post}, # possibly occurring in a thread,
{Tags, on: :post}, # with extracted tags fully hooked up,
{Boundaries, on: :post}, # and the appropriate boundaries established,
{Activity, on: :post}, # summarised by an activity,
{Feeds, on: :post}, # appearing in feeds.
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our inserts
Ecto.Commit,
# These things are free to happen casually in the background.
{LivePush, on: :post}, # Publish live feed updates via (in-memory) pubsub.
{MeiliSearch.Queue, on: :post}, # Enqueue for indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :post}, # Prepare for federation and do the queue insert (oban).
],
delete: delete_object,
]
config :bonfire_social, Bonfire.Social.Objects,
epics: [
delete: delete_object,
]

View file

@ -4,6 +4,7 @@ bonfire_data_access_control = "https://github.com/bonfire-networks/bonfire_data_
bonfire_data_activity_pub = "https://github.com/bonfire-networks/bonfire_data_activity_pub#main"
bonfire_data_identity = "https://github.com/bonfire-networks/bonfire_data_identity#main"
bonfire_data_social = "https://github.com/bonfire-networks/bonfire_data_social#main"
bonfire_data_edges = "https://github.com/bonfire-networks/bonfire_data_edges#main"
bonfire_ecto = "https://github.com/bonfire-networks/bonfire_ecto#main"
bonfire_epics = "https://github.com/bonfire-networks/bonfire_epics#main"
bonfire_me = "https://github.com/bonfire-networks/bonfire_me#main"

View file

@ -1,5 +1,5 @@
earmark = "~> 1.4.24" # handle markdown
earmark_parser = "~> 1.4.25" # parse markdown
earmark = "~> 1.5.0-pre1" # handle markdown
# earmark_parser = "~> 1.4.25" # parse markdown
# Web
# livebook = "~> 0.5.2"
surface = "~> 0.7.3"

View file

@ -2,7 +2,7 @@
# Add any extensions/deps with a package.json in their /assets directory here
# NOTE: any LV Hooks should also be added to ./deps_hooks.js
// TODO: make this more configurable? ie. autogenerate from active extensions with JS assets
# TODO: make this more configurable? ie. autogenerate from active extensions with JS assets
DEPS='bonfire_ui_common bonfire_editor_ck bonfire_editor_quill bonfire_geolocate bonfire_ui_kanban'

View file

@ -8,6 +8,10 @@ host = System.get_env("HOSTNAME", "localhost")
server_port = String.to_integer(System.get_env("SERVER_PORT", "4000"))
public_port = String.to_integer(System.get_env("PUBLIC_PORT", "4000"))
## load runtime configs directly via extension-provided modules
Bonfire.Common.Config.LoadExtensionsConfig.load_configs()
##
System.get_env("DATABASE_URL") || System.get_env("POSTGRES_PASSWORD") || System.get_env("CI") ||
raise """
Environment variables for database are missing.
@ -28,7 +32,7 @@ else
end
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||System.get_env("CI") ||
System.get_env("SECRET_KEY_BASE") || System.get_env("CI") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
@ -49,10 +53,14 @@ config :bonfire,
app_name: System.get_env("APP_NAME", "Bonfire"),
ap_base_path: System.get_env("AP_BASE_PATH", "/pub"),
github_token: System.get_env("GITHUB_TOKEN"),
show_debug_errors_in_dev: System.get_env("SHOW_DEBUG_IN_DEV"),
encryption_salt: encryption_salt,
signing_salt: signing_salt
start_server? = if config_env() == :test, do: System.get_env("START_SERVER", "true"), else: System.get_env("START_SERVER", "true")
config :bonfire, Bonfire.Web.Endpoint,
server: String.to_existing_atom(start_server?),
url: [
host: host,
port: public_port
@ -63,6 +71,18 @@ config :bonfire, Bonfire.Web.Endpoint,
secret_key_base: secret_key_base,
live_view: [signing_salt: signing_salt]
if System.get_env("SENTRY_DSN") do
IO.puts(
"Note: errors will be reported to Sentry."
)
config :sentry,
dsn: System.get_env("SENTRY_DSN")
if System.get_env("SENTRY_NAME") do
config :sentry, server_name: System.get_env("SENTRY_NAME")
end
end
# start prod-only config
if config_env() == :prod do
@ -71,14 +91,8 @@ if config_env() == :prod do
# ssl: true,
database: System.get_env("POSTGRES_DB", "bonfire"),
pool_size: String.to_integer(System.get_env("POOL_SIZE", "10")),
log: String.to_atom(System.get_env("DB_QUERIES_LOG_LEVEL", "false"))
log: String.to_atom(System.get_env("DB_QUERIES_LOG_LEVEL", "false")) # Note: keep this disabled if using ecto_dev_logger or EctoSparkles.Log instead #
config :sentry,
dsn: System.get_env("SENTRY_DSN")
if System.get_env("SENTRY_NAME") do
config :sentry, server_name: System.get_env("SENTRY_NAME")
end
end # prod only config
@ -88,135 +102,9 @@ if config_env() != :test do
config :bonfire, Bonfire.Common.Repo,
slow_query_ms: String.to_integer(System.get_env("SLOW_QUERY_MS", "100"))
# transactional emails
mail_blackhole = fn var ->
IO.puts(
"WARNING: The environment variable #{var} was not set or was set incorrectly, mail will NOT be sent."
)
config :bonfire, Bonfire.Mailer, adapter: Bamboo.LocalAdapter
end
mail_mailgun = fn ->
# API URI depends on whether you're registered with Mailgun in EU, US, etc (defaults to EU)
base_uri = System.get_env("MAIL_BASE_URI", "https://api.eu.mailgun.net/v3")
case System.get_env("MAIL_KEY") do
nil ->
mail_blackhole.("MAIL_KEY")
key ->
case System.get_env("MAIL_DOMAIN") do
nil ->
mail_blackhole.("MAIL_DOMAIN")
domain ->
case System.get_env("MAIL_FROM") do
nil ->
mail_blackhole.("MAIL_FROM")
from ->
IO.puts("NOTE: Transactional emails will be sent through Mailgun.")
config :bonfire, Bonfire.Mailer,
adapter: Bamboo.MailgunAdapter,
api_key: key,
base_uri: base_uri,
domain: domain,
reply_to: from
end
end
end
end
mail_smtp = fn ->
case System.get_env("MAIL_SERVER") do
nil ->
mail_blackhole.("MAIL_SERVER")
server ->
case System.get_env("MAIL_DOMAIN") do
nil ->
mail_blackhole.("MAIL_DOMAIN")
domain ->
case System.get_env("MAIL_USER") do
nil ->
mail_blackhole.("MAIL_USER")
user ->
case System.get_env("MAIL_PASSWORD") do
nil ->
mail_blackhole.("MAIL_PASSWORD")
password ->
case System.get_env("MAIL_FROM") do
nil ->
mail_blackhole.("MAIL_FROM")
from ->
IO.puts("NOTE: Transactional emails will be sent through SMTP.")
config :bonfire, Bonfire.Mailer,
adapter: Bamboo.SMTPAdapter,
server: server,
hostname: domain,
port: String.to_integer(System.get_env("MAIL_PORT", "587")),
username: user,
password: password,
tls: :always,
allowed_tls_versions: [:"tlsv1.2"],
ssl: false,
retries: 1,
auth: :always,
reply_to: from
end
end
end
end
end
end
case System.get_env("MAIL_BACKEND") do
"mailgun" -> mail_mailgun.()
"smtp" -> mail_smtp.()
_ -> mail_blackhole.("MAIL_BACKEND")
end
end
### copy-paste Bonfire extension configs that need to read env at runtime
## bonfire_search
config :bonfire_search,
disable_indexing: System.get_env("SEARCH_INDEXING_DISABLED", "false"),
instance: System.get_env("SEARCH_MEILI_INSTANCE", "http://localhost:7700"), # protocol, hostname and port
api_key: System.get_env("MEILI_MASTER_KEY", "make-sure-to-change-me") # secret key
## bonfire_livebook
if Code.ensure_loaded?(Livebook.Config) do
config :livebook, :root_path, Livebook.Config.root_path!("LIVEBOOK_ROOT_PATH")
if password = Livebook.Config.password!("LIVEBOOK_PASSWORD") do
config :livebook, authentication_mode: :password, password: password
else
config :livebook, authentication_mode: :token
config :livebook, token: System.get_env("LIVEBOOK_TOKEN", Livebook.Utils.random_id())
end
config :livebook,
:cookie,
Livebook.Config.cookie!("LIVEBOOK_COOKIE") || Livebook.Utils.random_cookie()
config :livebook,
:default_runtime,
Livebook.Config.default_runtime!("LIVEBOOK_DEFAULT_RUNTIME") ||
{Livebook.Runtime.Embedded, []}
if Code.ensure_loaded?(Livebook) do
Livebook.config_runtime()
end
# copy pasted config from extensions that needs to load at runtime
config :bonfire, :js_config,
mapbox_api_key: System.get_env("MAPBOX_API_KEY", "pk.eyJ1IjoibWF5ZWwiLCJhIjoiY2tlMmxzNXF5MGFpaDJ0bzR2M29id2EzOCJ9.QsmjD-zypsE0_wonLGCYlA")
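Most of the new runtime settings above are parsed from environment variable strings. A small worked example of how those strings resolve, in plain Elixir:
```elixir
# DB_QUERIES_LOG_LEVEL: "false" becomes the atom false, which disables Ecto
# query logging; an explicit level such as "debug" becomes :debug.
false = String.to_atom("false")
:debug = String.to_atom("debug")

# START_SERVER is parsed with String.to_existing_atom/1, which only accepts
# atoms that already exist ("true" and "false" always do).
true = String.to_existing_atom("true")

# SEARCH_INDEXING_DISABLED is kept as a string ("false"/"true") rather than
# converted, so any consumer has to compare it as a string.
System.get_env("SEARCH_INDEXING_DISABLED", "false") == "false"
```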

View file

@ -18,7 +18,7 @@ config :activity_pub, :instance,
federation_publisher_modules: [ActivityPubWeb.Publisher],
federation_reachability_timeout_days: 7,
federating: true,
rewrite_policy: [],
rewrite_policy: [Bonfire.Federate.ActivityPub.BoundariesMRF],
handle_unknown_activities: true
config :activity_pub, :http,

View file

@ -2,6 +2,9 @@ import Config
#### Base configuration
verbs = ["Boost", "Create", "Delete", "Edit", "Flag", "Follow", "Like", "Mention",
"Message", "Read", "Reply", "Request", "See", "Tag"]
# Choose password hashing backend
# Note that this corresponds with our dependencies in mix.exs
hasher = if config_env() in [:dev, :test], do: Pbkdf2, else: Argon2
@ -18,6 +21,7 @@ pointable_schema_extensions = [
:bonfire_data_activity_pub,
:bonfire_data_identity,
:bonfire_data_social,
:bonfire_data_edges,
:bonfire_tag,
:bonfire_classify,
:bonfire_data_shared_users,
@ -35,8 +39,18 @@ context_and_queries_extensions = pointable_schema_extensions ++ [
:bonfire_me,
:bonfire_social,
]
config :bonfire, :query_modules_search_path, context_and_queries_extensions
extensions_with_config = context_and_queries_extensions ++ [
:bonfire_boundaries,
:bonfire_federate_activitypub,
:bonfire_search,
:bonfire_mailer
]
config :bonfire, :verb_names, verbs
config :bonfire, :context_modules_search_path, context_and_queries_extensions
config :bonfire, :query_modules_search_path, context_and_queries_extensions
config :bonfire, :config_modules_search_path, extensions_with_config
# Search these apps/extensions for Verbs to index (i.e. they contain modules with a declare_verbs/0 function)
config :bonfire_data_access_control,
@ -150,7 +164,7 @@ common = fn names ->
end
end
edge = common.([:controlled, :activities, :request])
edge = common.([:controlled, :activities, :request, :created])
edges = common.([:controlled, :activities, :request, :created, :caretaker, :activity, :feed_publishes])
# first up, pointers could have all the mixins we're using. TODO
@ -343,7 +357,8 @@ config :bonfire_data_social, Activity,
@boost_ulid "300STANN0VNCERESHARESH0VTS"
@follow_ulid "70110WTHE1EADER1EADER1EADE"
has_many :feed_publishes, unquote(FeedPublish), unquote(mixin)
# ugly workaround needed for certain queries:
has_one :seen, unquote(Edge), foreign_key: :id, references: :id
# ugly workaround needed for certain queries (TODO: check if still needed)
has_one :activity, unquote(Activity), foreign_key: :id, references: :id
# mixins linked to the object rather than the activity:
has_one :created, unquote(Created), foreign_key: :id, references: :object_id
@ -372,7 +387,7 @@ config :bonfire_data_social, APActivity,
unquote_splicing(common.([:activity, :caretaker]))
end]
config :bonfire_data_social, Edge,
config :bonfire_data_edges, Edge,
[code: quote do
unquote_splicing(edge)
# TODO: requires composite foreign keys:
@ -461,7 +476,7 @@ config :bonfire_data_social, Post,
@like_ulid "11KES11KET0BE11KEDY0VKN0WS"
@boost_ulid "300STANN0VNCERESHARESH0VTS"
# mixins
unquote_splicing(common.([:activity, :caretaker, :created, :peered, :post_content, :replied]))
unquote_splicing(common.([:activities, :activity, :caretaker, :created, :peered, :post_content, :replied]))
# multimixins
unquote_splicing(common.([:controlled, :tagged, :tags, :files, :media, :feed_publishes]))
# has

View file

@ -1,10 +0,0 @@
import Config
config :bonfire_me,
templates_path: "lib"
config :bonfire_me, Bonfire.Me.Identity.Mails,
confirm_email: [subject: "Confirm your email - Bonfire"],
forgot_password: [subject: "Reset your password - Bonfire"]
#### Pointer class configuration

View file

@ -1,87 +0,0 @@
import Config
config :bonfire_social,
disabled: false
alias Bonfire.Data.Social.Post
alias Bonfire.Ecto.Acts, as: Ecto
alias Bonfire.Social.Acts.{
Activity,
ActivityPub,
Boundaries,
Caretaker,
Creator,
Edges,
Feeds,
Files,
LivePush,
MeiliSearch,
Posts,
Objects,
PostContents,
Tags,
Threaded,
}
delete_object = [
# Create a changeset for deletion
{Objects.Delete, on: :object},
# mark for deletion
{Bonfire.Ecto.Acts.Delete, on: :object,
delete_extra_associations: [
:tagged,
]
},
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our deletes
Ecto.Commit,
{MeiliSearch.Queue, on: :object}, # Enqueue for un-indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :object}, # Prepare for federation and add to deletion queue (oban).
]
config :bonfire_social, Bonfire.Social.Follows, []
config :bonfire_social, Bonfire.Social.Posts,
epics: [
publish: [
# Prep: a little bit of querying and a lot of preparing changesets
Posts.Publish, # Create a changeset for insertion
PostContents, # with a sanitised body and tags extracted,
{Caretaker, on: :post}, # a caretaker,
{Creator, on: :post}, # and a creator,
{Files, on: :post}, # possibly with uploaded files,
{Threaded, on: :post}, # possibly occurring in a thread,
{Tags, on: :post}, # with extracted tags fully hooked up,
{Boundaries, on: :post}, # and the appropriate boundaries established,
{Activity, on: :post}, # summarised by an activity,
{Feeds, on: :post}, # appearing in feeds.
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our inserts
Ecto.Commit,
# These things are free to happen casually in the background.
{LivePush, on: :post}, # Publish live feed updates via (in-memory) pubsub.
{MeiliSearch.Queue, on: :post}, # Enqueue for indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :post}, # Prepare for federation and do the queue insert (oban).
],
delete: delete_object,
]
config :bonfire_social, Bonfire.Social.Objects,
epics: [
delete: delete_object,
]

View file

@ -8,6 +8,10 @@ host = System.get_env("HOSTNAME", "localhost")
server_port = String.to_integer(System.get_env("SERVER_PORT", "4000"))
public_port = String.to_integer(System.get_env("PUBLIC_PORT", "4000"))
## load runtime configs directly via extension-provided modules
Bonfire.Common.Config.LoadExtensionsConfig.load_configs()
##
System.get_env("DATABASE_URL") || System.get_env("POSTGRES_PASSWORD") || System.get_env("CI") ||
raise """
Environment variables for database are missing.
@ -28,7 +32,7 @@ else
end
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||System.get_env("CI") ||
System.get_env("SECRET_KEY_BASE") || System.get_env("CI") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
@ -49,10 +53,14 @@ config :bonfire,
app_name: System.get_env("APP_NAME", "Bonfire"),
ap_base_path: System.get_env("AP_BASE_PATH", "/pub"),
github_token: System.get_env("GITHUB_TOKEN"),
show_debug_errors_in_dev: System.get_env("SHOW_DEBUG_IN_DEV"),
encryption_salt: encryption_salt,
signing_salt: signing_salt
start_server? = if config_env() == :test, do: System.get_env("START_SERVER", "true"), else: System.get_env("START_SERVER", "true")
config :bonfire, Bonfire.Web.Endpoint,
server: String.to_existing_atom(start_server?),
url: [
host: host,
port: public_port
@ -63,6 +71,18 @@ config :bonfire, Bonfire.Web.Endpoint,
secret_key_base: secret_key_base,
live_view: [signing_salt: signing_salt]
if System.get_env("SENTRY_DSN") do
IO.puts(
"Note: errors will be reported to Sentry."
)
config :sentry,
dsn: System.get_env("SENTRY_DSN")
if System.get_env("SENTRY_NAME") do
config :sentry, server_name: System.get_env("SENTRY_NAME")
end
end
# start prod-only config
if config_env() == :prod do
@ -71,14 +91,8 @@ if config_env() == :prod do
# ssl: true,
database: System.get_env("POSTGRES_DB", "bonfire"),
pool_size: String.to_integer(System.get_env("POOL_SIZE", "10")),
log: String.to_atom(System.get_env("DB_QUERIES_LOG_LEVEL", "false"))
log: String.to_atom(System.get_env("DB_QUERIES_LOG_LEVEL", "false")) # Note: keep this disabled if using ecto_dev_logger or EctoSparkles.Log instead #
config :sentry,
dsn: System.get_env("SENTRY_DSN")
if System.get_env("SENTRY_NAME") do
config :sentry, server_name: System.get_env("SENTRY_NAME")
end
end # prod only config
@ -88,131 +102,9 @@ if config_env() != :test do
config :bonfire, Bonfire.Common.Repo,
slow_query_ms: String.to_integer(System.get_env("SLOW_QUERY_MS", "100"))
# transactional emails
mail_blackhole = fn var ->
IO.puts(
"WARNING: The environment variable #{var} was not set or was set incorrectly, mail will NOT be sent."
)
config :bonfire, Bonfire.Mailer, adapter: Bamboo.LocalAdapter
end
mail_mailgun = fn ->
# API URI depends on whether you're registered with Mailgun in EU, US, etc (defaults to EU)
base_uri = System.get_env("MAIL_BASE_URI", "https://api.eu.mailgun.net/v3")
case System.get_env("MAIL_KEY") do
nil ->
mail_blackhole.("MAIL_KEY")
key ->
case System.get_env("MAIL_DOMAIN") do
nil ->
mail_blackhole.("MAIL_DOMAIN")
domain ->
case System.get_env("MAIL_FROM") do
nil ->
mail_blackhole.("MAIL_FROM")
from ->
IO.puts("NOTE: Transactional emails will be sent through Mailgun.")
config :bonfire, Bonfire.Mailer,
adapter: Bamboo.MailgunAdapter,
api_key: key,
base_uri: base_uri,
domain: domain,
reply_to: from
end
end
end
end
mail_smtp = fn ->
case System.get_env("MAIL_SERVER") do
nil ->
mail_blackhole.("MAIL_SERVER")
server ->
case System.get_env("MAIL_DOMAIN") do
nil ->
mail_blackhole.("MAIL_DOMAIN")
domain ->
case System.get_env("MAIL_USER") do
nil ->
mail_blackhole.("MAIL_USER")
user ->
case System.get_env("MAIL_PASSWORD") do
nil ->
mail_blackhole.("MAIL_PASSWORD")
password ->
case System.get_env("MAIL_FROM") do
nil ->
mail_blackhole.("MAIL_FROM")
from ->
IO.puts("NOTE: Transactional emails will be sent through SMTP.")
config :bonfire, Bonfire.Mailer,
adapter: Bamboo.SMTPAdapter,
server: server,
hostname: domain,
port: String.to_integer(System.get_env("MAIL_PORT", "587")),
username: user,
password: password,
tls: :always,
allowed_tls_versions: [:"tlsv1.2"],
ssl: false,
retries: 1,
auth: :always,
reply_to: from
end
end
end
end
end
end
case System.get_env("MAIL_BACKEND") do
"mailgun" -> mail_mailgun.()
"smtp" -> mail_smtp.()
_ -> mail_blackhole.("MAIL_BACKEND")
end
end
### copy-paste Bonfire extension configs that need to read env at runtime
## bonfire_search
config :bonfire_search,
disable_indexing: System.get_env("SEARCH_INDEXING_DISABLED", "false"),
instance: System.get_env("SEARCH_MEILI_INSTANCE", "http://localhost:7700"), # protocol, hostname and port
api_key: System.get_env("MEILI_MASTER_KEY", "make-sure-to-change-me") # secret key
## bonfire_livebook
if Code.ensure_loaded?(Livebook.Config) do
config :livebook, :root_path, Livebook.Config.root_path!("LIVEBOOK_ROOT_PATH")
if password = Livebook.Config.password!("LIVEBOOK_PASSWORD") do
config :livebook, authentication_mode: :password, password: password
else
config :livebook, authentication_mode: :token
config :livebook, token: System.get_env("LIVEBOOK_TOKEN", Livebook.Utils.random_id())
end
config :livebook,
:cookie,
Livebook.Config.cookie!("LIVEBOOK_COOKIE") || Livebook.Utils.random_cookie()
config :livebook,
:default_runtime,
Livebook.Config.default_runtime!("LIVEBOOK_DEFAULT_RUNTIME") ||
{Livebook.Runtime.Embedded, []}
if Code.ensure_loaded?(Livebook) do
Livebook.config_runtime()
end

View file

@ -19,7 +19,7 @@ config :activity_pub, :instance,
federation_publisher_modules: [ActivityPubWeb.Publisher],
federation_reachability_timeout_days: 7,
federating: true,
rewrite_policy: [],
rewrite_policy: [Bonfire.Federate.ActivityPub.BoundariesMRF],
handle_unknown_activities: true
config :activity_pub, :http,

View file

@ -2,6 +2,9 @@ import Config
#### Base configuration
verbs = ["Boost", "Create", "Delete", "Edit", "Flag", "Follow", "Like", "Mention",
"Message", "Read", "Reply", "Request", "See", "Tag"]
# Choose password hashing backend
# Note that this corresponds with our dependencies in mix.exs
hasher = if config_env() in [:dev, :test], do: Pbkdf2, else: Argon2
@ -18,6 +21,7 @@ pointable_schema_extensions = [
:bonfire_data_activity_pub,
:bonfire_data_identity,
:bonfire_data_social,
:bonfire_data_edges,
:bonfire_tag,
:bonfire_classify,
:bonfire_data_shared_users,
@ -35,8 +39,18 @@ context_and_queries_extensions = pointable_schema_extensions ++ [
:bonfire_me,
:bonfire_social,
]
config :bonfire, :query_modules_search_path, context_and_queries_extensions
extensions_with_config = context_and_queries_extensions ++ [
:bonfire_boundaries,
:bonfire_federate_activitypub,
:bonfire_search,
:bonfire_mailer
]
config :bonfire, :verb_names, verbs
config :bonfire, :context_modules_search_path, context_and_queries_extensions
config :bonfire, :query_modules_search_path, context_and_queries_extensions
config :bonfire, :config_modules_search_path, extensions_with_config
# Search these apps/extensions for Verbs to index (i.e. they contain modules with a declare_verbs/0 function)
config :bonfire_data_access_control,
@ -150,7 +164,7 @@ common = fn names ->
end
end
edge = common.([:controlled, :activities, :request])
edge = common.([:controlled, :activities, :request, :created])
edges = common.([:controlled, :activities, :request, :created, :caretaker, :activity, :feed_publishes])
# first up, pointers could have all the mixins we're using. TODO
@ -343,7 +357,8 @@ config :bonfire_data_social, Activity,
@boost_ulid "300STANN0VNCERESHARESH0VTS"
@follow_ulid "70110WTHE1EADER1EADER1EADE"
has_many :feed_publishes, unquote(FeedPublish), unquote(mixin)
# ugly workaround needed for certain queries:
has_one :seen, unquote(Edge), foreign_key: :id, references: :id
# ugly workaround needed for certain queries (TODO: check if still needed)
has_one :activity, unquote(Activity), foreign_key: :id, references: :id
# mixins linked to the object rather than the activity:
has_one :created, unquote(Created), foreign_key: :id, references: :object_id
@ -372,7 +387,7 @@ config :bonfire_data_social, APActivity,
unquote_splicing(common.([:activity, :caretaker]))
end]
config :bonfire_data_social, Edge,
config :bonfire_data_edges, Edge,
[code: quote do
unquote_splicing(edge)
# TODO: requires composite foreign keys:
@ -461,7 +476,7 @@ config :bonfire_data_social, Post,
@like_ulid "11KES11KET0BE11KEDY0VKN0WS"
@boost_ulid "300STANN0VNCERESHARESH0VTS"
# mixins
unquote_splicing(common.([:activity, :caretaker, :created, :peered, :post_content, :replied]))
unquote_splicing(common.([:activities, :activity, :caretaker, :created, :peered, :post_content, :replied]))
# multimixins
unquote_splicing(common.([:controlled, :tagged, :tags, :files, :media, :feed_publishes]))
# has

View file

@ -1,10 +0,0 @@
import Config
config :bonfire_me,
templates_path: "lib"
config :bonfire_me, Bonfire.Me.Identity.Mails,
confirm_email: [subject: "Confirm your email - Bonfire"],
forgot_password: [subject: "Reset your password - Bonfire"]
#### Pointer class configuration

View file

@ -1,87 +0,0 @@
import Config
config :bonfire_social,
disabled: false
alias Bonfire.Data.Social.Post
alias Bonfire.Ecto.Acts, as: Ecto
alias Bonfire.Social.Acts.{
Activity,
ActivityPub,
Boundaries,
Caretaker,
Creator,
Edges,
Feeds,
Files,
LivePush,
MeiliSearch,
Posts,
Objects,
PostContents,
Tags,
Threaded,
}
delete_object = [
# Create a changeset for deletion
{Objects.Delete, on: :object},
# mark for deletion
{Bonfire.Ecto.Acts.Delete, on: :object,
delete_extra_associations: [
:tagged,
]
},
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our deletes
Ecto.Commit,
{MeiliSearch.Queue, on: :object}, # Enqueue for un-indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :object}, # Prepare for federation and add to deletion queue (oban).
]
config :bonfire_social, Bonfire.Social.Follows, []
config :bonfire_social, Bonfire.Social.Posts,
epics: [
publish: [
# Prep: a little bit of querying and a lot of preparing changesets
Posts.Publish, # Create a changeset for insertion
PostContents, # with a sanitised body and tags extracted,
{Caretaker, on: :post}, # a caretaker,
{Creator, on: :post}, # and a creator,
{Files, on: :post}, # possibly with uploaded files,
{Threaded, on: :post}, # possibly occurring in a thread,
{Tags, on: :post}, # with extracted tags fully hooked up,
{Boundaries, on: :post}, # and the appropriate boundaries established,
{Activity, on: :post}, # summarised by an activity,
{Feeds, on: :post}, # appearing in feeds.
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our inserts
Ecto.Commit,
# These things are free to happen casually in the background.
{LivePush, on: :post}, # Publish live feed updates via (in-memory) pubsub.
{MeiliSearch.Queue, on: :post}, # Enqueue for indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :post}, # Prepare for federation and do the queue insert (oban).
],
delete: delete_object,
]
config :bonfire_social, Bonfire.Social.Objects,
epics: [
delete: delete_object,
]

View file

@ -8,6 +8,10 @@ host = System.get_env("HOSTNAME", "localhost")
server_port = String.to_integer(System.get_env("SERVER_PORT", "4000"))
public_port = String.to_integer(System.get_env("PUBLIC_PORT", "4000"))
## load runtime configs directly via extension-provided modules
Bonfire.Common.Config.LoadExtensionsConfig.load_configs()
##
System.get_env("DATABASE_URL") || System.get_env("POSTGRES_PASSWORD") || System.get_env("CI") ||
raise """
Environment variables for database are missing.
@ -28,7 +32,7 @@ else
end
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||System.get_env("CI") ||
System.get_env("SECRET_KEY_BASE") || System.get_env("CI") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
@ -48,12 +52,15 @@ config :bonfire,
host: host,
app_name: System.get_env("APP_NAME", "Bonfire"),
ap_base_path: System.get_env("AP_BASE_PATH", "/pub"),
invite_only: System.get_env("INVITE_ONLY", "true") !="false",
github_token: System.get_env("GITHUB_TOKEN"),
show_debug_errors_in_dev: System.get_env("SHOW_DEBUG_IN_DEV"),
encryption_salt: encryption_salt,
signing_salt: signing_salt
start_server? = if config_env() == :test, do: System.get_env("START_SERVER", "true"), else: System.get_env("START_SERVER", "true")
config :bonfire, Bonfire.Web.Endpoint,
server: String.to_existing_atom(start_server?),
url: [
host: host,
port: public_port
@ -64,161 +71,40 @@ config :bonfire, Bonfire.Web.Endpoint,
secret_key_base: secret_key_base,
live_view: [signing_salt: signing_salt]
if System.get_env("SENTRY_DSN") do
IO.puts(
"Note: errors will be reported to Sentry."
)
config :sentry,
dsn: System.get_env("SENTRY_DSN")
if System.get_env("SENTRY_NAME") do
config :sentry, server_name: System.get_env("SENTRY_NAME")
end
end
# start prod-only config
if config_env() == :prod do
config :logger, level: String.to_atom(System.get_env("LOG_LEVEL", "info"))
config :bonfire, Bonfire.Common.Repo,
# ssl: true,
database: System.get_env("POSTGRES_DB", "bonfire"),
pool_size: String.to_integer(System.get_env("POOL_SIZE", "10")),
log: String.to_atom(System.get_env("DB_QUERIES_LOG_LEVEL", "false"))
config :sentry, dsn: System.get_env("SENTRY_DSN")
log: String.to_atom(System.get_env("DB_QUERIES_LOG_LEVEL", "false")) # Note: keep this disabled if using ecto_dev_logger or EctoSparkles.Log instead #
end # prod only config
config :sentry, server_name: System.get_env("SENTRY_NAME") || host
# start prod and dev only config
if config_env() != :test do
config :bonfire, Bonfire.Common.Repo,
slow_query_ms: String.to_integer(System.get_env("SLOW_QUERY_MS", "100"))
# transactional emails
mail_blackhole = fn var ->
IO.puts(
"WARNING: The environment variable #{var} was not set or was set incorrectly, mail will NOT be sent."
)
config :bonfire, Bonfire.Mailer, adapter: Bamboo.LocalAdapter
end
mail_mailgun = fn ->
# API URI depends on whether you're registered with Mailgun in EU, US, etc (defaults to EU)
base_uri = System.get_env("MAIL_BASE_URI", "https://api.eu.mailgun.net/v3")
case System.get_env("MAIL_KEY") do
nil ->
mail_blackhole.("MAIL_KEY")
key ->
case System.get_env("MAIL_DOMAIN") do
nil ->
mail_blackhole.("MAIL_DOMAIN")
domain ->
case System.get_env("MAIL_FROM") do
nil ->
mail_blackhole.("MAIL_FROM")
from ->
IO.puts("NOTE: Transactional emails will be sent through Mailgun.")
config :bonfire, Bonfire.Mailer,
adapter: Bamboo.MailgunAdapter,
api_key: key,
base_uri: base_uri,
domain: domain,
reply_to: from
end
end
end
end
mail_smtp = fn ->
case System.get_env("MAIL_SERVER") do
nil ->
mail_blackhole.("MAIL_SERVER")
server ->
case System.get_env("MAIL_DOMAIN") do
nil ->
mail_blackhole.("MAIL_DOMAIN")
domain ->
case System.get_env("MAIL_USER") do
nil ->
mail_blackhole.("MAIL_USER")
user ->
case System.get_env("MAIL_PASSWORD") do
nil ->
mail_blackhole.("MAIL_PASSWORD")
password ->
case System.get_env("MAIL_FROM") do
nil ->
mail_blackhole.("MAIL_FROM")
from ->
IO.puts("NOTE: Transactional emails will be sent through SMTP.")
config :bonfire, Bonfire.Mailer,
adapter: Bamboo.SMTPAdapter,
server: server,
hostname: domain,
port: String.to_integer(System.get_env("MAIL_PORT", "587")),
username: user,
password: password,
tls: :always,
allowed_tls_versions: [:"tlsv1.2"],
ssl: false,
retries: 1,
auth: :always,
reply_to: from
end
end
end
end
end
end
case System.get_env("MAIL_BACKEND") do
"mailgun" -> mail_mailgun.()
"smtp" -> mail_smtp.()
_ -> mail_blackhole.("MAIL_BACKEND")
end
end
### copy-paste Bonfire extension configs that need to read env at runtime
## bonfire_search
config :bonfire_search,
disable_indexing: System.get_env("SEARCH_INDEXING_DISABLED", "false"),
instance: System.get_env("SEARCH_MEILI_INSTANCE", "http://localhost:7700"), # protocol, hostname and port
api_key: System.get_env("MEILI_MASTER_KEY", "make-sure-to-change-me") # secret key
## bonfire_livebook
if Code.ensure_loaded?(Livebook.Config) do
config :livebook, :root_path, Livebook.Config.root_path!("LIVEBOOK_ROOT_PATH")
if password = Livebook.Config.password!("LIVEBOOK_PASSWORD") do
config :livebook, authentication_mode: :password, password: password
else
config :livebook, authentication_mode: :token
config :livebook, token: System.get_env("LIVEBOOK_TOKEN", Livebook.Utils.random_id())
end
config :livebook,
:cookie,
Livebook.Config.cookie!("LIVEBOOK_COOKIE") || Livebook.Utils.random_cookie()
config :livebook,
:default_runtime,
Livebook.Config.default_runtime!("LIVEBOOK_DEFAULT_RUNTIME") ||
{Livebook.Runtime.Embedded, []}
if Code.ensure_loaded?(Livebook) do
Livebook.config_runtime()
end
# copy pasted config from extensions that needs to load at runtime
config :bonfire, :js_config,
mapbox_api_key: System.get_env("MAPBOX_API_KEY", "pk.eyJ1IjoibWF5ZWwiLCJhIjoiY2tlMmxzNXF5MGFpaDJ0bzR2M29id2EzOCJ9.QsmjD-zypsE0_wonLGCYlA")

View file

@ -1 +0,0 @@
../../classic/config/bonfire_me.exs

View file

@ -1,87 +0,0 @@
import Config
config :bonfire_social,
disabled: false
alias Bonfire.Data.Social.Post
alias Bonfire.Ecto.Acts, as: Ecto
alias Bonfire.Social.Acts.{
Activity,
ActivityPub,
Boundaries,
Caretaker,
Creator,
Edges,
Feeds,
Files,
LivePush,
MeiliSearch,
Posts,
Objects,
PostContents,
Tags,
Threaded,
}
delete_object = [
# Create a changeset for deletion
{Objects.Delete, on: :object},
# mark for deletion
{Bonfire.Ecto.Acts.Delete, on: :object,
delete_extra_associations: [
:tagged,
]
},
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our deletes
Ecto.Commit,
{MeiliSearch.Queue, on: :object}, # Enqueue for un-indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :object}, # Prepare for federation and add to deletion queue (oban).
]
config :bonfire_social, Bonfire.Social.Follows, []
config :bonfire_social, Bonfire.Social.Posts,
epics: [
publish: [
# Prep: a little bit of querying and a lot of preparing changesets
Posts.Publish, # Create a changeset for insertion
PostContents, # with a sanitised body and tags extracted,
{Caretaker, on: :post}, # a caretaker,
{Creator, on: :post}, # and a creator,
{Files, on: :post}, # possibly with uploaded files,
{Threaded, on: :post}, # possibly occurring in a thread,
{Tags, on: :post}, # with extracted tags fully hooked up,
{Boundaries, on: :post}, # and the appropriate boundaries established,
{Activity, on: :post}, # summarised by an activity,
{Feeds, on: :post}, # appearing in feeds.
# Now we have a short critical section
Ecto.Begin,
Ecto.Work, # Run our inserts
Ecto.Commit,
# These things are free to happen casually in the background.
{LivePush, on: :post}, # Publish live feed updates via (in-memory) pubsub.
{MeiliSearch.Queue, on: :post}, # Enqueue for indexing by meilisearch
# Oban would rather we put these here than in the transaction
# above because it knows better than us, obviously.
{ActivityPub, on: :post}, # Prepare for federation and do the queue insert (oban).
],
delete: delete_object,
]
config :bonfire_social, Bonfire.Social.Objects,
epics: [
delete: delete_object,
]

View file

@ -71,9 +71,15 @@ pre-setup flavour='classic':
@chmod 700 .erlang.cookie
# Initialise env files, and create some required folders, files and softlinks
config select_flavour:
config:
@just flavour $FLAVOUR
# Initialise a specific flavour, with its env files, and create some required folders, files and softlinks
flavour select_flavour:
@echo "Switching to flavour '$select_flavour'..."
@just pre-setup $select_flavour
@just deps-clean-data
@just js-deps-get
@echo "You can now edit your config for flavour '$select_flavour' in /.env and ./config/ more generally."
pre-init:
@ -88,10 +94,9 @@ init: pre-init services
# First run - prepare environment and dependencies
setup:
just pre-setup $FLAVOUR
just flavour $FLAVOUR
just build
just mix setup
just js-deps-get
# Prepare environment and dependencies
prepare:

View file

@ -13,6 +13,7 @@ defmodule Bonfire.API.GraphQL.Schema do
require Logger
alias Bonfire.API.GraphQL.SchemaUtils
alias Bonfire.API.GraphQL.Middleware.CollapseErrors
alias Absinthe.Resolution.Helpers
@doc """
Define dataloaders

View file

@ -21,7 +21,7 @@ defmodule Bonfire.MixProject do
deps_prefixes: [
docs: ["bonfire_", "pointers", "paginator", "ecto_shorts", "ecto_sparkles", "absinthe_client", "activity_pub", "arrows", "ecto_materialized_path", "flexto", "grumble", "linkify", "verbs", "voodoo", "waffle", "zest"],
test: ["bonfire_", "pointers", "paginator", "ecto_shorts", "ecto_sparkles", "activity_pub"],
data: ["bonfire_data_", "pointers", "bonfire_boundaries", "bonfire_tag", "bonfire_classify", "bonfire_geolocate", "bonfire_quantify", "bonfire_valueflows"],
data: ["bonfire_data_", "bonfire_data_edges", "pointers", "bonfire_boundaries", "bonfire_tag", "bonfire_classify", "bonfire_geolocate", "bonfire_quantify", "bonfire_valueflows"],
api: ["bonfire_me", "bonfire_social", "bonfire_tag", "bonfire_classify", "bonfire_geolocate", "bonfire_valueflows"],
localise: ["bonfire_"],
localise_self: []
@ -107,7 +107,7 @@ defmodule Bonfire.MixProject do
],
"bonfire.deps.update": ["deps.update " <> deps_to_update()],
"bonfire.deps.clean": ["deps.clean " <> deps_to_clean(:localise) <> " --build"],
"bonfire.deps.clean.data": ["deps.clean " <> deps_to_clean(:data) <> " --build"],
"bonfire.deps.clean.data": ["deps.clean " <> (deps_to_clean(:data) |> IO.inspect) <> " --build"],
"bonfire.deps.clean.api": ["deps.clean " <> deps_to_clean(:api) <> " --build"],
"bonfire.deps.recompile": ["deps.compile " <> deps_to_update() <> " --force"],
"bonfire.deps": ["bonfire.deps.update", "bonfire.deps.clean.data"],

View file

@ -31,5 +31,6 @@ ExUnit.start(
# end)
Application.put_env(:wallaby, :base_url, Bonfire.Web.Endpoint.url())
chromedriver_path = Bonfire.Common.Config.get([:wallaby, :chromedriver, :path])
# TODO: skip browser-based tests if no driver is available
if File.exists?(Bonfire.Common.Config.get([:wallaby, :chromedriver, :path])), do: {:ok, _} = Application.ensure_all_started(:wallaby)
if chromedriver_path && File.exists?(chromedriver_path), do: {:ok, _} = Application.ensure_all_started(:wallaby), else: IO.inspect("Note: Wallaby will not run because the chromedriver is missing")