Merge pull request #4 from dato/data_upload_max_size_mb

Support DATA_UPLOAD_MAX_MEMORY_MiB, only, in .env
Hugh Rundle 2024-02-06 18:25:44 +11:00 committed by GitHub
commit 8773caa26b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 8 additions and 6 deletions

@@ -138,9 +138,9 @@ TWO_FACTOR_LOGIN_MAX_SECONDS=60
 # Value should be a comma-separated list of host names.
 CSP_ADDITIONAL_HOSTS=
-# Increase if users are having trouble uploading BookWyrm export files.
-# Default value is 100MB
-DATA_UPLOAD_MAX_MEMORY_SIZE=104857600
 # Time before being logged out (in seconds)
-# SESSION_COOKIE_AGE=2592000 # current default: 30 days
+# SESSION_COOKIE_AGE=2592000 # current default: 30 days
+# Maximum allowed memory for file uploads (increase if users are having trouble
+# uploading BookWyrm export files).
+# DATA_UPLOAD_MAX_MEMORY_MiB=100
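The new .env knob is expressed in whole mebibytes and ships commented out, so the 100 MiB default applies unless an admin overrides it. Purely as an illustration (this value is not part of the commit), an instance whose users hit the limit on large export files could raise the cap to 500 MiB by uncommenting and editing the line:

    DATA_UPLOAD_MAX_MEMORY_MiB=500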

@@ -446,4 +446,6 @@ if HTTP_X_FORWARDED_PROTO:
 # user with the same username - in which case you should change it!
 INSTANCE_ACTOR_USERNAME = "bookwyrm.instance.actor"
-DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_SIZE", 104857600)
+# We only allow specifying DATA_UPLOAD_MAX_MEMORY_SIZE in MiB from .env
+# (note the difference in variable names).
+DATA_UPLOAD_MAX_MEMORY_SIZE = env.int("DATA_UPLOAD_MAX_MEMORY_MiB", 100) << 20
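The << 20 shift converts the mebibyte figure read from .env into the byte count Django expects for DATA_UPLOAD_MAX_MEMORY_SIZE. A quick sanity check in Python (mine, not part of the commit) confirms the new default works out to exactly the old hard-coded byte value, and that any override scales the same way:

    # Left-shifting by 20 bits multiplies by 2**20, i.e. converts MiB to bytes.
    assert 100 << 20 == 100 * 1024 * 1024 == 104857600  # old default, unchanged
    assert 500 << 20 == 524288000                        # hypothetical 500 MiB cap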