subclass boto3 session instead of adding new env value

Thanks Dato!
Hugh Rundle 2024-01-29 14:10:36 +11:00
parent f96ddaa3e1
commit 5f7be848fc
2 changed files with 3 additions and 7 deletions

@@ -81,7 +81,6 @@ AWS_SECRET_ACCESS_KEY=
 # AWS_S3_CUSTOM_DOMAIN=None # "example-bucket-name.s3.fr-par.scw.cloud"
 # AWS_S3_REGION_NAME=None # "fr-par"
 # AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud"
-# S3_ENDPOINT_URL=None # same as AWS_S3_ENDPOINT_URL - needed for non-AWS for user exports
 # Commented are example values if you use Azure Blob Storage
 # USE_AZURE=true

@@ -29,9 +29,9 @@ logger = logging.getLogger(__name__)
 class BookwyrmAwsSession(BotoSession):
     """a boto session that always uses settings.AWS_S3_ENDPOINT_URL"""

-    def client(service_name, **kwargs):
+    def client(self, *args, **kwargs):  # pylint: disable=arguments-differ
         kwargs["endpoint_url"] = settings.AWS_S3_ENDPOINT_URL
-        return super().client(service_name, **kwargs)
+        return super().client("s3", *args, **kwargs)


 class BookwyrmExportJob(ParentJob):
@@ -42,9 +42,7 @@ class BookwyrmExportJob(ParentJob):
     else:
         storage = storage_backends.ExportsFileStorage

-    export_data = FileField(
-        null=True, storage=storage
-    )  # use custom storage backend here
+    export_data = FileField(null=True, storage=storage)
     export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
     json_completed = BooleanField(default=False)
@@ -70,7 +68,6 @@ class BookwyrmExportJob(ParentJob):
             self.json_completed = True
             self.save(update_fields=["json_completed"])

-            # add json file to tarfile
             tar_job = AddFileToTar.objects.create(
                 parent_job=self, parent_export_job=self
            )
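
For context, a minimal runnable sketch of the pattern this commit settles on: rather than reading a second endpoint env value, the session subclass pins every client it hands out to the one endpoint already configured for storage. The endpoint constant, region, credentials, bucket and object key below are placeholders for illustration, not BookWyrm's real settings or call sites, and the presigned-URL call at the end is just one example of using the resulting client with the stock boto3 API.

# sketch only: stands in for settings.AWS_S3_ENDPOINT_URL and friends
from boto3.session import Session as BotoSession

AWS_S3_ENDPOINT_URL = "https://s3.fr-par.scw.cloud"  # placeholder value


class BookwyrmAwsSession(BotoSession):
    """a boto session that always uses the configured S3 endpoint"""

    def client(self, *args, **kwargs):  # pylint: disable=arguments-differ
        # force every client onto the custom endpoint and onto S3,
        # so callers never pass a service name or endpoint themselves
        kwargs["endpoint_url"] = AWS_S3_ENDPOINT_URL
        return super().client("s3", *args, **kwargs)


# dummy credentials and region so presigning can run locally (placeholders)
session = BookwyrmAwsSession(
    aws_access_key_id="example-key-id",
    aws_secret_access_key="example-secret",
    region_name="fr-par",
)
s3 = session.client()

# example use: a time-limited download link for a (made-up) export archive
url = s3.generate_presigned_url(
    "get_object",
    Params={"Bucket": "example-bucket-name", "Key": "exports/export.tar.gz"},
    ExpiresIn=3600,
)
print(url)

Because client() hard-codes "s3" and the endpoint, the subclass deliberately narrows boto3's Session.client() signature, which is why the pylint arguments-differ check is disabled in the committed code.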