# from main import settings_deploy_production as settings
import os
from tempfile import SpooledTemporaryFile
from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage


class StaticStorage(S3Boto3Storage):
    """S3 storage backend for collected static files.

    Objects are written under ``AWS_STATIC_LOCATION`` and stored with a
    public-read ACL so they can be served without signed URLs.
    """

    region_name = settings.AWS_S3_REGION_NAME
    location = settings.AWS_STATIC_LOCATION
    default_acl = 'public-read'

    def _get_security_token(self):
        # Skip the STS security-token lookup: plain access-key credentials
        # are used, so no session token is attached to requests.
        return None


class PublicMediaStorage(S3Boto3Storage):
    """S3 storage backend for user-uploaded media that is publicly readable."""

    region_name = settings.AWS_S3_REGION_NAME
    location = settings.AWS_PUBLIC_MEDIA_LOCATION
    default_acl = 'public-read'
    # Never replace an existing object; colliding names get a suffix instead.
    file_overwrite = False

    def _get_security_token(self):
        # No STS session token is attached to requests.
        return None


class PrivateMediaStorage(S3Boto3Storage):
    """S3 storage backend for private media.

    ``default_acl = 'private'`` keeps objects unreadable without a signed
    request, and ``custom_domain = False`` makes URLs go through S3 itself
    so signed query parameters are honored.
    """

    region_name = settings.AWS_S3_REGION_NAME
    location = settings.AWS_PRIVATE_MEDIA_LOCATION
    default_acl = 'private'
    file_overwrite = False
    custom_domain = False

    def _get_security_token(self):
        # No STS session token is attached to requests.
        return None

    def _save(self, name, content):
        """Upload *name* from a disposable copy of *content*.

        boto3 wrongly closes the file object it is handed during upload,
        whereas the storage backend expects ``content`` to still be open
        afterwards — so the bytes are first copied into a throwaway
        spooled temporary file that boto3 is free to close.
        """
        # Read the full payload from the beginning of the original file.
        content.seek(0, os.SEEK_SET)

        # SpooledTemporaryFile spills to disk past its in-memory threshold.
        # It is deleted when closed — by boto3 during the upload, or by the
        # `with` block if boto3 ever stops closing it.
        with SpooledTemporaryFile() as content_autoclose:
            content_autoclose.write(content.read())

            # Rewind the copy: write() left the position at EOF, and the
            # uploader reads from the current position onward.
            content_autoclose.seek(0, os.SEEK_SET)

            # Upload via the copy, which boto3 may auto-close.
            return super()._save(name, content_autoclose)
