-
Notifications
You must be signed in to change notification settings - Fork 103
Description
When I try to read the Media Library while connected to my S3 Static repository, I get this error.
It seems that boto3's `s3.Bucket` object doesn't have the `list` method that filebrowser_safe's storage mixin (written for the boto2 API) expects.
Everything works fine when I use a local Static folder.
ERROR MESSAGE:
| Request Method: | GET |
|---|---|
| http://127.0.0.1:8080/energy/admin/media-library/browse/ | |
| 1.10.8 | |
| AttributeError | |
| 's3.Bucket' object has no attribute 'list' | |
| /Users/user1/anaconda/envs/QS36/lib/python3.6/site-packages/filebrowser_safe/storage.py in isdir, line 85 | |
| /Users/user1/anaconda/envs/QS36/bin/python | |
| 3.6.2 | |
| ['/Users/user1/GitHub/WebPresence/qsunifybanner', '/Users/user1/anaconda/envs/QS36/lib/python36.zip', '/Users/user1/anaconda/envs/QS36/lib/python3.6', '/Users/user1/anaconda/envs/QS36/lib/python3.6/lib-dynload', '/Users/user1/.local/lib/python3.6/site-packages', '/Users/user1/anaconda/envs/QS36/lib/python3.6/site-packages'] |
SETTINGS:
# Optionally load local_settings.py, registering it as a real submodule of
# the project app so "from PROJECT_APP.local_settings import ..." works.
f = os.path.join(PROJECT_APP_PATH, "local_settings.py")
if os.path.exists(f):
    import sys
    import imp  # NOTE(review): deprecated since 3.4 -- importlib is the modern API
    module_name = "%s.local_settings" % PROJECT_APP
    module = imp.new_module(module_name)
    # BUG FIX: the module path attribute is the dunder ``__file__``;
    # assigning a plain ``file`` attribute leaves ``__file__`` unset.
    module.__file__ = f
    sys.modules[module_name] = module
    # Execute the file in the current (settings) namespace so its
    # assignments override the defaults defined above.
    exec(open(f, "rb").read())
else:
########### Bucket and creds
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_PRELOAD_METADATA = True # helps collectstatic do updates
########## Optional settings
STATIC_ROOT = os.path.join(PROJECT_ROOT, "static")
# Static storage
STATICFILES_LOCATION = 'static'
# Media storage
MEDIAFILES_LOCATION = 'static/media'
STATICFILES_STORAGE = 'custom_storages.custom_storages.StaticStorage'
DEFAULT_FILE_STORAGE = 'custom_storages.custom_storages.MediaStorage'
AWS_S3_CUSTOM_DOMAIN = 's3.amazonaws.com/%s' % AWS_STORAGE_BUCKET_NAME # if not using cloudfront
STATIC_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, STATICFILES_LOCATION)
MEDIA_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION)
########### Optional settings
# tells AWS to add properties to the files, such that when they
# get served from s3 they come with this header telling the browser to cache for
# life
AWS_HEADERS = {
'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT',
'Cache-Control': 'max-age=86400',
}
AWS_S3_OBJECT_PARAMETERS = {
'CacheControl': 'max-age=86400',
}
# Used to make sure that only changed files are uploaded with collectstatic
AWS_PRELOAD_METADATA = True
AWS_QUERYSTRING_AUTH = False
########### required
# ADMIN_MEDIA_PREFIX = STATIC_URL + 'grappelli/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
# MEDIA_URL = "https://%s/%s/" % (CLOUDFRONT_DOMAIN, MEDIAFILES_LOCATION)
MEDIA_ROOT= ""
#############################################
# If using django-compressor it needs to temp cache files somewhere
# make sure this matches your COMPRESS_ROOT too
COMPRESS_STORAGE = STATICFILES_STORAGE
COMPRESS_URL = STATIC_URL
COMPRESS_ROOT = STATIC_ROOT
COMPRESS_ENABLED = True
COMPRESS_PARSER = 'compressor.parser.HtmlParser'
CUSTOM STORAGE:
class CachedS3BotoStorage(S3Boto3Storage):
    """
    S3 storage backend that also keeps a local copy of each saved file,
    so django-compressor can read its output back without an S3
    round-trip.
    """

    def __init__(self, *args, **kwargs):
        # BUG FIX: the constructor must be the dunder ``__init__``; a plain
        # ``init`` method is never invoked by Python on instantiation, so
        # ``local_storage`` was never set up.
        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
        self.local_storage = get_storage_class(
            "compressor.storage.CompressorFileStorage")()

    def save(self, name, content):
        # Write the local copy first, then stream that copy up to S3.
        self.local_storage._save(name, content)
        super(CachedS3BotoStorage, self).save(name, self.local_storage._open(name))
        return name
# NOTE(review): this mix is the likely cause of the reported
# "AttributeError: 's3.Bucket' object has no attribute 'list'".
# filebrowser_safe's S3BotoStorageMixin targets the old boto(2) API -- its
# isdir() calls ``bucket.list``, which a boto3 ``s3.Bucket`` does not have
# (boto3 uses ``bucket.objects.filter`` / ``list_objects``).  Combining it
# with S3Boto3Storage cannot work until a boto3-compatible mixin is used.
# Also note the mixin is listed *after* the storage class, so in the MRO the
# storage class's methods take precedence -- confirm the intended order.
class MediaStorage(S3Boto3Storage, S3BotoStorageMixin):
    # Root all media uploads at the configured prefix inside the bucket.
    location = settings.MEDIAFILES_LOCATION
def StaticStorage():
    """
    Zero-argument factory returning the static-files storage backend,
    rooted at ``STATICFILES_LOCATION`` inside the bucket.

    A ``def`` replaces the original lambda assignment (PEP 8 E731) while
    keeping the exact callable interface ``STATICFILES_STORAGE`` expects.
    """
    return CachedS3BotoStorage(location=settings.STATICFILES_LOCATION)