README.rst (50 additions, 2 deletions)
@@ -62,12 +62,14 @@ Required::
ckanext.s3filestore.aws_access_key_id = Your-Access-Key-ID
ckanext.s3filestore.aws_secret_access_key = Your-Secret-Access-Key
ckanext.s3filestore.aws_bucket_name = a-bucket-to-store-your-stuff
ckanext.s3filestore.host_name = host-to-S3-cloud storage
ckanext.s3filestore.region_name= region-name
ckanext.s3filestore.region_name = region-name
ckanext.s3filestore.signature_version = signature (s3v4)

Optional::

# required only when using Minio or a custom Amazon S3 endpoint
ckanext.s3filestore.host_name = host-to-S3-cloud storage

# An optional path to prepend to keys
ckanext.s3filestore.aws_storage_path = my-site-name

@@ -76,6 +78,52 @@ Optional::
# The ckan storage path option must also be set correctly for the fallback to work
ckan.storage_path = path/to/storage/directory

    # access to the bucket is checked on startup; disable the check by setting this to false
ckanext.s3filestore.check_access_on_startup = false

    # the bucket is created if it does not exist; disable this behaviour by setting to false
ckanext.s3filestore.create_if_not_exists = false
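
For example, a deployment against Minio or another custom S3 endpoint could combine the required settings with ``host_name`` along these lines (the endpoint URL is an assumed placeholder, not a value shipped with the extension)::

    ckanext.s3filestore.aws_access_key_id = Your-Access-Key-ID
    ckanext.s3filestore.aws_secret_access_key = Your-Secret-Access-Key
    ckanext.s3filestore.aws_bucket_name = a-bucket-to-store-your-stuff
    ckanext.s3filestore.region_name = region-name
    ckanext.s3filestore.signature_version = s3v4
    # assumed example endpoint for a self-hosted Minio server
    ckanext.s3filestore.host_name = http://minio.example.com:9000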


-----------------------
Amazon S3 Usage Example
-----------------------

You should have an AWS S3 regional bucket and an access key / secret attached to a user with the following IAM policy (replace BUCKET_NAME with your bucket name)::

{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "statement1",
"Effect": "Allow",
"Action": [
"s3:GetBucketLocation",
"s3:HeadBucket",
"s3:ListBucket",
"s3:GetObject",
"s3:PutObject",
"s3:PutObjectAcl",
"s3:DeleteObject"
],
"Resource": [
"arn:aws:s3:::BUCKET_NAME",
"arn:aws:s3:::BUCKET_NAME/*"
]
}
]
}

Config settings::

ckanext.s3filestore.aws_access_key_id = Your-Access-Key-ID
ckanext.s3filestore.aws_secret_access_key = Your-Secret-Access-Key
ckanext.s3filestore.aws_bucket_name = Your-Bucket-Name
ckanext.s3filestore.region_name = region-name
ckanext.s3filestore.signature_version = s3v4
ckanext.s3filestore.check_access_on_startup = false
ckanext.s3filestore.create_if_not_exists = false
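
Before starting CKAN, the credentials, policy and bucket can be sanity-checked with a short standalone boto3 script. This is only an illustrative sketch, not part of the extension: the key, secret, region and bucket name are placeholders for your own values, and the object key used for the write test is arbitrary::

    import boto3
    from botocore.client import Config
    from botocore.exceptions import ClientError

    session = boto3.session.Session(
        aws_access_key_id='Your-Access-Key-ID',
        aws_secret_access_key='Your-Secret-Access-Key',
        region_name='region-name')
    s3 = session.resource('s3', config=Config(signature_version='s3v4'))

    bucket = s3.Bucket('Your-Bucket-Name')
    try:
        # exercises s3:ListBucket / s3:HeadBucket
        s3.meta.client.head_bucket(Bucket=bucket.name)
        # exercises s3:PutObject and s3:DeleteObject
        obj = bucket.put_object(Key='connectivity-check.txt', Body=b'ok')
        obj.delete()
        print('Bucket is reachable and writable')
    except ClientError as error:
        print('S3 check failed: {0}'.format(error))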


------------------------
Development Installation
ckanext/s3filestore/controller.py (9 additions, 5 deletions)
@@ -11,7 +11,7 @@
from ckan.common import _, request, c, response
from botocore.exceptions import ClientError

from ckanext.s3filestore.uploader import S3Uploader
from ckanext.s3filestore.uploader import S3Uploader, get_host_name
import webob

import logging
@@ -45,9 +45,11 @@ def resource_download(self, id, resource_id, filename=None):
if rsc.get('url_type') == 'upload':
upload = uploader.get_resource_uploader(rsc)
bucket_name = config.get('ckanext.s3filestore.aws_bucket_name')
region = config.get('ckanext.s3filestore.region_name')
host_name = config.get('ckanext.s3filestore.host_name')
bucket = upload.get_s3_bucket(bucket_name)
host_name = get_host_name(
config.get('ckanext.s3filestore.host_name'),
config.get('ckanext.s3filestore.region_name'))
bucket = upload.get_s3_bucket(bucket_name, toolkit.asbool(config.get(
'ckanext.s3filestore.create_if_not_exists', True)))

if filename is None:
filename = os.path.basename(rsc['url'])
@@ -130,7 +132,9 @@ def filesystem_resource_download(self, id, resource_id, filename=None):

def uploaded_file_redirect(self, upload_to, filename):
'''Redirect static file requests to their location on S3.'''
host_name = config.get('ckanext.s3filestore.host_name')
host_name = get_host_name(
config.get('ckanext.s3filestore.host_name'),
config.get('ckanext.s3filestore.region_name'))
# Remove the last character if it's a slash
if host_name[-1] == '/':
host_name = host_name[:-1]
ckanext/s3filestore/plugin.py (3 additions, 2 deletions)
@@ -28,7 +28,6 @@ def configure(self, config):
'ckanext.s3filestore.aws_bucket_name',
'ckanext.s3filestore.region_name',
'ckanext.s3filestore.signature_version',
'ckanext.s3filestore.host_name'
)
for option in config_options:
if not config.get(option, None):
@@ -39,7 +38,9 @@
config.get('ckanext.s3filestore.check_access_on_startup',
True)):
ckanext.s3filestore.uploader.BaseS3Uploader().get_s3_bucket(
config.get('ckanext.s3filestore.aws_bucket_name'))
config.get('ckanext.s3filestore.aws_bucket_name'),
toolkit.asbool(config.get('ckanext.s3filestore.create_if_not_exists', True))
)

# IUploader

ckanext/s3filestore/uploader.py (16 additions, 3 deletions)
@@ -32,6 +32,13 @@ def _get_underlying_file(wrapper):
return wrapper.file


def get_host_name(host_name, region_name):
if host_name:
return host_name
else:
return 'https://s3-{}.amazonaws.com'.format(region_name)
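
# Illustrative usage (not part of the module): a configured host name wins,
# otherwise the regional AWS endpoint is derived from the region, e.g.
#   get_host_name(None, 'eu-west-1')             -> 'https://s3-eu-west-1.amazonaws.com'
#   get_host_name('http://minio:9000', 'unused') -> 'http://minio:9000'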


class S3FileStoreException(Exception):
pass

@@ -44,8 +51,12 @@ def __init__(self):
self.s_key = config.get('ckanext.s3filestore.aws_secret_access_key')
self.region = config.get('ckanext.s3filestore.region_name')
self.signature = config.get('ckanext.s3filestore.signature_version')
self.host_name = config.get('ckanext.s3filestore.host_name')
self.bucket = self.get_s3_bucket(self.bucket_name)
self.host_name = get_host_name(
config.get('ckanext.s3filestore.host_name'),
config.get('ckanext.s3filestore.region_name'))
self.create_if_not_exists = toolkit.asbool(config.get(
'ckanext.s3filestore.create_if_not_exists', True))
self.bucket = self.get_s3_bucket(self.bucket_name, self.create_if_not_exists)

def get_directory(self, id, storage_path):
directory = os.path.join(storage_path, id)
@@ -56,7 +67,7 @@ def get_s3_session(self):
aws_secret_access_key=self.s_key,
region_name=self.region)

def get_s3_bucket(self, bucket_name):
def get_s3_bucket(self, bucket_name, create_if_not_exists=True):
'''Return a boto bucket, optionally creating it if it doesn't exist.'''

# make s3 connection using boto3
@@ -65,6 +76,8 @@ def get_s3_bucket(self, bucket_name):
config=botocore.client.Config(
signature_version=self.signature))
bucket = s3.Bucket(bucket_name)
if not create_if_not_exists:
return bucket
try:
if s3.Bucket(bucket.name) in s3.buckets.all():
log.info('Bucket {0} found!'.format(bucket_name))
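
For context, the existence check and optional creation that ``get_s3_bucket`` performs can be expressed with plain boto3 roughly as follows. This is a standalone sketch, not the extension's exact code: it uses ``head_bucket`` instead of listing all buckets, the function and variable names are invented for illustration, and creating a missing bucket needs an ``s3:CreateBucket`` permission that the example IAM policy above does not include:

import boto3
from botocore.exceptions import ClientError


def get_or_create_bucket(s3, bucket_name, region_name,
                         create_if_not_exists=True):
    '''Return a boto3 Bucket, optionally creating it when it is missing.'''
    bucket = s3.Bucket(bucket_name)
    if not create_if_not_exists:
        # trust the configuration and skip the existence check entirely
        return bucket
    try:
        # raises ClientError (404 missing, 403 forbidden) when not accessible
        s3.meta.client.head_bucket(Bucket=bucket_name)
    except ClientError:
        # outside us-east-1 a LocationConstraint must be supplied
        bucket.create(
            CreateBucketConfiguration={'LocationConstraint': region_name})
    return bucket


# Example call, assuming credentials come from the environment or ~/.aws:
# s3 = boto3.session.Session().resource('s3')
# bucket = get_or_create_bucket(s3, 'a-bucket-to-store-your-stuff', 'eu-west-1')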