diff --git a/.travis.yml b/.travis.yml index 9e286448..99ad7ade 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,6 +21,9 @@ before_install: - sudo mysql -e "use mysql; update user set authentication_string=PASSWORD('password') where User='root'; update user set plugin='mysql_native_password';FLUSH PRIVILEGES;" - sudo mysql_upgrade -u root -ppassword - sudo service mysql restart + - sudo rm -f /etc/boto.cfg + - export AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE + - export AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY install: - pip install -r development.txt before_script: diff --git a/CTFd/utils/uploads/uploaders.py b/CTFd/utils/uploads/uploaders.py index 31eca7fe..e080ce02 100644 --- a/CTFd/utils/uploads/uploaders.py +++ b/CTFd/utils/uploads/uploaders.py @@ -97,6 +97,7 @@ class S3Uploader(BaseUploader): def upload(self, file_obj, filename): filename = filter(self._clean_filename, secure_filename(filename).replace(' ', '_')) + filename = ''.join(filename) if len(filename) <= 0: return False @@ -122,7 +123,8 @@ class S3Uploader(BaseUploader): def sync(self): local_folder = current_app.config.get('UPLOAD_FOLDER') - bucket_list = self.s3.list_objects(Bucket=self.bucket)['Contents'] + # If the bucket is empty then Contents will not be in the response + bucket_list = self.s3.list_objects(Bucket=self.bucket).get('Contents', []) for s3_key in bucket_list: s3_object = s3_key['Key'] diff --git a/development.txt b/development.txt index 2d0ad2b0..bc4a521a 100644 --- a/development.txt +++ b/development.txt @@ -9,3 +9,4 @@ psycopg2==2.7.5 psycopg2-binary==2.7.5 codecov==2.0.15 nose-randomly==1.2.5 +moto==1.3.7 diff --git a/tests/utils/test_uploaders.py b/tests/utils/test_uploaders.py new file mode 100644 index 00000000..03bc5014 --- /dev/null +++ b/tests/utils/test_uploaders.py @@ -0,0 +1,59 @@ +import boto3 +from moto import mock_s3 +from tests.helpers import * +from CTFd.utils.uploads import S3Uploader, FilesystemUploader, rmdir +from CTFd.utils import 
binary_type +from six import BytesIO +import os + + +@mock_s3 +def test_s3_uploader(): + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='bucket') + + app = create_ctfd() + with app.app_context(): + app.config['UPLOAD_PROVIDER'] = 's3' + app.config['AWS_ACCESS_KEY_ID'] = 'AKIAIOSFODNN7EXAMPLE' + app.config['AWS_SECRET_ACCESS_KEY'] = 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' + app.config['AWS_S3_BUCKET'] = 'bucket' + + uploader = S3Uploader() + + assert uploader.s3 + assert uploader.bucket == 'bucket' + + fake_file = BytesIO('fakedfile'.encode()) + path = uploader.upload(fake_file, 'fake_file.txt') + + assert 'fake_file.txt' in uploader.download(path).location + destroy_ctfd(app) + + +@mock_s3 +def test_s3_sync(): + conn = boto3.resource('s3', region_name='us-east-1') + conn.create_bucket(Bucket='bucket') + + app = create_ctfd() + with app.app_context(): + app.config['UPLOAD_PROVIDER'] = 's3' + app.config['AWS_ACCESS_KEY_ID'] = 'AKIAIOSFODNN7EXAMPLE' + app.config['AWS_SECRET_ACCESS_KEY'] = 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' + app.config['AWS_S3_BUCKET'] = 'bucket' + + uploader = S3Uploader() + uploader.sync() + + fake_file = BytesIO('fakedfile'.encode()) + path = uploader.upload(fake_file, 'fake_file.txt') + full_path = os.path.join(app.config['UPLOAD_FOLDER'], path) + + try: + uploader.sync() + with open(full_path) as f: + assert f.read() == 'fakedfile' + finally: + rmdir(os.path.dirname(full_path)) + destroy_ctfd(app)