Fix syncing down an empty S3 bucket (#783)

* Fix syncing down an empty S3 bucket
* Add test for S3 uploader
* Remove boto.cfg because of https://github.com/travis-ci/travis-ci/issues/7940#issuecomment-310759657
* Specify dummy AWS creds in travis.yml
* Fix S3Uploader in Python 3 and fix test
Kevin Chung 2018-12-06 01:39:39 -05:00 committed by GitHub
parent 473acdbdc3
commit 547fe61870
4 changed files with 66 additions and 1 deletion

.travis.yml

@@ -21,6 +21,9 @@ before_install:
 - sudo mysql -e "use mysql; update user set authentication_string=PASSWORD('password') where User='root'; update user set plugin='mysql_native_password';FLUSH PRIVILEGES;"
 - sudo mysql_upgrade -u root -ppassword
 - sudo service mysql restart
+- sudo rm -f /etc/boto.cfg
+- export AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
+- export AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
 install:
 - pip install -r development.txt
 before_script:
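Note: the dummy values above are enough because boto3's default credential chain reads these environment variables before any config file, so the moto-backed tests get credentials without a real AWS account. A minimal standalone sketch of that lookup (not part of this commit):

import os
import boto3

# boto3 checks AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY in the
# environment first, before ~/.aws/credentials or any config file.
creds = boto3.Session().get_credentials()
assert creds.access_key == os.environ['AWS_ACCESS_KEY_ID']
assert creds.secret_key == os.environ['AWS_SECRET_ACCESS_KEY']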


@@ -97,6 +97,7 @@ class S3Uploader(BaseUploader):
     def upload(self, file_obj, filename):
         filename = filter(self._clean_filename, secure_filename(filename).replace(' ', '_'))
+        filename = ''.join(filename)
         if len(filename) <= 0:
             return False
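Note: the added ''.join(filename) is the "Fix S3Uploader in Python 3" item from the commit message. In Python 3, filter() returns a lazy iterator rather than a list, so the len() check above would raise TypeError. A standalone sketch of the behavior, with a simple predicate standing in for _clean_filename:

# Python 3: filter() is lazy; calling len() on its result raises TypeError.
cleaned = filter(lambda c: c not in '%$', 'fake_file.txt')
filename = ''.join(cleaned)  # materialize the iterator into a str
assert len(filename) > 0     # len() is safe on the joined string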
@@ -122,7 +123,8 @@ class S3Uploader(BaseUploader):
     def sync(self):
         local_folder = current_app.config.get('UPLOAD_FOLDER')
-        bucket_list = self.s3.list_objects(Bucket=self.bucket)['Contents']
+        # If the bucket is empty then Contents will not be in the response
+        bucket_list = self.s3.list_objects(Bucket=self.bucket).get('Contents', [])
         for s3_key in bucket_list:
             s3_object = s3_key['Key']
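Note: this is the empty-bucket fix itself. The list_objects response dict simply omits the Contents key when the bucket holds no objects, so the old subscript raised KeyError on a fresh bucket. A runnable sketch under moto (the function name is illustrative):

import boto3
from moto import mock_s3

@mock_s3
def show_empty_bucket_listing():
    s3 = boto3.client('s3', region_name='us-east-1')
    s3.create_bucket(Bucket='bucket')
    response = s3.list_objects(Bucket='bucket')
    assert 'Contents' not in response          # empty bucket: key is absent
    assert response.get('Contents', []) == []  # the fix: default to []

show_empty_bucket_listing()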

development.txt

@@ -9,3 +9,4 @@ psycopg2==2.7.5
 psycopg2-binary==2.7.5
 codecov==2.0.15
 nose-randomly==1.2.5
+moto==1.3.7


@@ -0,0 +1,59 @@
+import boto3
+from moto import mock_s3
+from tests.helpers import *
+from CTFd.utils.uploads import S3Uploader, FilesystemUploader, rmdir
+from CTFd.utils import binary_type
+from six import BytesIO
+import os
+
+
+@mock_s3
+def test_s3_uploader():
+    conn = boto3.resource('s3', region_name='us-east-1')
+    conn.create_bucket(Bucket='bucket')
+
+    app = create_ctfd()
+    with app.app_context():
+        app.config['UPLOAD_PROVIDER'] = 's3'
+        app.config['AWS_ACCESS_KEY_ID'] = 'AKIAIOSFODNN7EXAMPLE'
+        app.config['AWS_SECRET_ACCESS_KEY'] = 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
+        app.config['AWS_S3_BUCKET'] = 'bucket'
+
+        uploader = S3Uploader()
+        assert uploader.s3
+        assert uploader.bucket == 'bucket'
+
+        fake_file = BytesIO('fakedfile'.encode())
+        path = uploader.upload(fake_file, 'fake_file.txt')
+        assert 'fake_file.txt' in uploader.download(path).location
+    destroy_ctfd(app)
+
+
+@mock_s3
+def test_s3_sync():
+    conn = boto3.resource('s3', region_name='us-east-1')
+    conn.create_bucket(Bucket='bucket')
+
+    app = create_ctfd()
+    with app.app_context():
+        app.config['UPLOAD_PROVIDER'] = 's3'
+        app.config['AWS_ACCESS_KEY_ID'] = 'AKIAIOSFODNN7EXAMPLE'
+        app.config['AWS_SECRET_ACCESS_KEY'] = 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
+        app.config['AWS_S3_BUCKET'] = 'bucket'
+
+        uploader = S3Uploader()
+        uploader.sync()
+
+        fake_file = BytesIO('fakedfile'.encode())
+        path = uploader.upload(fake_file, 'fake_file.txt')
+        full_path = os.path.join(app.config['UPLOAD_FOLDER'], path)
+
+        try:
+            uploader.sync()
+            with open(full_path) as f:
+                assert f.read() == 'fakedfile'
+        finally:
+            rmdir(os.path.dirname(full_path))
+    destroy_ctfd(app)
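Note: under mock_s3 every boto3 call in these tests is intercepted in-process, so neither the dummy credentials nor the bucket ever touch real AWS. The first uploader.sync() in test_s3_sync runs against a still-empty bucket, which is exactly the case that raised KeyError before this fix.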