Assume S3 is enabled #298

Open · wants to merge 1 commit into base: master
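This PR removes the `AWS_ENABLED` flag and every local-filesystem fallback behind it: source files, resources, build artifacts and exports are now always stored in S3. The existing `AWS_S3_FAKE_S3` setting (a `host:port` string) remains the only non-AWS option, pointing boto at a fake-s3 server for local development. Below is a sketch of the environment the branch now assumes; the variable names come from `app.json` and `settings.py` in this diff, while the fake-s3 port and values are assumptions:

```python
import os

# Hypothetical local-dev environment under this branch. AWS_ENABLED is gone;
# setting AWS_S3_FAKE_S3 is now the only way to avoid real AWS.
os.environ.setdefault('AWS_ACCESS_KEY_ID', 'dev-key')      # "required": true in app.json
os.environ.setdefault('AWS_SECRET_ACCESS_KEY', 'dev-secret')
os.environ.setdefault('AWS_S3_FAKE_S3', 'localhost:4569')  # "host:port"; port defaults to 80
os.environ.setdefault('AWS_S3_BUILDS_BUCKET', 'builds-staging.cloudpebble.net')
os.environ.setdefault('AWS_S3_EXPORT_BUCKET', 'export-staging.cloudpebble.net')
os.environ.setdefault('AWS_S3_SOURCE_BUCKET', 'source-staging.cloudpebble.net')
```
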
app.json (1 change: 0 additions & 1 deletion)

@@ -7,7 +7,6 @@
     "AWS_ACCESS_KEY_ID": {
       "required": true
     },
-    "AWS_ENABLED": "yes",
     "AWS_S3_BUILDS_BUCKET": "builds-staging.cloudpebble.net",
     "AWS_S3_EXPORT_BUCKET": "export-staging.cloudpebble.net",
     "AWS_S3_SOURCE_BUCKET": "source-staging.cloudpebble.net",

cloudpebble/settings.py (1 change: 0 additions & 1 deletion)

@@ -341,7 +341,6 @@
 MAILCHIMP_API_KEY = _environ.get('MAILCHIMP_API_KEY', None)
 MAILCHIMP_LIST_ID = _environ.get('MAILCHIMP_LIST_ID', None)
 
-AWS_ENABLED = 'AWS_ENABLED' in _environ
 AWS_ACCESS_KEY_ID = _environ.get('AWS_ACCESS_KEY_ID', None)
 AWS_SECRET_ACCESS_KEY = _environ.get('AWS_SECRET_ACCESS_KEY', None)
 
ide/api/project.py (1 change: 1 addition & 0 deletions)

@@ -263,6 +263,7 @@ def save_project_dependencies(request, project_id):
     else:
         send_td_event('cloudpebble_save_project_settings', request=request, project=project)
 
+
 @require_POST
 @login_required
 @json_view

ide/api/resource.py (16 changes: 5 additions & 11 deletions)

@@ -1,5 +1,4 @@
 import json
-from django.conf import settings
 from django.contrib.auth.decorators import login_required
 from django.db import transaction, IntegrityError
 from django.http import HttpResponse, HttpResponseRedirect
@@ -238,13 +237,8 @@ def show_resource(request, project_id, resource_id, variant):
     }
     content_disposition = "attachment; filename=\"%s\"" % resource.file_name
     content_type = content_types[resource.kind]
-    if settings.AWS_ENABLED:
-        headers = {
-            'response-content-disposition': content_disposition,
-            'Content-Type': content_type
-        }
-        return HttpResponseRedirect(s3.get_signed_url('source', variant.s3_path, headers=headers))
-    else:
-        response = HttpResponse(open(variant.local_filename), content_type=content_type)
-        response['Content-Disposition'] = content_disposition
-        return response
+    headers = {
+        'response-content-disposition': content_disposition,
+        'Content-Type': content_type
+    }
+    return HttpResponseRedirect(s3.get_signed_url('source', variant.s3_path, headers=headers))

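`show_resource` now unconditionally redirects (302) to a time-limited signed URL instead of optionally streaming the file through Django, so clients must be able to reach the S3 or fake-s3 host directly. The body of `get_signed_url` is truncated at the bottom of this diff; a minimal sketch of such a helper on boto 2, assuming it wraps `Key.generate_url` and forwards the headers as response overrides (the 60-second expiry is an assumption):

```python
def get_signed_url_sketch(bucket, path, headers=None, expires_in=60):
    # Produce a query-string-authenticated URL. response_headers lets the
    # caller force response-content-disposition / Content-Type on S3's reply.
    key = bucket.get_key(path)
    return key.generate_url(expires_in, query_auth=True, response_headers=headers)
```
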
ide/models/build.py (52 changes: 9 additions & 43 deletions)

@@ -1,8 +1,5 @@
 import uuid
 import json
-import shutil
-import os
-import os.path
 from django.conf import settings
 from django.db import models
 from ide.models.project import Project
@@ -42,19 +39,10 @@ class BuildResult(IdeModel):
     finished = models.DateTimeField(blank=True, null=True)
 
     def _get_dir(self):
-        if settings.AWS_ENABLED:
-            return '%s/' % self.uuid
-        else:
-            path = '%s%s/%s/%s/' % (settings.MEDIA_ROOT, self.uuid[0], self.uuid[1], self.uuid)
-            if not os.path.exists(path):
-                os.makedirs(path)
-            return path
+        return '%s/' % self.uuid
 
     def get_url(self):
-        if settings.AWS_ENABLED:
-            return "%s%s/" % (settings.MEDIA_URL, self.uuid)
-        else:
-            return '%s%s/%s/%s/' % (settings.MEDIA_URL, self.uuid[0], self.uuid[1], self.uuid)
+        return "%s%s/" % (settings.MEDIA_URL, self.uuid)
 
     @property
     def pbw(self):
@@ -88,46 +76,24 @@ def get_debug_info_filename(self, platform, kind):
         return self._get_dir() + self.DEBUG_INFO_MAP[platform][kind]
 
     def save_build_log(self, text):
-        if not settings.AWS_ENABLED:
-            with open(self.build_log, 'w') as f:
-                f.write(text)
-        else:
-            s3.save_file('builds', self.build_log, text, public=True, content_type='text/plain')
+        s3.save_file('builds', self.build_log, text, public=True, content_type='text/plain')
 
     def read_build_log(self):
-        if not settings.AWS_ENABLED:
-            with open(self.build_log, 'r') as f:
-                return f.read()
-        else:
-            return s3.read_file('builds', self.build_log)
+        return s3.read_file('builds', self.build_log)
 
     def save_debug_info(self, json_info, platform, kind):
         text = json.dumps(json_info)
-        if not settings.AWS_ENABLED:
-            with open(self.get_debug_info_filename(platform, kind), 'w') as f:
-                f.write(text)
-        else:
-            s3.save_file('builds', self.get_debug_info_filename(platform, kind), text, public=True, content_type='application/json')
+        s3.save_file('builds', self.get_debug_info_filename(platform, kind), text, public=True, content_type='application/json')
 
     def save_package(self, package_path):
-        if not settings.AWS_ENABLED:
-            shutil.move(package_path, self.package)
-        else:
-            filename = '%s.tar.gz' % self.project.app_short_name.replace('/', '-')
-            s3.upload_file('builds', self.package, package_path, public=True, download_filename=filename, content_type='application/gzip')
+        filename = '%s.tar.gz' % self.project.app_short_name.replace('/', '-')
+        s3.upload_file('builds', self.package, package_path, public=True, download_filename=filename, content_type='application/gzip')
 
     def save_pbw(self, pbw_path):
-        if not settings.AWS_ENABLED:
-            shutil.move(pbw_path, self.pbw)
-        else:
-            s3.upload_file('builds', self.pbw, pbw_path, public=True, download_filename='%s.pbw' % self.project.app_short_name.replace('/','-'))
+        s3.upload_file('builds', self.pbw, pbw_path, public=True, download_filename='%s.pbw' % self.project.app_short_name.replace('/','-'))
 
     def save_simplyjs(self, javascript):
-        if not settings.AWS_ENABLED:
-            with open(self.simplyjs, 'w') as f:
-                f.write(javascript)
-        else:
-            s3.save_file('builds', self.simplyjs, javascript, public=True, content_type='text/javascript')
+        s3.save_file('builds', self.simplyjs, javascript, public=True, content_type='text/javascript')
 
     def get_sizes(self):
         sizes = {}

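Two quieter behavior changes in `BuildResult` besides the storage move: `_get_dir` loses its side effect (the deleted local branch called `os.makedirs` on first use), and anyone who ran without S3 loses the sharded `uuid[0]/uuid[1]/uuid` layout under `MEDIA_URL`; URLs are now always the flat `MEDIA_URL` + `uuid` + `/` form. Since every writer now delegates to `s3.save_file`/`s3.upload_file`, the `shutil`, `os` and `os.path` imports go with them.
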
ide/models/meta.py (1 change: 1 addition & 0 deletions)

@@ -2,6 +2,7 @@
 from django.db.models.signals import pre_save
 from django.dispatch import receiver
 
+
 class IdeModel(models.Model):
     class Meta:
         abstract = True

ide/models/s3file.py (56 changes: 7 additions & 49 deletions)

@@ -1,5 +1,3 @@
-import shutil
-import os
 import logging
 
 from django.utils.translation import ugettext as _
@@ -18,7 +16,6 @@ class S3File(IdeModel):
     bucket_name = 'source'
     folder = None
     project = None
-    _create_local_if_not_exists = False
 
     @property
     def padded_id(self):
@@ -37,41 +34,11 @@ def s3_id(self):
     def s3_path(self):
         return '%s/%s' % (self.folder, self.s3_id)
 
-    def _get_contents_local(self):
-        try:
-            return open(self.local_filename).read()
-        except IOError:
-            if self._create_local_if_not_exists:
-                return ''
-            else:
-                raise
-
-    def _save_string_local(self, string):
-        if not os.path.exists(os.path.dirname(self.local_filename)):
-            os.makedirs(os.path.dirname(self.local_filename))
-        with open(self.local_filename, 'wb') as out:
-            out.write(string)
-
-    def _copy_to_path_local(self, path):
-        try:
-            shutil.copy(self.local_filename, path)
-        except IOError as err:
-            if err.errno == 2 and self._crete_local_if_not_exists:
-                open(path, 'w').close()  # create the file if it's missing.
-            else:
-                raise
-
     def get_contents(self):
-        if not settings.AWS_ENABLED:
-            return self._get_contents_local()
-        else:
-            return s3.read_file(self.bucket_name, self.s3_path)
+        return s3.read_file(self.bucket_name, self.s3_path)
 
     def save_string(self, string):
-        if not settings.AWS_ENABLED:
-            self._save_string_local(string)
-        else:
-            s3.save_file(self.bucket_name, self.s3_path, string)
+        s3.save_file(self.bucket_name, self.s3_path, string)
         if self.project:
             self.project.last_modified = now()
             self.project.save()
@@ -85,10 +52,7 @@ def save_text(self, content):
         self.save_string(content.encode('utf-8'))
 
     def copy_to_path(self, path):
-        if not settings.AWS_ENABLED:
-            self._copy_to_path_local(path)
-        else:
-            s3.read_file_to_filesystem(self.bucket_name, self.s3_path, path)
+        s3.read_file_to_filesystem(self.bucket_name, self.s3_path, path)
 
     class Meta(IdeModel.Meta):
         abstract = True
@@ -97,13 +61,7 @@ class Meta(IdeModel.Meta):
 @receiver(post_delete)
 def delete_file(sender, instance, **kwargs):
     if issubclass(sender, S3File):
-        if settings.AWS_ENABLED:
-            try:
-                s3.delete_file(sender.bucket_name, instance.s3_path)
-            except:
-                logger.exception("Failed to delete S3 file")
-        else:
-            try:
-                os.unlink(instance.local_filename)
-            except OSError:
-                pass
+        try:
+            s3.delete_file(sender.bucket_name, instance.s3_path)
+        except:
+            logger.exception("Failed to delete S3 file")

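A detail in the deleted code that supports this direction: `_copy_to_path_local` tested `self._crete_local_if_not_exists`, a misspelling of `_create_local_if_not_exists`, so its create-on-missing recovery path could only ever have raised `AttributeError`. The local fallback this PR removes appears not to have been well exercised.
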
ide/models/textfile.py (1 change: 0 additions & 1 deletion)

@@ -10,7 +10,6 @@ class TextFile(S3File):
     """ TextFile adds support to S3File for last-modified timestamps and code folding """
     last_modified = models.DateTimeField(blank=True, null=True, auto_now=True)
     folded_lines = models.TextField(default="[]")
-    _create_local_if_not_exists = True
 
     def was_modified_since(self, expected_modification_time):
         if isinstance(expected_modification_time, int):

ide/tasks/archive.py (15 changes: 4 additions & 11 deletions)

@@ -61,16 +61,9 @@ def create_archive(project_id):
 
     send_td_event('cloudpebble_export_project', project=project)
 
-    if not settings.AWS_ENABLED:
-        outfile = '%s%s/%s.zip' % (settings.EXPORT_DIRECTORY, u, prefix)
-        os.makedirs(os.path.dirname(outfile), 0755)
-        shutil.copy(filename, outfile)
-        os.chmod(outfile, 0644)
-        return '%s%s/%s.zip' % (settings.EXPORT_ROOT, u, prefix)
-    else:
-        outfile = '%s/%s.zip' % (u, prefix)
-        s3.upload_file('export', outfile, filename, public=True, content_type='application/zip')
-        return '%s%s' % (settings.EXPORT_ROOT, outfile)
+    outfile = '%s/%s.zip' % (u, prefix)
+    s3.upload_file('export', outfile, filename, public=True, content_type='application/zip')
+    return '%s%s' % (settings.EXPORT_ROOT, outfile)
 
 
 @task(acks_late=True)
@@ -309,4 +302,4 @@ def make_valid_filename(zip_entry):
                 'reason': str(e)
             }
         }, user=project.owner)
-        raise
+        raise

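Exports now always upload to the export bucket, and the returned link is always `settings.EXPORT_ROOT` plus the S3 key; the `EXPORT_DIRECTORY` copy, with its `0755`/`0644` permission handling, is gone. As far as this diff shows, that leaves `settings.EXPORT_DIRECTORY` unused. The trailing `raise`/`raise` hunk is most likely just adding a missing newline at the end of the file.
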
utils/s3.py (72 changes: 26 additions & 46 deletions)

@@ -27,37 +27,33 @@ def __init__(self):
         self.s3 = None
 
     def configure(self):
-        if settings.AWS_ENABLED:
-            if settings.AWS_S3_FAKE_S3 is None:
-                # The host must be manually specified in Python 2.7.9+ due to
-                # https://github.com/boto/boto/issues/2836 this bug in boto with .s in
-                # bucket names.
-                host = settings.AWS_S3_HOST if settings.AWS_S3_HOST else NoHostProvided
-
-                self.s3 = boto.connect_s3(
-                    settings.AWS_ACCESS_KEY_ID,
-                    settings.AWS_SECRET_ACCESS_KEY,
-                    host=host,
-                    calling_format=OrdinaryCallingFormat()
-                )
-            else:
-                host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2]
-                port = int(port)
-                self.s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port,
-                                          host=host, calling_format=OrdinaryCallingFormat())
-                _ensure_bucket_exists(self.s3, settings.AWS_S3_SOURCE_BUCKET)
-                _ensure_bucket_exists(self.s3, settings.AWS_S3_EXPORT_BUCKET)
-                _ensure_bucket_exists(self.s3, settings.AWS_S3_BUILDS_BUCKET)
-
-            self.buckets = {
-                'source': self.s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET),
-                'export': self.s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET),
-                'builds': self.s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET),
-            }
-            self.configured = True
+        if settings.AWS_S3_FAKE_S3 is None:
+            # The host must be manually specified in Python 2.7.9+ due to
+            # https://github.com/boto/boto/issues/2836 this bug in boto with .s in
+            # bucket names.
+            host = settings.AWS_S3_HOST if settings.AWS_S3_HOST else NoHostProvided
+
+            self.s3 = boto.connect_s3(
+                settings.AWS_ACCESS_KEY_ID,
+                settings.AWS_SECRET_ACCESS_KEY,
+                host=host,
+                calling_format=OrdinaryCallingFormat()
+            )
         else:
-            self.s3 = None
-            self.buckets = None
+            host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2]
+            port = int(port)
+            self.s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port,
+                                      host=host, calling_format=OrdinaryCallingFormat())
+            _ensure_bucket_exists(self.s3, settings.AWS_S3_SOURCE_BUCKET)
+            _ensure_bucket_exists(self.s3, settings.AWS_S3_EXPORT_BUCKET)
+            _ensure_bucket_exists(self.s3, settings.AWS_S3_BUILDS_BUCKET)
+
+        self.buckets = {
+            'source': self.s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET),
+            'export': self.s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET),
+            'builds': self.s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET),
+        }
+        self.configured = True
 
     def __getitem__(self, item):
         if settings.TESTING:
@@ -70,38 +66,24 @@ def __getitem__(self, item):
 _buckets = BucketHolder()
 
 
-def _requires_aws(fn):
-    if settings.AWS_ENABLED:
-        return fn
-    else:
-        def complain(*args, **kwargs):
-            raise Exception("AWS_ENABLED must be True to call %s" % fn.__name__)
-
-        return complain
-
-
-@_requires_aws
 def read_file(bucket_name, path):
     bucket = _buckets[bucket_name]
     key = bucket.get_key(path)
     return key.get_contents_as_string()
 
 
-@_requires_aws
 def read_file_to_filesystem(bucket_name, path, destination):
     bucket = _buckets[bucket_name]
     key = bucket.get_key(path)
     key.get_contents_to_filename(destination)
 
 
-@_requires_aws
 def delete_file(bucket_name, path):
     bucket = _buckets[bucket_name]
     key = bucket.get_key(path)
     key.delete()
 
 
-@_requires_aws
 def save_file(bucket_name, path, value, public=False, content_type='application/octet-stream'):
     bucket = _buckets[bucket_name]
     key = Key(bucket)
@@ -115,7 +97,6 @@ def save_file(bucket_name, path, value, public=False, content_type='application/
     key.set_contents_from_string(value, policy=policy, headers={'Content-Type': content_type})
 
 
-@_requires_aws
 def upload_file(bucket_name, dest_path, src_path, public=False, content_type='application/octet-stream',
                 download_filename=None):
     bucket = _buckets[bucket_name]
@@ -137,7 +118,6 @@ def upload_file(bucket_name, dest_path, src_path, public=False, content_type='ap
     key.set_contents_from_filename(src_path, policy=policy, headers=headers)
 
 
-@_requires_aws
 def get_signed_url(bucket_name, path, headers=None):
     bucket = _buckets[bucket_name]
     key = bucket.get_key(path)
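With `_requires_aws` deleted rather than relaxed, these helpers no longer fail fast with the explicit "AWS_ENABLED must be True to call ..." message; a misconfigured deployment now fails inside boto on first access instead. That matches the thrust of the PR: there is no supported non-S3 mode left. The `__getitem__` body is collapsed above; a minimal runnable sketch of the lazy-holder pattern it implements, with the connection logic stubbed and the `settings.TESTING` branch noted as an assumption:

```python
class BucketHolderSketch(object):
    """Lazy S3 configuration: nothing connects until a bucket is first used."""

    def __init__(self):
        self.configured = False
        self.buckets = None
        self.s3 = None

    def configure(self):
        # The real configure() connects to S3 (or fake-s3 when AWS_S3_FAKE_S3
        # is set) and fills self.buckets; stubbed out here.
        self.buckets = {}
        self.configured = True

    def __getitem__(self, item):
        # The real __getitem__ also has a settings.TESTING branch (visible
        # above), which presumably returns a test fake instead.
        if not self.configured:
            self.configure()
        return self.buckets[item]
```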