diff --git a/admin/aws.py b/admin/aws.py index d9dec95f77..96be93b30c 100644 --- a/admin/aws.py +++ b/admin/aws.py @@ -7,9 +7,12 @@ import os from characteristic import attributes, Attribute from effect import Effect, sync_performer, TypeDispatcher -from effect.do import do +from effect.do import do, do_return import boto +from boto.s3.website import RoutingRules +from pyrsistent import PClass, pmap_field, PMap, field, discard, freeze +from twisted.python.filepath import FilePath @attributes([ @@ -55,7 +58,7 @@ class UpdateS3ErrorPage(object): def error_key(self): """ """ - return '{}error_pages/404.html'.format(self.target_prefix) + return u'{}error_pages/404.html'.format(self.target_prefix) @sync_performer @@ -281,10 +284,51 @@ def perform_download_s3_key(dispatcher, intent): bucket = s3.get_bucket(intent.source_bucket) key = bucket.get_key(intent.source_key) + if key is None: + raise KeyError(intent.source_key) with intent.target_path.open('w') as target_file: key.get_contents_to_file(target_file) +@attributes([ + "source_bucket", + "source_key", +]) +class ReadS3Key(object): + """ + Read a file from S3. + + :ivar bytes source_bucket: Name of bucket to read key from. + :ivar bytes source_key: Name of key to read. + """ + + +@sync_performer +@do +def perform_read_s3_key(dispatcher, intent): + """ + See :class:`ReadS3Key`. 
+ """ + target_file = FilePath( + u'/tmp/{}.perform_read_s3_key'.format( + __file__.replace(u"/", "!"), + ) + ).temporarySibling() + target_file.requireCreate(False) + try: + yield Effect( + DownloadS3Key( + source_bucket=intent.source_bucket, + source_key=intent.source_key, + target_path=target_file, + ) + ) + yield do_return(target_file.getContent()) + finally: + if target_file.exists(): + target_file.remove() + + @attributes([ "source_path", "target_bucket", @@ -364,6 +408,7 @@ def perform_upload_s3_key(dispatcher, intent): CopyS3Keys: perform_copy_s3_keys, DownloadS3KeyRecursively: perform_download_s3_key_recursively, DownloadS3Key: perform_download_s3_key, + ReadS3Key: perform_read_s3_key, UploadToS3Recursively: perform_upload_s3_key_recursively, UploadToS3: perform_upload_s3_key, CreateCloudFrontInvalidation: perform_create_cloudfront_invalidation, @@ -380,11 +425,31 @@ def __new__(cls, value, content_type): return self -@attributes([ - Attribute('routing_rules'), - Attribute('s3_buckets'), - Attribute('error_key', default_factory=dict) -]) +class FakeAWSState(PClass): + """ + The immutable state of ``FakeAWS`` + + :ivar routing_rules: Dictionary of routing rules for S3 buckets. They are + represented as dictonaries mapping key prefixes to replacements. Other + types of rules and attributes are supported or represented. + :ivar s3_buckets: Dictionary of fake S3 buckets. Each bucket is represented + as a dictonary mapping keys to contents. Other attributes are ignored. + :ivar cloudfront_invalidations: List of + :class:`CreateCloudFrontInvalidation` that have been requested. 
+ """ + routing_rules = pmap_field( + key_type=unicode, + value_type=RoutingRules + ) + s3_buckets = pmap_field( + key_type=unicode, + value_type=PMap + ) + error_key = pmap_field(key_type=unicode, value_type=unicode) + cloudfront_invalidations = field(initial=freeze([])) + + +@attributes(['state']) class FakeAWS(object): """ Enough of a fake implementation of AWS to test @@ -399,14 +464,17 @@ class FakeAWS(object): :class:`CreateCloudFrontInvalidation` that have been requested. """ def __init__(self): - self.cloudfront_invalidations = [] + self.initial_state = self.state @sync_performer def _perform_update_s3_routing_rules(self, dispatcher, intent): """ See :class:`UpdateS3RoutingRule`. """ - self.routing_rules[intent.bucket] = intent.routing_rules + self.state = self.state.transform( + ['routing_rules', intent.bucket], + intent.routing_rules + ) @sync_performer def _perform_update_s3_error_page(self, dispatcher, intent): @@ -414,8 +482,11 @@ def _perform_update_s3_error_page(self, dispatcher, intent): See :class:`UpdateS3ErrorPage`. """ new_error_key = intent.error_key - old_error_key = self.error_key.get(intent.bucket) - self.error_key[intent.bucket] = new_error_key + old_error_key = self.state.error_key.get(intent.bucket) + self.state = self.state.transform( + [u'error_key', intent.bucket], + new_error_key + ) if old_error_key == new_error_key: return None return old_error_key @@ -425,34 +496,42 @@ def _perform_create_cloudfront_invalidation(self, dispatcher, intent): """ See :class:`CreateCloudFrontInvalidation`. """ - self.cloudfront_invalidations.append(intent) + self.state = self.state.transform( + ['cloudfront_invalidations'], + lambda l: l.append(intent) + ) @sync_performer def _perform_delete_s3_keys(self, dispatcher, intent): """ See :class:`DeleteS3Keys`. 
""" - bucket = self.s3_buckets[intent.bucket] for key in intent.keys: - del bucket[intent.prefix + key] + self.state = self.state.transform( + ['s3_buckets', intent.bucket, intent.prefix + key], + discard, + ) @sync_performer def _perform_copy_s3_keys(self, dispatcher, intent): """ See :class:`CopyS3Keys`. """ - source_bucket = self.s3_buckets[intent.source_bucket] - destination_bucket = self.s3_buckets[intent.destination_bucket] + source_bucket = self.state.s3_buckets[intent.source_bucket] for key in intent.keys: - destination_bucket[intent.destination_prefix + key] = ( - source_bucket[intent.source_prefix + key]) + self.state = self.state.transform( + ['s3_buckets', + intent.destination_bucket, + intent.destination_prefix + key], + source_bucket[intent.source_prefix + key] + ) @sync_performer def _perform_list_s3_keys(self, dispatcher, intent): """ See :class:`ListS3Keys`. """ - bucket = self.s3_buckets[intent.bucket] + bucket = self.state.s3_buckets[intent.bucket] return {key[len(intent.prefix):] for key in bucket if key.startswith(intent.prefix)} @@ -462,7 +541,7 @@ def _perform_download_s3_key(self, dispatcher, intent): """ See :class:`DownloadS3Key`. """ - bucket = self.s3_buckets[intent.source_bucket] + bucket = self.state.s3_buckets[intent.source_bucket] intent.target_path.setContent(bucket[intent.source_key]) @sync_performer @@ -470,13 +549,18 @@ def _perform_upload_s3_key(self, dispatcher, intent): """ See :class:`UploadToS3`. """ - bucket = self.s3_buckets[intent.target_bucket] + with intent.file.open() as source_file: content = source_file.read() + # XXX: Need to think about this. + # The fake currently only allows unicode content. 
content_type = intent.content_type if content_type is not None: content = ContentTypeUnicode(content, content_type) - bucket[intent.target_key] = content + self.state = self.state.transform( + ['s3_buckets', intent.target_bucket, intent.target_key], + content + ) def get_dispatcher(self): """ @@ -487,6 +571,7 @@ def get_dispatcher(self): # Share implementation with real implementation DownloadS3KeyRecursively: perform_download_s3_key_recursively, UploadToS3Recursively: perform_upload_s3_key_recursively, + ReadS3Key: perform_read_s3_key, # Fake implementation UpdateS3RoutingRules: self._perform_update_s3_routing_rules, diff --git a/admin/release.py b/admin/release.py index ad0be4d01f..ba23f5dc6e 100644 --- a/admin/release.py +++ b/admin/release.py @@ -10,6 +10,7 @@ import yaml import os +import re import sys import tempfile import virtualenv @@ -52,6 +53,7 @@ DeleteS3Keys, CopyS3Keys, DownloadS3KeyRecursively, + ReadS3Key, UploadToS3, UploadToS3Recursively, CreateCloudFrontInvalidation, @@ -91,6 +93,16 @@ class DocumentationRelease(Exception): """ +@attributes(['documentation_version', 'expected_version']) +class UnexpectedDocumentationVersion(Exception): + """ + Raised if the source documentation is found to have a different version + than is being published. + """ + def __str__(self): + return self.__repr__() + + class Environments(Names): """ The environments that documentation can be published to. 
@@ -142,17 +154,25 @@ class DocumentationConfiguration(object): DOCUMENTATION_CONFIGURATIONS = { Environments.PRODUCTION: DocumentationConfiguration( - documentation_bucket="clusterhq-docs", - cloudfront_cname="docs.clusterhq.com", - dev_bucket="clusterhq-staging-docs"), + documentation_bucket=u"clusterhq-docs", + cloudfront_cname=u"docs.clusterhq.com", + dev_bucket=u"clusterhq-staging-docs"), Environments.STAGING: DocumentationConfiguration( - documentation_bucket="clusterhq-staging-docs", - cloudfront_cname="docs.staging.clusterhq.com", - dev_bucket="clusterhq-staging-docs"), + documentation_bucket=u"clusterhq-staging-docs", + cloudfront_cname=u"docs.staging.clusterhq.com", + dev_bucket=u"clusterhq-staging-docs"), } +def strip_html_tags(html): + """ + :param unicode html: The HTML content. + :returns: ``html`` with all HTML tags removed. + """ + return re.sub(r"<[^>]+>", "", html) + + def parse_routing_rules(routing_config, hostname): """ Parse routing rule description. @@ -224,15 +244,27 @@ def publish_docs(flocker_version, doc_version, environment, routing_config): raise NotTagged() configuration = DOCUMENTATION_CONFIGURATIONS[environment] - dev_prefix = 'release/flocker-%s/' % (flocker_version,) - version_prefix = 'en/%s/' % (get_doc_version(doc_version),) + dev_prefix = u'release/flocker-{}/'.format(flocker_version) + version_prefix = u'en/{}/'.format(get_doc_version(doc_version)) is_dev = not is_release(doc_version) if is_dev: - stable_prefix = "en/devel/" + stable_prefix = u"en/devel/" else: - stable_prefix = "en/latest/" + stable_prefix = u"en/latest/" + found_version_html = yield Effect( + ReadS3Key( + source_bucket=configuration.dev_bucket, + source_key=dev_prefix + u"version.html", + ) + ) + found_version_number = strip_html_tags(found_version_html).strip() + if found_version_number != doc_version: + raise UnexpectedDocumentationVersion( + documentation_version=found_version_number, + expected_version=doc_version + ) # Get the list of keys in the new 
documentation. new_version_keys = yield Effect( ListS3Keys(bucket=configuration.dev_bucket, @@ -287,6 +319,7 @@ def publish_docs(flocker_version, doc_version, environment, routing_config): UpdateS3ErrorPage(bucket=configuration.documentation_bucket, target_prefix=version_prefix)) + # XXX: We also need to calculate and invalidate the changed "latest" keys. # The changed keys are the new keys, the keys that were deleted from this # version, and the keys for the previous version. changed_keys = (new_version_keys | existing_version_keys) @@ -294,9 +327,9 @@ def publish_docs(flocker_version, doc_version, environment, routing_config): # S3 serves /index.html when given /, so any changed /index.html means # that / changed as well. # Note that we check for '/index.html' but remove 'index.html' - changed_keys |= {key_name[:-len('index.html')] + changed_keys |= {key_name[:-len(u'index.html')] for key_name in changed_keys - if key_name.endswith('/index.html')} + if key_name.endswith(u'/index.html')} # Always update the root. changed_keys |= {''} @@ -306,7 +339,6 @@ def publish_docs(flocker_version, doc_version, environment, routing_config): changed_paths = {prefix + key_name for key_name in changed_keys for prefix in [stable_prefix, version_prefix]} - yield Effect(UpdateS3RoutingRules( bucket=configuration.documentation_bucket, routing_rules=parse_routing_rules( diff --git a/admin/test/test_release.py b/admin/test/test_release.py index 0db5520150..42e6da80fc 100644 --- a/admin/test/test_release.py +++ b/admin/test/test_release.py @@ -8,6 +8,7 @@ from hashlib import sha256 from gzip import GzipFile +from random import randrange from StringIO import StringIO import tempfile from textwrap import dedent @@ -27,21 +28,23 @@ from twisted.python.procutils import which from twisted.python.usage import UsageError +from pyrsistent import freeze, thaw, PClass, field + from .. 
import release from ..release import ( upload_python_packages, upload_packages, update_repo, parse_routing_rules, publish_docs, Environments, - DocumentationRelease, DOCUMENTATION_CONFIGURATIONS, NotTagged, NotARelease, + DocumentationRelease, NotTagged, NotARelease, calculate_base_branch, create_release_branch, CreateReleaseBranchOptions, BranchExists, TagExists, UploadOptions, create_pip_index, upload_pip_index, publish_homebrew_recipe, PushFailed, - update_license_file, + update_license_file, UnexpectedDocumentationVersion ) from ..packaging import Distribution -from ..aws import FakeAWS, CreateCloudFrontInvalidation +from ..aws import FakeAWS, CreateCloudFrontInvalidation, FakeAWSState from ..yum import FakeYum, yum_dispatcher from flocker.testtools import TestCase @@ -177,6 +180,139 @@ def test_long_match_first(self, hostname): ])) +STATE_EMPTY = FakeAWSState( + s3_buckets={ + u"clusterhq-staging-docs": freeze({}), + u"clusterhq-docs": freeze({}), + } +) + +WEEKLY_RELEASE_VERSION = u"1.10.3.dev2" + +STATE_WEEKLY_PRE_PUBLICATION = STATE_EMPTY.transform( + [u"s3_buckets", u"clusterhq-staging-docs"], + freeze({ + u"release/flocker-{}/version.html".format( + WEEKLY_RELEASE_VERSION + ): WEEKLY_RELEASE_VERSION, + u"release/flocker-{}/index.html".format( + WEEKLY_RELEASE_VERSION + ): u'index-content', + }) +) + +STATE_WEEKLY_POST_PUBLICATION = STATE_WEEKLY_PRE_PUBLICATION.transform( + [u"s3_buckets", u"clusterhq-docs"], + freeze({ + u"en/{}/version.html".format( + WEEKLY_RELEASE_VERSION + ): WEEKLY_RELEASE_VERSION, + u"en/{}/index.html".format( + WEEKLY_RELEASE_VERSION + ): u'index-content', + u"en/devel/version.html": WEEKLY_RELEASE_VERSION, + u"en/devel/index.html": u'index-content', + }) +).transform( + [u"cloudfront_invalidations"], + lambda l: l.append( + CreateCloudFrontInvalidation( + cname=u'docs.clusterhq.com', + paths={u'en/devel/', + u'en/devel/index.html', + u'en/devel/version.html', + u'en/1.10.3.dev2/', + u'en/1.10.3.dev2/index.html', + 
u'en/1.10.3.dev2/version.html'} + ) + ) +).transform( + [u"routing_rules", u"clusterhq-docs"], + RoutingRules([]) +) + +MARKETING_RELEASE_VERSION = u"1.10.3" + +STATE_MARKETING_PRE_PUBLICATION = STATE_WEEKLY_POST_PUBLICATION.transform( + [u"s3_buckets", u"clusterhq-staging-docs"], + lambda b: b.update({ + u"release/flocker-{}/version.html".format( + MARKETING_RELEASE_VERSION + ): MARKETING_RELEASE_VERSION + }) +).transform( + [u"cloudfront_invalidations"], + freeze([]) +) + +STATE_MARKETING_POST_PUBLICATION = STATE_MARKETING_PRE_PUBLICATION.transform( + [u"s3_buckets", u"clusterhq-docs"], + lambda b: b.update({ + u"en/{}/version.html".format( + MARKETING_RELEASE_VERSION + ): MARKETING_RELEASE_VERSION, + u"en/latest/version.html": MARKETING_RELEASE_VERSION + }) +).transform( + [u"cloudfront_invalidations"], + lambda l: l.append( + CreateCloudFrontInvalidation( + cname=u'docs.clusterhq.com', + paths={u'en/latest/', + u'en/latest/version.html', + u'en/1.10.3/', + u'en/1.10.3/version.html'} + ) + ) +).transform( + [u"routing_rules", u"clusterhq-docs"], + RoutingRules([]) +).transform( + [u"error_key", u"clusterhq-docs"], + u'en/1.10.3/error_pages/404.html', +) + +POST1_RELEASE_VERSION = u"1.10.3.post1" +STATE_POST1_PRE_PUBLICATION = STATE_MARKETING_POST_PUBLICATION.transform( + [u"s3_buckets", u"clusterhq-staging-docs"], + lambda b: b.update({ + u"release/flocker-{}/version.html".format( + POST1_RELEASE_VERSION + ): MARKETING_RELEASE_VERSION, + u"release/flocker-{}/index.html".format( + POST1_RELEASE_VERSION + ): u"new-index-content" + }) +).transform( + [u"cloudfront_invalidations"], + freeze([]) +) + + +STATE_POST1_POST_PUBLICATION = STATE_POST1_PRE_PUBLICATION.transform( + [u"s3_buckets", u"clusterhq-docs"], + lambda b: b.update({ + u"en/{}/index.html".format( + MARKETING_RELEASE_VERSION + ): u"new-index-content", + u"en/latest/index.html": u"new-index-content", + }) +).transform( + [u"cloudfront_invalidations"], + lambda l: l.append( + CreateCloudFrontInvalidation( 
+ cname=u'docs.clusterhq.com', + paths={u'en/1.10.3/', + u'en/latest/', + u'en/latest/index.html', + u'en/latest/version.html', + u'en/1.10.3/version.html', + u'en/1.10.3/index.html'} + ) + ) +) + + class PublishDocsTests(TestCase): """ Tests for :func:``publish_docs``. @@ -199,524 +335,199 @@ def publish_docs(self, aws, environment=environment, routing_config=routing_config)) - def test_copies_documentation(self): + def test_documentation_version_mismatch(self): """ - Calling :func:`publish_docs` copies documentation from - ``s3://clusterhq-staging-docs/release/flocker-/`` to - ``s3://clusterhq-staging-docs/en//`` and - ``s3://clusterhq-staging-docs/en/latest/``. + If the version number in the ``version.html`` file in the source + directory does not match the destination version number, + ``UnexpectedDocumentationVersion`` is raised with the mismatched + version numbers. """ + unexpected_version = u"0.0.0" + expected_version = WEEKLY_RELEASE_VERSION + aws = FakeAWS( - routing_rules={}, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': - 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': - 'sub-index-content', - 'release/flocker-0.3.0+444.gf05215b/other.html': - 'other-content', - 'release/flocker-0.3.0+392.gd50b558/index.html': - 'bad-index', - 'release/flocker-0.3.0+392.gd50b558/sub/index.html': - 'bad-sub-index', - 'release/flocker-0.3.0+392.gd50b558/other.html': - 'bad-other', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', - environment=Environments.STAGING) + state=STATE_WEEKLY_PRE_PUBLICATION.transform( + [u"s3_buckets", u"clusterhq-staging-docs", + u"release/flocker-{}/version.html".format( + WEEKLY_RELEASE_VERSION + )], + unexpected_version + ) + ) + exception = self.assertRaises( + UnexpectedDocumentationVersion, + self.publish_docs, + aws=aws, + flocker_version=expected_version, + doc_version=expected_version, + 
environment=Environments.STAGING + ) self.assertEqual( - aws.s3_buckets['clusterhq-staging-docs'], { - # originals - 'index.html': '', - 'en/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': - 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': - 'sub-index-content', - 'release/flocker-0.3.0+444.gf05215b/other.html': - 'other-content', - 'release/flocker-0.3.0+392.gd50b558/index.html': - 'bad-index', - 'release/flocker-0.3.0+392.gd50b558/sub/index.html': - 'bad-sub-index', - 'release/flocker-0.3.0+392.gd50b558/other.html': - 'bad-other', - # and new copies - 'en/latest/index.html': 'index-content', - 'en/latest/sub/index.html': 'sub-index-content', - 'en/latest/other.html': 'other-content', - 'en/0.3.1/index.html': 'index-content', - 'en/0.3.1/sub/index.html': 'sub-index-content', - 'en/0.3.1/other.html': 'other-content', - }) + (unexpected_version, expected_version), + (exception.documentation_version, + exception.expected_version) + ) - def test_copies_documentation_production(self): + def test_copies_documentation_production_weekly(self): """ Calling :func:`publish_docs` in production copies documentation from ``s3://clusterhq-staging-docs/release/flocker-/`` to ``s3://clusterhq-docs/en//`` and - ``s3://clusterhq-docs/en/latest/``. + ``s3://clusterhq-docs/en/devel/`` for weekly releases. 
""" - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': '', - }, - 'clusterhq-staging-docs': { - 'release/flocker-0.3.1/index.html': - 'index-content', - 'release/flocker-0.3.1/sub/index.html': - 'sub-index-content', - 'release/flocker-0.3.1/other.html': - 'other-content', - 'release/flocker-0.3.0+392.gd50b558/index.html': - 'bad-index', - 'release/flocker-0.3.0+392.gd50b558/sub/index.html': - 'bad-sub-index', - 'release/flocker-0.3.0+392.gd50b558/other.html': - 'bad-other', - } - }) - self.publish_docs(aws, '0.3.1', '0.3.1', - environment=Environments.PRODUCTION) + aws = FakeAWS(state=STATE_WEEKLY_PRE_PUBLICATION) + + self.publish_docs( + aws=aws, + flocker_version=WEEKLY_RELEASE_VERSION, + doc_version=WEEKLY_RELEASE_VERSION, + environment=Environments.PRODUCTION + ) + self.assertEqual( - aws.s3_buckets['clusterhq-docs'], { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': 'index-content', - 'en/latest/sub/index.html': 'sub-index-content', - 'en/latest/other.html': 'other-content', - 'en/0.3.1/index.html': 'index-content', - 'en/0.3.1/sub/index.html': 'sub-index-content', - 'en/0.3.1/other.html': 'other-content', - }) + STATE_WEEKLY_POST_PUBLICATION, + aws.state + ) + + def test_copies_documentation_production_marketing(self): + """ + Calling :func:`publish_docs` in production copies documentation from + ``s3://clusterhq-staging-docs/release/flocker-/`` to + ``s3://clusterhq-docs/en//`` and + ``s3://clusterhq-docs/en/latest/`` for marketing releases. 
+ """ + aws = FakeAWS(state=STATE_MARKETING_PRE_PUBLICATION) + + self.publish_docs( + aws=aws, + flocker_version=MARKETING_RELEASE_VERSION, + doc_version=MARKETING_RELEASE_VERSION, + environment=Environments.PRODUCTION + ) - def test_deletes_removed_documentation(self): + self.assertEqual( + STATE_MARKETING_POST_PUBLICATION, + aws.state + ) + + def test_overwrites_existing_documentation(self): """ Calling :func:`publish_docs` replaces documentation from ``s3://clusterhq-staging-docs/en//``. with documentation from ``s3://clusterhq-staging-docs/release/flocker-/``. - In particular, files with changed content are updated, and removed - files are deleted. - """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': '', - 'en/0.3.1/index.html': 'old-index-content', - 'en/0.3.1/sub/index.html': 'old-sub-index-content', - 'en/0.3.1/other.html': 'other-content', - 'release/flocker-0.3.0+444.gf05215b/index.html': - 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': - 'sub-index-content', - }, + Files with changed content are updated. 
+ """ + initial_state = STATE_MARKETING_PRE_PUBLICATION.transform( + [u's3_buckets', u'clusterhq-docs'], + lambda b: b.update({ + u"en/{}/version.html".format( + MARKETING_RELEASE_VERSION + ): u"0.0.0", + u"en/latest/version.html": u"0.0.0", }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', - environment=Environments.STAGING) + ) + aws = FakeAWS(state=initial_state) + + self.publish_docs( + aws=aws, + flocker_version=MARKETING_RELEASE_VERSION, + doc_version=MARKETING_RELEASE_VERSION, + environment=Environments.PRODUCTION + ) + self.assertEqual( - aws.s3_buckets['clusterhq-staging-docs'], { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': 'index-content', - 'en/latest/sub/index.html': 'sub-index-content', - 'en/0.3.1/index.html': 'index-content', - 'en/0.3.1/sub/index.html': 'sub-index-content', - # and the originals - 'release/flocker-0.3.0+444.gf05215b/index.html': - 'index-content', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': - 'sub-index-content', - }) + STATE_MARKETING_POST_PUBLICATION, + aws.state + ) - def test_updated_routing_rules(self): + @skipIf( + True, + "XXX This fails because ``publish_docs`` doesn't do a separate" + "calculation of changed keys with the ``en/latest`` prefix." + ) + def test_deletes_and_invalidates_documentation(self): """ - Calling :func:`publish_docs` updates the routing rules for the - "clusterhq-staging-docs" bucket. + Calling :func:`publish_docs` deletes documentation pages from + ``s3://clusterhq-staging-docs/en//``. + that do not exist in + ``s3://clusterhq-staging-docs/release/flocker-/``. 
+ """ - aws = FakeAWS( - routing_rules={}, - s3_buckets={ - 'clusterhq-staging-docs': { - }, + initial_state = STATE_MARKETING_PRE_PUBLICATION.transform( + [u's3_buckets', u'clusterhq-docs'], + lambda b: b.update({ + u"en/{}/unexpected_file.html".format( + MARKETING_RELEASE_VERSION + ): "unexpected_content", + u"en/latest/another/unexpected_file.html": "blah", }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', - environment=Environments.STAGING, - routing_config={ - "prefix/": {"key/": {"replace_key": "replace"}}, - }) - self.assertThat( - aws.routing_rules['clusterhq-staging-docs'], - MatchesRoutingRules([ - RoutingRule.when(key_prefix="prefix/key/").then_redirect( - replace_key="prefix/replace", - hostname="docs.staging.clusterhq.com", - protocol="https", - http_redirect_code="302", - ), - ])) + ) + aws = FakeAWS(state=initial_state) + + self.publish_docs( + aws=aws, + flocker_version=MARKETING_RELEASE_VERSION, + doc_version=MARKETING_RELEASE_VERSION, + environment=Environments.PRODUCTION + ) + + # Also invalidates the deleted keys. + [original_invalidation] = getattr( + STATE_MARKETING_POST_PUBLICATION, + u"cloudfront_invalidations", + ) + new_invalidation = CreateCloudFrontInvalidation( + cname=original_invalidation.cname, + paths=original_invalidation.paths.copy() + ) + new_invalidation.paths.update({ + u'en/latest/another/unexpected_file.html', + u'en/1.10.3/unexpected_file.html' + }) + self.assertEqual( + STATE_MARKETING_POST_PUBLICATION.transform( + [u"cloudfront_invalidations"], + freeze([new_invalidation]) + ), + aws.state + ) def test_updated_routing_rules_production(self): """ Calling :func:`publish_docs` updates the routing rules for the "clusterhq-docs" bucket. 
""" - aws = FakeAWS( - routing_rules={}, - s3_buckets={ - 'clusterhq-docs': { - }, - 'clusterhq-staging-docs': { + initial_state = STATE_MARKETING_PRE_PUBLICATION + aws = FakeAWS(state=initial_state) + + self.publish_docs( + aws=aws, + flocker_version=MARKETING_RELEASE_VERSION, + doc_version=MARKETING_RELEASE_VERSION, + environment=Environments.PRODUCTION, + routing_config={ + u"prefix/": { + u"key/": {u"replace_key": u"replace"} }, }) - self.publish_docs(aws, '0.3.1', '0.3.1', - environment=Environments.PRODUCTION, - routing_config={ - "prefix/": {"key/": {"replace_key": "replace"}}, - }) self.assertThat( - aws.routing_rules['clusterhq-docs'], + aws.state.routing_rules[u'clusterhq-docs'], MatchesRoutingRules([ - RoutingRule.when(key_prefix="prefix/key/").then_redirect( - replace_key="prefix/replace", - hostname="docs.clusterhq.com", - protocol="https", - http_redirect_code="302", + RoutingRule.when(key_prefix=u"prefix/key/").then_redirect( + replace_key=u"prefix/replace", + hostname=u"docs.clusterhq.com", + protocol=u"https", + http_redirect_code=u"302", ), ])) - def test_creates_cloudfront_invalidation_new_files(self): - """ - Calling :func:`publish_docs` with a release or documentation version - creates an invalidation for - - en/latest/ - - en// - each for every path in the new documentation for . 
- """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': '', - 'en/0.3.1/index.html': '', - 'en/0.3.1/sub/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/sub/other.html': '', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', - environment=Environments.STAGING) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.staging.clusterhq.com', - paths={ - 'en/latest/', - 'en/latest/index.html', - 'en/latest/sub/', - 'en/latest/sub/index.html', - 'en/latest/sub/other.html', - 'en/0.3.1/', - 'en/0.3.1/index.html', - 'en/0.3.1/sub/', - 'en/0.3.1/sub/index.html', - 'en/0.3.1/sub/other.html', - }), - ]) - - def test_creates_cloudfront_invalidation_trailing_index(self): - """ - Calling :func:`publish_docs` with a release or documentation version - doesn't creates an invalidation for files that end in ``index.html``. - """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/sub_index.html': '', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', - environment=Environments.STAGING) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.staging.clusterhq.com', - paths={ - 'en/latest/', - 'en/latest/sub_index.html', - 'en/0.3.1/', - 'en/0.3.1/sub_index.html', - }), - ]) - - def test_creates_cloudfront_invalidation_removed_files(self): - """ - Calling :func:`publish_docs` with a release or documentation version - creates an invalidation for - - en/latest/ - - en// - each for every path in the old documentation for . 
- """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': '', - 'en/0.3.1/index.html': '', - 'en/0.3.1/sub/index.html': '', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', - environment=Environments.STAGING) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.staging.clusterhq.com', - paths={ - 'en/latest/', - 'en/latest/index.html', - 'en/latest/sub/', - 'en/latest/sub/index.html', - 'en/0.3.1/', - 'en/0.3.1/index.html', - 'en/0.3.1/sub/', - 'en/0.3.1/sub/index.html', - }), - ]) - - def test_creates_cloudfront_invalidation_previous_version(self): - """ - Calling :func:`publish_docs` with a release or documentation version - creates an invalidation for - - en/latest/ - - en// - each for every path in the documentation for version that was - previously `en/latest/`. - """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': '', - 'en/0.3.0/index.html': '', - 'en/0.3.0/sub/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': '', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1', - environment=Environments.STAGING) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.staging.clusterhq.com', - paths={ - 'en/latest/', - 'en/latest/index.html', - 'en/latest/sub/', - 'en/latest/sub/index.html', - 'en/0.3.1/', - 'en/0.3.1/index.html', - 'en/0.3.1/sub/', - 'en/0.3.1/sub/index.html', - }), - ]) - - def test_creates_cloudfront_invalidation_devel_new_files(self): - """ - Calling :func:`publish_docs` with a development version creates an - invalidation for - - en/devel/ - - en// - each for every path in the new documentation for . 
- """ - aws = FakeAWS( - routing_rules={ - 'clusterhq-staging-docs': { - 'en/devel/': 'en/0.3.0/', - }, - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/devel/index.html': '', - 'en/0.3.1.dev1/index.html': '', - 'en/0.3.1.dev1/sub/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/sub/other.html': '', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1.dev1', - environment=Environments.STAGING) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.staging.clusterhq.com', - paths={ - 'en/devel/', - 'en/devel/index.html', - 'en/devel/sub/', - 'en/devel/sub/index.html', - 'en/devel/sub/other.html', - 'en/0.3.1.dev1/', - 'en/0.3.1.dev1/index.html', - 'en/0.3.1.dev1/sub/', - 'en/0.3.1.dev1/sub/index.html', - 'en/0.3.1.dev1/sub/other.html', - }), - ]) - - def test_creates_cloudfront_invalidation_devel_removed_files(self): - """ - Calling :func:`publish_docs` with a development version creates an - invalidation for - - en/devel/ - - en// - each for every path in the old documentation for . 
- """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/devel/index.html': '', - 'en/0.3.1.dev1/index.html': '', - 'en/0.3.1.dev1/sub/index.html': '', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1.dev1', - environment=Environments.STAGING) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.staging.clusterhq.com', - paths={ - 'en/devel/', - 'en/devel/index.html', - 'en/devel/sub/', - 'en/devel/sub/index.html', - 'en/0.3.1.dev1/', - 'en/0.3.1.dev1/index.html', - 'en/0.3.1.dev1/sub/', - 'en/0.3.1.dev1/sub/index.html', - }), - ]) - - def test_creates_cloudfront_invalidation_devel_previous_version(self): - """ - Calling :func:`publish_docs` with a development version creates an - invalidation for - - en/devel/ - - en// - each for every path in the documentation for version that was - previously `en/devel/`. - """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-staging-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/devel/index.html': '', - 'en/0.3.0/index.html': '', - 'en/0.3.0/sub/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/index.html': '', - 'release/flocker-0.3.0+444.gf05215b/sub/index.html': '', - }, - }) - self.publish_docs(aws, '0.3.0+444.gf05215b', '0.3.1.dev1', - environment=Environments.STAGING) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.staging.clusterhq.com', - paths={ - 'en/devel/', - 'en/devel/index.html', - 'en/devel/sub/index.html', - 'en/devel/sub/', - 'en/0.3.1.dev1/', - 'en/0.3.1.dev1/index.html', - 'en/0.3.1.dev1/sub/', - 'en/0.3.1.dev1/sub/index.html', - }), - ]) - - def test_creates_cloudfront_invalidation_production(self): - """ - Calling :func:`publish_docs` in production creates an invalidation for - ``docs.clusterhq.com``. 
- """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-docs': { - 'index.html': '', - 'en/index.html': '', - 'en/latest/index.html': '', - 'en/0.3.1/index.html': '', - 'en/0.3.1/sub/index.html': '', - }, - 'clusterhq-staging-docs': {}, - }) - self.publish_docs(aws, '0.3.1', '0.3.1', - environment=Environments.PRODUCTION) - self.assertEqual( - aws.cloudfront_invalidations, [ - CreateCloudFrontInvalidation( - cname='docs.clusterhq.com', - paths={ - 'en/latest/', - 'en/latest/index.html', - 'en/latest/sub/', - 'en/latest/sub/index.html', - 'en/0.3.1/', - 'en/0.3.1/index.html', - 'en/0.3.1/sub/', - 'en/0.3.1/sub/index.html', - }), - ]) - def test_production_gets_tagged_version(self): """ Trying to publish to production, when the version being pushed isn't tagged raises an exception. """ - aws = FakeAWS(routing_rules={}, s3_buckets={}) + aws = FakeAWS(state=STATE_EMPTY) self.assertRaises( NotTagged, self.publish_docs, @@ -728,28 +539,49 @@ def test_production_can_publish_doc_version(self): Publishing a documentation version to the version of the latest full release in production succeeds. 
""" - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-docs': {}, - 'clusterhq-staging-docs': {}, - }) + aws = FakeAWS(state=STATE_POST1_PRE_PUBLICATION) # Does not raise: self.publish_docs( - aws, '0.3.1.post1', '0.3.1', environment=Environments.PRODUCTION) + aws=aws, + flocker_version=POST1_RELEASE_VERSION, + doc_version=MARKETING_RELEASE_VERSION, + environment=Environments.PRODUCTION + ) + self.expectThat( + thaw(aws.state.s3_buckets), + Equals( + thaw(STATE_POST1_POST_PUBLICATION.s3_buckets) + ), + ) + self.expectThat( + thaw(aws.state.routing_rules), + Equals( + thaw(STATE_POST1_POST_PUBLICATION.routing_rules) + ), + ) + self.expectThat( + thaw(aws.state.error_key), + Equals( + thaw(STATE_POST1_POST_PUBLICATION.error_key) + ), + ) + self.expectThat( + thaw(aws.state.cloudfront_invalidations), + Equals( + thaw(STATE_POST1_POST_PUBLICATION.cloudfront_invalidations) + ), + ) + @skipIf( + True, + "XXX: We don't do pre-releases any more. " + "This test is redundant." + ) def test_production_can_publish_prerelease(self): """ Publishing a pre-release succeeds. """ - aws = FakeAWS( - routing_rules={ - }, - s3_buckets={ - 'clusterhq-docs': {}, - 'clusterhq-staging-docs': {}, - }) + aws = FakeAWS(state=STATE_EMPTY) # Does not raise: self.publish_docs( aws, '0.3.2rc1', '0.3.2rc1', environment=Environments.PRODUCTION) @@ -758,135 +590,13 @@ def test_publish_non_release_fails(self): """ Trying to publish to version that isn't a release fails. """ - aws = FakeAWS(routing_rules={}, s3_buckets={}) + aws = FakeAWS(state=STATE_EMPTY) self.assertRaises( NotARelease, self.publish_docs, aws, '0.3.0+444.gf05215b', '0.3.0+444.gf05215b', environment=Environments.STAGING) - def assert_error_key_update(self, doc_version, environment, should_update): - """ - Call ``publish_docs`` and assert that only the expected buckets have an - updated error_key property. - - :param unicode doc_version: The version of the documentation that is - being published. 
- :param NamedConstant environment: One of the ``NamedConstants`` in - ``Environments``. - :param bool should_update: A flag indicating whether the error_key for - the bucket associated with ``environment`` is expected to be - updated. - :raises: ``FailTest`` if an error_key in any of the S3 buckets has been - updated unexpectedly. - """ - # Get a set of all target S3 buckets. - bucket_names = set() - for e in Environments.iterconstants(): - bucket_names.add( - DOCUMENTATION_CONFIGURATIONS[e].documentation_bucket - ) - # And that all the buckets themselves are empty. - empty_buckets = {bucket_name: {} for bucket_name in bucket_names} - # Including the dev bucket - empty_buckets['clusterhq-staging-docs'] = {} - # And that all the buckets have an empty error_key - empty_error_keys = {bucket_name: b'' for bucket_name in bucket_names} - - aws = FakeAWS( - routing_rules={}, - s3_buckets=empty_buckets, - error_key=empty_error_keys - ) - # The value of any updated error_key will include the version that's - # being published. - expected_error_path = 'en/{}/error_pages/404.html'.format(doc_version) - expected_updated_bucket = ( - DOCUMENTATION_CONFIGURATIONS[environment].documentation_bucket - ) - # Grab a copy of the current error_key before it gets mutated. - expected_error_keys = aws.error_key.copy() - if should_update: - # And if an error_key is expected to be updated we expect it to be - # for the bucket corresponding to the environment that we're - # publishing to. - expected_error_keys[expected_updated_bucket] = expected_error_path - - self.publish_docs( - aws, - flocker_version=doc_version, - doc_version=doc_version, - environment=environment - ) - - self.assertEqual(expected_error_keys, aws.error_key) - - def test_error_key_dev_staging(self): - """ - Publishing documentation for a development release to the staging - bucket, updates the error_key in that bucket only. 
- """ - self.assert_error_key_update( - doc_version='0.4.1.dev1', - environment=Environments.STAGING, - should_update=True - ) - - def test_error_key_dev_production(self): - """ - Publishing documentation for a development release to the production - bucket, does not update the error_key in any of the buckets. - """ - self.assert_error_key_update( - doc_version='0.4.1.dev1', - environment=Environments.PRODUCTION, - should_update=False - ) - - def test_error_key_pre_staging(self): - """ - Publishing documentation for a pre-release to the staging - bucket, updates the error_key in that bucket only. - """ - self.assert_error_key_update( - doc_version='0.4.1rc1', - environment=Environments.STAGING, - should_update=True - ) - - def test_error_key_pre_production(self): - """ - Publishing documentation for a pre-release to the production - bucket, does not update the error_key in any of the buckets. - """ - self.assert_error_key_update( - doc_version='0.4.1rc1', - environment=Environments.PRODUCTION, - should_update=False - ) - - def test_error_key_marketing_staging(self): - """ - Publishing documentation for a marketing release to the staging - bucket, updates the error_key in that bucket. - """ - self.assert_error_key_update( - doc_version='0.4.1', - environment=Environments.STAGING, - should_update=True - ) - - def test_error_key_marketing_production(self): - """ - Publishing documentation for a marketing release to the production - bucket, updates the error_key in that bucket. 
- """ - self.assert_error_key_update( - doc_version='0.4.1', - environment=Environments.PRODUCTION, - should_update=True - ) - class UpdateRepoTests(TestCase): """ @@ -894,12 +604,12 @@ class UpdateRepoTests(TestCase): """ def setUp(self): super(UpdateRepoTests, self).setUp() - self.target_bucket = 'test-target-bucket' - self.target_key = 'test/target/key' + self.target_bucket = u'test-target-bucket' + self.target_key = u'test/target/key' self.package_directory = FilePath(self.mktemp()) - self.packages = ['clusterhq-flocker-cli', 'clusterhq-flocker-node', - 'clusterhq-flocker-docker-plugin'] + self.packages = [u'clusterhq-flocker-cli', u'clusterhq-flocker-node', + u'clusterhq-flocker-docker-plugin'] def update_repo(self, aws, yum, package_directory, target_bucket, target_key, source_repo, @@ -937,35 +647,35 @@ def test_fake_rpm(self): - Other packages on the buildserver are not downloaded. - Existing metadata files are left untouched. """ - existing_s3_keys = { - os.path.join(self.target_key, 'existing_package.rpm'): '', + existing_s3_keys = freeze({ + os.path.join(self.target_key, u'existing_package.rpm'): u'', os.path.join(self.target_key, - 'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm'): - 'existing-content-to-be-replaced', # noqa - os.path.join(self.target_key, 'repodata', 'repomod.xml'): - '-metadata.xml', - os.path.join(self.target_key, 'repodata', - '-metadata.xml'): - 'metadata for: existing_package.rpm', - } - # Copy before passing to FakeAWS - expected_keys = existing_s3_keys.copy() + u'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm'): + u'existing-content-to-be-replaced', # noqa + os.path.join(self.target_key, u'repodata', u'repomod.xml'): + u'-metadata.xml', + os.path.join(self.target_key, u'repodata', + u'-metadata.xml'): + u'metadata for: existing_package.rpm', + }) aws = FakeAWS( - routing_rules={}, - s3_buckets={ - self.target_bucket: existing_s3_keys, - }, + state=FakeAWSState( + s3_buckets=freeze({ + self.target_bucket: existing_s3_keys, + }), 
+ ) ) - unspecified_package = 'unspecified-package-0.3.3-0.dev.7.noarch.rpm' - repo_contents = { - 'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm': 'cli-package', - 'clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm': 'node-package', - 'clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm': - 'docker-plugin-package', - unspecified_package: 'unspecified-package-content', - } + unspecified_package = u'unspecified-package-0.3.3-0.dev.7.noarch.rpm' + repo_contents = freeze({ + u'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm': u'cli-package', + u'clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm': + u'node-package', + u'clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm': + u'docker-plugin-package', + unspecified_package: u'unspecified-package-content', + }) self.update_repo( aws=aws, @@ -981,32 +691,34 @@ def test_fake_rpm(self): # The expected files are the new files plus the package which already # existed in S3. - expected_packages = { - 'existing_package.rpm', - 'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm', - 'clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm', - 'clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm', - } + expected_packages = freeze({ + u'existing_package.rpm', + u'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm', + u'clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm', + u'clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm', + }) - expected_keys.update({ - 'test/target/key/clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm': - 'cli-package', - 'test/target/key/clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm': - 'node-package', - 'test/target/key/clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm': # noqa - 'docker-plugin-package', - }) - expected_keys.update({ - os.path.join(self.target_key, 'repodata', 'repomod.xml'): - '-metadata.xml', - os.path.join(self.target_key, 'repodata', - '-metadata.xml'): - 'metadata content for: ' + ','.join(sorted(expected_packages)), + expected_keys = existing_s3_keys.update({ + 
u'test/target/key/clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm': + u'cli-package', + u'test/target/key/clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm': + u'node-package', + u'test/target/key/clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm': # noqa + u'docker-plugin-package', + }).update({ + os.path.join(self.target_key, u'repodata', u'repomod.xml'): + u'-metadata.xml', + os.path.join(self.target_key, u'repodata', + u'-metadata.xml'): + u'metadata content for: ' + ','.join( + sorted(expected_packages) + ), }) self.assertEqual( - expected_keys, - aws.s3_buckets[self.target_bucket]) + thaw(expected_keys), + thaw(aws.state.s3_buckets[self.target_bucket]) + ) def test_fake_deb(self): """ @@ -1016,32 +728,31 @@ def test_fake_deb(self): - Existing packages on S3 are preserved in the metadata. - Other packages on the buildserver are not downloaded. """ - existing_s3_keys = { - os.path.join(self.target_key, 'existing_package.deb'): '', + existing_s3_keys = freeze({ + os.path.join(self.target_key, u'existing_package.deb'): '', os.path.join(self.target_key, - 'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb'): - 'existing-content-to-be-replaced', # noqa - os.path.join(self.target_key, 'Packages.gz'): - 'metadata for: existing_package.deb', - } - # Copy before passing to FakeAWS - expected_keys = existing_s3_keys.copy() + u'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb'): + u'existing-content-to-be-replaced', # noqa + os.path.join(self.target_key, u'Packages.gz'): + u'metadata for: existing_package.deb', + }) aws = FakeAWS( - routing_rules={}, - s3_buckets={ - self.target_bucket: existing_s3_keys, - }, + state=FakeAWSState( + s3_buckets=freeze({ + self.target_bucket: existing_s3_keys, + }), + ) ) - unspecified_package = 'unspecified-package_0.3.3-0.dev.7_all.deb' - repo_contents = { - 'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb': 'cli-package', - 'clusterhq-flocker-node_0.3.3-0.dev.7_all.deb': 'node-package', - 'clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb': - 
'docker-plugin-package', - unspecified_package: 'unspecified-package-content', - } + unspecified_package = u'unspecified-package_0.3.3-0.dev.7_all.deb' + repo_contents = freeze({ + u'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb': u'cli-package', + u'clusterhq-flocker-node_0.3.3-0.dev.7_all.deb': u'node-package', + u'clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb': + u'docker-plugin-package', + unspecified_package: u'unspecified-package-content', + }) self.update_repo( aws=aws, @@ -1051,34 +762,34 @@ def test_fake_deb(self): target_key=self.target_key, source_repo=create_fake_repository(self, files=repo_contents), packages=self.packages, - flocker_version='0.3.3.dev7', - distribution=Distribution(name='ubuntu', version='14.04'), + flocker_version=u'0.3.3.dev7', + distribution=Distribution(name=u'ubuntu', version=u'14.04'), ) # The expected files are the new files plus the package which already # existed in S3. - expected_packages = { - 'existing_package.deb', - 'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb', - 'clusterhq-flocker-node_0.3.3-0.dev.7_all.deb', - 'clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb', - } + expected_packages = freeze({ + u'existing_package.deb', + u'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb', + u'clusterhq-flocker-node_0.3.3-0.dev.7_all.deb', + u'clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb', + }) - expected_keys.update({ - 'test/target/key/Release': 'Origin: ClusterHQ\n', - 'test/target/key/clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb': - 'cli-package', - 'test/target/key/clusterhq-flocker-node_0.3.3-0.dev.7_all.deb': - 'node-package', - 'test/target/key/clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb': # noqa - 'docker-plugin-package', - 'test/target/key/Packages.gz': - 'Packages.gz for: ' + ','.join(sorted(expected_packages)), + expected_keys = existing_s3_keys.update({ + u'test/target/key/Release': u'Origin: ClusterHQ\n', + u'test/target/key/clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb': + u'cli-package', + 
u'test/target/key/clusterhq-flocker-node_0.3.3-0.dev.7_all.deb': + u'node-package', + u'test/target/key/clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb': # noqa + u'docker-plugin-package', + u'test/target/key/Packages.gz': + u'Packages.gz for: ' + u','.join(sorted(expected_packages)), }) self.assertEqual( expected_keys, - aws.s3_buckets[self.target_bucket]) + aws.state.s3_buckets[self.target_bucket]) def test_package_not_available_exception(self): """ @@ -1086,10 +797,11 @@ def test_package_not_available_exception(self): is raised. """ aws = FakeAWS( - routing_rules={}, - s3_buckets={ - self.target_bucket: {}, - }, + state=FakeAWSState( + s3_buckets=freeze({ + self.target_bucket: freeze({}), + }), + ) ) exception = self.assertRaises( @@ -1103,8 +815,8 @@ def test_package_not_available_exception(self): source_repo=create_fake_repository( self, files={}), packages=self.packages, - flocker_version='0.3.3.dev7', - distribution=Distribution(name="centos", version="7"), + flocker_version=u'0.3.3.dev7', + distribution=Distribution(name=u"centos", version=u"7"), ) self.assertEqual(404, exception.response.status_code) @@ -1123,10 +835,11 @@ def test_real_yum_utils(self): repo_uri = 'file://' + source_repo.path aws = FakeAWS( - routing_rules={}, - s3_buckets={ - self.target_bucket: {}, - }, + state=FakeAWSState( + s3_buckets={ + self.target_bucket: freeze({}), + }, + ) ) class RealYum(object): @@ -1141,33 +854,33 @@ def get_dispatcher(self): target_key=self.target_key, source_repo=repo_uri, packages=self.packages, - flocker_version='0.3.3.dev7', - distribution=Distribution(name='centos', version='7'), + flocker_version=u'0.3.3.dev7', + distribution=Distribution(name=u'centos', version=u'7'), ) expected_files = { os.path.join(self.target_key, file) for file in [ - 'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm', - 'clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm', - 'clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm', - 'repodata/repomd.xml', + 
u'clusterhq-flocker-cli-0.3.3-0.dev.7.noarch.rpm', + u'clusterhq-flocker-node-0.3.3-0.dev.7.noarch.rpm', + u'clusterhq-flocker-docker-plugin-0.3.3-0.dev.7.noarch.rpm', + u'repodata/repomd.xml', ] } - files_on_s3 = aws.s3_buckets[self.target_bucket] + files_on_s3 = aws.state.s3_buckets[self.target_bucket] - repodata_path = os.path.join(self.target_key, 'repodata') + repodata_path = os.path.join(self.target_key, u'repodata') # Yum repositories prefix metadata files with the sha256 hash # of the file. Since these files contain timestamps, we calculate # the hash from the file, to determine the expected file names. for metadata_file in [ - 'other.sqlite.bz2', - 'filelists.xml.gz', - 'primary.xml.gz', - 'filelists.sqlite.bz2', - 'primary.sqlite.bz2', - 'other.xml.gz', + u'other.sqlite.bz2', + u'filelists.xml.gz', + u'primary.xml.gz', + u'filelists.sqlite.bz2', + u'primary.sqlite.bz2', + u'other.xml.gz', ]: for key in files_on_s3: if (key.endswith(metadata_file) and @@ -1176,13 +889,13 @@ def get_dispatcher(self): os.path.join( repodata_path, sha256(files_on_s3[key]).hexdigest() + - '-' + metadata_file) + u'-' + metadata_file) ) break else: expected_files.add( os.path.join( - repodata_path, '-' + metadata_file)) + repodata_path, u'-' + metadata_file)) # The original source repository contains no metadata. 
# This tests that CreateRepo creates the expected metadata files from @@ -1202,14 +915,15 @@ def test_real_dpkg_utils(self): source_repo = FilePath(self.mktemp()) source_repo.createDirectory() - FilePath(__file__).sibling('apt-repo').copyTo(source_repo) - repo_uri = 'file://' + source_repo.path + FilePath(__file__).sibling(u'apt-repo').copyTo(source_repo) + repo_uri = u'file://' + source_repo.path aws = FakeAWS( - routing_rules={}, - s3_buckets={ - self.target_bucket: {}, - }, + state=FakeAWSState( + s3_buckets=freeze({ + self.target_bucket: freeze({}), + }), + ) ) class RealYum(object): @@ -1224,21 +938,21 @@ def get_dispatcher(self): target_key=self.target_key, source_repo=repo_uri, packages=self.packages, - flocker_version='0.3.3.dev7', - distribution=Distribution(name="ubuntu", version="14.04"), + flocker_version=u'0.3.3.dev7', + distribution=Distribution(name=u"ubuntu", version=u"14.04"), ) expected_files = { os.path.join(self.target_key, file) for file in [ - 'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb', - 'clusterhq-flocker-node_0.3.3-0.dev.7_all.deb', - 'clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb', - 'Packages.gz', - 'Release', + u'clusterhq-flocker-cli_0.3.3-0.dev.7_all.deb', + u'clusterhq-flocker-node_0.3.3-0.dev.7_all.deb', + u'clusterhq-flocker-docker-plugin_0.3.3-0.dev.7_all.deb', + u'Packages.gz', + u'Release', ] } - files_on_s3 = aws.s3_buckets[self.target_bucket] + files_on_s3 = aws.state.s3_buckets[self.target_bucket] # The original source repository contains no metadata. # This tests that CreateRepo creates the expected metadata files from @@ -1247,7 +961,9 @@ def get_dispatcher(self): # The repository is built in self.packages_directory # Ensure that that does not leak into the metadata. 
- packages_gz = files_on_s3[os.path.join(self.target_key, 'Packages.gz')] + packages_gz = files_on_s3[ + os.path.join(self.target_key, u'Packages.gz') + ] with GzipFile(fileobj=StringIO(packages_gz), mode="r") as f: packages_metadata = f.read() self.assertNotIn(self.package_directory.path, packages_metadata) @@ -1419,15 +1135,18 @@ class UploadPythonPackagesTests(TestCase): def setUp(self): super(UploadPythonPackagesTests, self).setUp() - self.target_bucket = 'test-target-bucket' + self.target_bucket = u'test-target-bucket' self.scratch_directory = FilePath(self.mktemp()) self.top_level = FilePath(self.mktemp()) self.top_level.makedirs() self.aws = FakeAWS( - routing_rules={}, - s3_buckets={ - self.target_bucket: {}, - }) + state=FakeAWSState( + routing_rules=freeze({}), + s3_buckets=freeze({ + self.target_bucket: {}, + }) + ) + ) def upload_python_packages(self): """ @@ -1471,7 +1190,7 @@ def test_distributions_uploaded(self): self.upload_python_packages() - aws_keys = self.aws.s3_buckets[self.target_bucket].keys() + aws_keys = self.aws.state.s3_buckets[self.target_bucket].keys() self.assertEqual( sorted(aws_keys), ['python/Flocker-0.3.0-py2-none-any.whl', @@ -1681,14 +1400,16 @@ def test_index_uploaded(self): """ An index file is uploaded to S3. """ - bucket = 'clusterhq-archive' + bucket = u'clusterhq-archive' aws = FakeAWS( - routing_rules={}, - s3_buckets={ - bucket: { - 'python/Flocker-0.3.1-py2-none-any.whl': '', - }, - }) + state=FakeAWSState( + s3_buckets=freeze({ + bucket: { + u'python/Flocker-0.3.1-py2-none-any.whl': u'', + }, + }) + ) + ) scratch_directory = FilePath(self.mktemp()) scratch_directory.makedirs() @@ -1700,11 +1421,11 @@ def test_index_uploaded(self): target_bucket=bucket)) self.assertEqual( - aws.s3_buckets[bucket]['python/index.html'], + aws.state.s3_buckets[bucket][u'python/index.html'], ( - '\nThis is an index for pip\n' + u'\nThis is an index for pip\n' ))