Example 1
    def _s3_upload(self, root, project, unique_id, version=None):
        """Upload the built docs under `root` to S3 and stage redirects.

        Docs are pushed under several key prefixes (``<project>/<version>``,
        the bucket root for the "main" project, and the per-build
        ``unique_id``) so multiple projects and versions can coexist in a
        single S3 bucket.
        """
        from moztreedocs.package import distribution_files
        from moztreedocs.upload import s3_upload, s3_set_redirects

        # Workaround the issue
        # BlockingIOError: [Errno 11] write could not complete without blocking
        # https://github.com/travis-ci/travis-ci/issues/8920
        import fcntl

        fcntl.fcntl(1, fcntl.F_SETFL, 0)

        doc_files = list(distribution_files(root))

        upload_prefixes = []
        if version:
            upload_prefixes.append(f"{project}/{version}")

        # Until we redirect / to main/latest, upload the main docs
        # to the root.
        if project == "main":
            upload_prefixes.append("")

        upload_prefixes.append(unique_id)

        with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
            raw_redirects = yaml.safe_load(fh)["redirects"]

        redirect_map = {
            src.strip("/"): dst.strip("/") for src, dst in raw_redirects.items()
        }

        staged_redirects = {}

        for dest in upload_prefixes:
            s3_upload(doc_files, dest)

            # Don't setup redirects for the "version" or "uuid" prefixes since
            # we are exceeding a 50 redirect limit and external things are
            # unlikely to link there anyway (see bug 1614908).
            if dest == unique_id or (version and dest.endswith(version)):
                continue

            base = dest + "/" if dest else dest
            for src, dst in redirect_map.items():
                staged_redirects[base + src] = base + dst

        print("Redirects currently staged")
        pprint(staged_redirects, indent=1)

        s3_set_redirects(staged_redirects)

        unique_link = BASE_LINK + unique_id + "/index.html"
        print("Uploaded documentation can be accessed here " + unique_link)
    def _s3_upload(self, root, project, unique_id, version=None):
        """Upload the built docs under `root` to S3 and register redirects.

        Files are uploaded to multiple locations:

        ``<project>/<version>`` (when `version` is given), the bucket root
        for the "main" project, and the per-build `unique_id` prefix. This
        allows multiple projects and versions to be stored in the S3 bucket.

        :param root: directory containing the built documentation.
        :param project: project name used to build the key prefixes.
        :param unique_id: unique key prefix for this particular build.
        :param version: optional version string; when truthy the docs are
            also uploaded under ``<project>/<version>``.
        """
        from moztreedocs.package import distribution_files
        from moztreedocs.upload import s3_upload, s3_set_redirects

        files = list(distribution_files(root))
        key_prefixes = []
        if version:
            key_prefixes.append('%s/%s' % (project, version))

        # Until we redirect / to main/latest, upload the main docs
        # to the root.
        if project == 'main':
            key_prefixes.append('')

        key_prefixes.append(unique_id)

        with open(os.path.join(DOC_ROOT, 'config.yml'), 'r') as fh:
            redirects = yaml.safe_load(fh)['redirects']

        # Normalize leading/trailing slashes so prefix + path joins cleanly.
        redirects = {k.strip("/"): v.strip("/") for k, v in redirects.items()}

        all_redirects = {}

        for prefix in key_prefixes:
            s3_upload(files, prefix)

            # Don't setup redirects for the "version" or "uuid" prefixes since
            # we are exceeding a 50 redirect limit and external things are
            # unlikely to link there anyway (see bug 1614908). Previously only
            # the version prefix was skipped, so the unique_id prefix still
            # inflated the redirect set past the limit.
            if (version and prefix.endswith(version)) or prefix == unique_id:
                continue

            if prefix:
                prefix += '/'
            all_redirects.update(
                {prefix + k: prefix + v
                 for k, v in redirects.items()})

        print("Redirects currently staged")
        pprint(all_redirects, indent=1)

        s3_set_redirects(all_redirects)
Example 3
    def _s3_upload(self, root, project, version=None):
        """Upload the built docs under `root` to S3 and stage redirects.

        Files are uploaded to multiple locations:

        ``<project>/latest``, ``<project>/<version>`` (when `version` is
        given), and the bucket root for the "main" project. This allows
        multiple projects and versions to be stored in the S3 bucket.
        """
        from moztreedocs.package import distribution_files
        from moztreedocs.upload import s3_upload, s3_set_redirects

        doc_files = list(distribution_files(root))

        prefixes = ['%s/latest' % project]
        if version:
            prefixes.append('%s/%s' % (project, version))

        # Until we redirect / to main/latest, upload the main docs
        # to the root.
        if project == 'main':
            prefixes.append('')

        with open(os.path.join(here, 'redirects.yml'), 'r') as fh:
            raw_redirects = yaml.safe_load(fh)['redirects']

        # Normalize leading/trailing slashes so prefix + path joins cleanly.
        redirect_map = {
            src.strip("/"): dst.strip("/") for src, dst in raw_redirects.items()
        }

        staged = {}

        for dest in prefixes:
            s3_upload(doc_files, dest)

            base = dest + '/' if dest else dest
            for src, dst in redirect_map.items():
                staged[base + src] = base + dst

        print("Redirects currently staged")
        pprint(staged, indent=1)

        s3_set_redirects(staged)
Example 4
    def _s3_upload(self, root, project, version=None):
        """Push the docs distribution under `root` to S3.

        Files are uploaded to multiple locations:

        ``<project>/latest``, ``<project>/<version>`` (when `version` is
        given), and the bucket root for the "main" project. This allows
        multiple projects and versions to be stored in the S3 bucket.
        """
        from moztreedocs.package import distribution_files
        from moztreedocs.upload import s3_upload

        doc_files = list(distribution_files(root))

        # Every build refreshes <project>/latest.
        s3_upload(doc_files, key_prefix='%s/latest' % project)

        # A versioned copy is kept alongside it when a version was supplied.
        if version:
            s3_upload(doc_files, key_prefix='%s/%s' % (project, version))

        # Until we redirect / to main/latest, upload the main docs
        # to the root.
        if project == 'main':
            s3_upload(doc_files)