Example #1
def github_pull(user, project):
    g = get_github(user)
    repo_name = project.github_repo
    if repo_name is None:
        raise Exception("No GitHub repo defined.")
    repo = g.get_repo(repo_name)
    # If somehow we don't have a branch set, this will use the "master_branch"
    branch_name = project.github_branch or repo.master_branch
    try:
        branch = repo.get_branch(branch_name)
    except GithubException:
        raise Exception("Unable to get the branch.")

    if project.github_last_commit == branch.commit.sha:
        # Nothing to do.
        return False

    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = {x.path: x for x in tree.tree}
    paths_notags = {get_root_path(x) for x in paths}
    root = find_project_root(paths)

    # First try finding the resource map so we don't fail out part-done later.
    # TODO: transaction support for file contents would be nice...

    resource_root = root + 'resources/'
    manifest_path = root + 'appinfo.json'
    if manifest_path in paths:
        manifest_sha = paths[manifest_path].sha
        manifest = json.loads(git_blob(repo, manifest_sha))
        media = manifest.get('resources', {}).get('media', [])
    else:
        raise Exception("appinfo.json not found")

    project_type = manifest.get('projectType', 'native')

    for resource in media:
        path = resource_root + resource['file']
        if project_type == 'pebblejs' and resource['name'] in {
            'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}:
            continue
        if path not in paths_notags:
            raise Exception("Resource %s not found in repo." % path)

    # Now we grab the zip.
    zip_url = repo.get_archive_link('zipball', branch_name)
    u = urllib2.urlopen(zip_url)

    # And wipe the project!
    project.source_files.all().delete()
    project.resources.all().delete()

    # This must happen before do_import_archive or we'll stamp on its results.
    project.github_last_commit = branch.commit.sha
    project.github_last_sync = now()
    project.save()

    import_result = do_import_archive(project.id, u.read())

    send_keen_event('cloudpebble', 'cloudpebble_github_pull', user=user, data={
        'data': {
            'repo': project.github_repo
        }
    })

    return import_result
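
Example #1 relies on a git_blob helper defined elsewhere in the codebase. A minimal sketch, assuming PyGithub's Repository.get_git_blob, could look like this:

import base64

def git_blob(repo, sha):
    # Fetch a git blob by SHA and return its raw contents.
    # PyGithub hands blob content back base64-encoded.
    blob = repo.get_git_blob(sha)
    if blob.encoding == 'base64':
        return base64.b64decode(blob.content)
    return blob.content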
Example #2
def github_push(user, commit_message, repo_name, project):
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = [x.path for x in tree.tree]

    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}

    try:
        root = find_project_root(paths)
    except:
        root = ''

    src_root = root + 'src/'
    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        repo_path = src_root + source.file_name
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob',
                                                       content=source.get_contents())
            print "New file: %s" % repo_path
        else:
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                print "Updated file: %s" % repo_path
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    expected_source_files = [src_root + x.file_name for x in project_sources]
    for path in next_tree.keys():
        if not path.startswith(src_root):
            continue
        if path not in expected_source_files:
            del next_tree[path]
            print "Deleted file: %s" % path
            has_changed = True

    # Now try handling resource files.

    resources = project.resources.all()

    resource_root = root + 'resources/'

    for res in resources:
        for variant in res.variants.all():
            repo_path = resource_root + variant.path
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    print "Changed resource: %s" % repo_path
                    has_changed = True
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    print "Created blob %s" % blob.sha
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                print "New resource: %s" % repo_path
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                print "Created blob %s" % blob.sha
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)

    remote_manifest_path = root + 'appinfo.json'
    remote_wscript_path = root + 'wscript'

    remote_manifest_sha = next_tree[remote_manifest_path]._InputGitTreeElement__sha if remote_manifest_path in next_tree else None
    if remote_manifest_sha is not None:
        their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha))
        their_res_dict = their_manifest_dict['resources']
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}

    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict['resources']

    if our_res_dict != their_res_dict:
        print "Resources mismatch."
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                print "Deleted resource: %s" % repo_path
                del next_tree[repo_path]

    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob',
                                                                  content=generate_manifest(project, resources))

    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob',
                                                             content=generate_wscript_file(project, True))
        has_changed = True

    # Commit the new tree.
    if has_changed:
        print "Has changed; committing"
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                print "removing subtree node %s" % x

        print [x._InputGitTreeElement__mode for x in next_tree.values()]
        git_tree = repo.create_git_tree(next_tree.values())
        print "Created tree %s" % git_tree.sha
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        print "Created commit %s" % git_commit.sha
        git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        print "Updated ref %s" % git_ref.ref
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    send_keen_event('cloudpebble', 'cloudpebble_github_push', user=user, data={
        'data': {
            'repo': project.github_repo
        }
    })

    return False
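
The push logic above compares local file contents against tree entry SHAs through a git_sha helper. A minimal sketch, assuming content is a byte string and that the helper reproduces Git's standard blob hashing, might be:

import hashlib

def git_sha(content):
    # Git hashes a blob as sha1("blob <size>\0" + content); computing the
    # same digest locally lets us compare against tree entry SHAs without
    # fetching the blobs.
    return hashlib.sha1('blob %d\x00%s' % (len(content), content)).hexdigest()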
Example #3
def github_push(user, commit_message, repo_name, project):
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = [x.path for x in tree.tree]

    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}

    try:
        remote_version, root = find_project_root(paths)
    except:
        remote_version, root = project.sdk_version, ''

    src_root = root + 'src/'
    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        repo_path = src_root + source.file_name
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob',
                                                       content=source.get_contents())
            print "New file: %s" % repo_path
        else:
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                print "Updated file: %s" % repo_path
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    expected_source_files = [src_root + x.file_name for x in project_sources]
    for path in next_tree.keys():
        if not path.startswith(src_root):
            continue
        if path not in expected_source_files:
            del next_tree[path]
            print "Deleted file: %s" % path
            has_changed = True

    # Now try handling resource files.

    resources = project.resources.all()

    old_resource_root = root + ("resources/src/" if remote_version == '1' else 'resources/')
    new_resource_root = root + ("resources/src/" if project.sdk_version == '1' else 'resources/')

    # Migrate all the resources so we can subsequently ignore the issue.
    if old_resource_root != new_resource_root:
        print "moving resources"
        new_next_tree = next_tree.copy()
        for path in next_tree:
            if path.startswith(old_resource_root) and not path.endswith('resource_map.json'):
                new_path = new_resource_root + path[len(old_resource_root):]
                print "moving %s to %s" % (path, new_path)
                next_tree[path]._InputGitTreeElement__path = new_path
                new_next_tree[new_path] = next_tree[path]
                del new_next_tree[path]
        next_tree = new_next_tree

    for res in resources:
        repo_path = new_resource_root + res.path
        if repo_path in next_tree:
            content = res.get_contents()
            if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                print "Changed resource: %s" % repo_path
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                print "Created blob %s" % blob.sha
                next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
        else:
            print "New resource: %s" % repo_path
            blob = repo.create_git_blob(base64.b64encode(res.get_contents()), 'base64')
            print "Created blob %s" % blob.sha
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)

    # Both of these are used regardless of version
    remote_map_path = root + 'resources/src/resource_map.json'
    remote_manifest_path = root + 'appinfo.json'
    remote_wscript_path = root + 'wscript'

    if remote_version == '1':
        remote_map_sha = next_tree[remote_map_path]._InputGitTreeElement__sha if remote_map_path in next_tree else None
        if remote_map_sha is not None:
            their_res_dict = json.loads(git_blob(repo, remote_map_sha))
        else:
            their_res_dict = {'friendlyVersion': 'VERSION', 'versionDefName': '', 'media': []}
        their_manifest_dict = {}
    else:
        remote_manifest_sha = next_tree[remote_manifest_path]._InputGitTreeElement__sha if remote_manifest_path in next_tree else None
        if remote_manifest_sha is not None:
            their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha))
            their_res_dict = their_manifest_dict['resources']
        else:
            their_manifest_dict = {}
            their_res_dict = {'media': []}

    if project.sdk_version == '1':
        our_res_dict = generate_resource_dict(project, resources)
    else:
        our_manifest_dict = generate_v2_manifest_dict(project, resources)
        our_res_dict = our_manifest_dict['resources']

    if our_res_dict != their_res_dict:
        print "Resources mismatch."
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = new_resource_root + path
            if repo_path in next_tree:
                print "Deleted resource: %s" % repo_path
                del next_tree[repo_path]

        # Update the stored resource map, if applicable.
        if project.sdk_version == '1':
            if remote_map_path in next_tree:
                next_tree[remote_map_path]._InputGitTreeElement__sha = NotSet
                next_tree[remote_map_path]._InputGitTreeElement__content = dict_to_pretty_json(our_res_dict)
            else:
                next_tree[remote_map_path] = InputGitTreeElement(path=remote_map_path, mode='100644', type='blob',
                                                                 content=dict_to_pretty_json(our_res_dict))
            # Delete the v2 manifest, if one exists
            if remote_manifest_path in next_tree:
                del next_tree[remote_manifest_path]
    # This one is separate because there's more than just the resource map changing.
    if project.sdk_version == '2' and their_manifest_dict != our_manifest_dict:
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_v2_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob',
                                                                  content=generate_v2_manifest(project, resources))
        # Delete the v1 manifest, if one exists
        if remote_map_path in next_tree:
            del next_tree[remote_map_path]

    if project.sdk_version == '2':
        if remote_wscript_path not in next_tree:
            next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob',
                                                                 content=generate_wscript_file(project, True))
            has_changed = True
    else:
        if remote_wscript_path in next_tree:
            del next_tree[remote_wscript_path]

    # Commit the new tree.
    if has_changed:
        print "Has changed; committing"
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                print "removing subtree node %s" % x

        print [x._InputGitTreeElement__mode for x in next_tree.values()]
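
This version writes the resource map back via dict_to_pretty_json, another helper assumed here rather than shown. A plausible sketch is simply indented json.dumps output:

import json

def dict_to_pretty_json(d):
    # Serialise a dict as indented JSON, the form we store back into
    # resource_map.json / appinfo.json.
    return json.dumps(d, indent=4, separators=(',', ': ')) + "\n"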
Example #4
def do_import_archive(project_id, archive, delete_project=False):
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix=".zip") as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), "r") as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                #   - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                MANIFEST = "appinfo.json"
                SRC_DIR = "src/"
                RES_PATH = "resources"

                if len(contents) > 400:
                    raise Exception("Too many files in zip file.")
                file_list = [x.filename for x in contents]

                base_dir = find_project_root(file_list)
                dir_end = len(base_dir)

                def make_valid_filename(zip_entry):
                    entry_filename = zip_entry.filename
                    if entry_filename[:dir_end] != base_dir:
                        return False
                    entry_filename = entry_filename[dir_end:]
                    if entry_filename == "":
                        return False
                    if not os.path.normpath("/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s" % entry_filename).startswith(
                        "/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/"
                    ):
                        raise SuspiciousOperation("Invalid zip file contents.")
                    if zip_entry.file_size > 5242880:  # 5 MB
                        raise Exception("Excessively large compressed file.")
                    return entry_filename

                # Now iterate over the things we found
                with transaction.atomic():
                    for entry in contents:
                        filename = make_valid_filename(entry)
                        if not filename:
                            continue

                        if filename == MANIFEST:
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())

                            project.app_uuid = m["uuid"]
                            project.app_short_name = m["shortName"]
                            project.app_long_name = m["longName"]
                            project.app_company_name = m["companyName"]
                            project.app_version_label = m["versionLabel"]
                            project.sdk_version = m.get("sdkVersion", "2")
                            project.app_is_watchface = m.get("watchapp", {}).get("watchface", False)
                            project.app_is_hidden = m.get("watchapp", {}).get("hiddenApp", False)
                            project.app_is_shown_on_communication = m.get("watchapp", {}).get(
                                "onlyShownOnCommunication", False
                            )
                            project.app_capabilities = ",".join(m.get("capabilities", []))
                            if "targetPlatforms" in m:
                                project.app_platforms = ",".join(m["targetPlatforms"])
                            project.app_keys = dict_to_pretty_json(m.get("appKeys", {}))
                            project.project_type = m.get("projectType", "native")
                            if project.project_type not in [x[0] for x in Project.PROJECT_TYPES]:
                                raise Exception("Illegal project type %s" % project.project_type)
                            media_map = m["resources"]["media"]

                            tag_map = {v: k for k, v in ResourceVariant.VARIANT_STRINGS.iteritems() if v}

                            desired_resources = {}
                            resources_files = {}
                            resource_identifiers = {}
                            resource_variants = {}
                            file_exists_for_root = {}

                            # Go through the media map and look for resources
                            for resource in media_map:
                                file_name = resource["file"]
                                identifier = resource["name"]
                                # Pebble.js and simply.js both have some internal resources that we don't import.
                                if project.project_type in {"pebblejs", "simplyjs"}:
                                    if identifier in {
                                        "MONO_FONT_14",
                                        "IMAGE_MENU_ICON",
                                        "IMAGE_LOGO_SPLASH",
                                        "IMAGE_TILE_SPLASH",
                                    }:
                                        continue
                                tags, root_file_name = get_filename_variant(file_name, tag_map)
                                if len(tags) != 0:
                                    raise ValueError("Generic resource filenames cannot contain a tilde (~)")
                                if file_name not in desired_resources:
                                    desired_resources[root_file_name] = []
                                print "Desired resource: %s" % root_file_name
                                desired_resources[root_file_name].append(resource)
                                file_exists_for_root[root_file_name] = False

                            for zipitem in contents:
                                # Let's just try opening the file
                                filename = make_valid_filename(zipitem)
                                if filename is False or not filename.startswith(RES_PATH):
                                    continue
                                filename = filename[len(RES_PATH) + 1 :]
                                try:
                                    extracted = z.open("%s%s/%s" % (base_dir, RES_PATH, filename))
                                except KeyError:
                                    print "Failed to open %s" % filename
                                    continue

                                # Now we know the file exists and is in the resource directory - is it one we want?
                                tags, root_file_name = get_filename_variant(filename, tag_map)
                                tags_string = ",".join(str(int(t)) for t in tags)

                                print "Importing file %s with root %s " % (zipitem.filename, root_file_name)

                                if root_file_name in desired_resources:
                                    """ FIXME: targetPlatforms is currently stored in resourceFile, but it *should* be in
                                     ResourceIdentifier. Until that is fixed, we cannot support multiple identifiers
                                     linked to a single file compiling for different platforms. When the bug is fixed,
                                     this will need to be changed. Until then, we just pick the first file on the list
                                     of desired_resources."""
                                    medias = desired_resources[root_file_name]
                                    is_font = False
                                    print "Looking for variants of %s" % root_file_name
                                    # An exception to the above warning is made for fonts, where multiple identifiers is
                                    # already implemented in the UI.
                                    if len(medias) > 1:
                                        if set(r["type"] for r in medias) != {"font"}:
                                            raise NotImplementedError(
                                                "You cannot currently import a project with multiple identifiers for a single non-font file"
                                            )
                                        else:
                                            is_font = True
                                    resource = medias[-1]

                                    for resource in medias:
                                        # Make only one resource file per base resource.
                                        if root_file_name not in resources_files:
                                            kind = resource["type"]
                                            is_menu_icon = resource.get("menuIcon", False)
                                            target_platforms = resource.get("targetPlatforms", None)
                                            target_platforms = (
                                                json.dumps(target_platforms) if target_platforms else None
                                            )
                                            resources_files[root_file_name] = ResourceFile.objects.create(
                                                project=project,
                                                file_name=os.path.basename(root_file_name),
                                                kind=kind,
                                                is_menu_icon=is_menu_icon,
                                                target_platforms=target_platforms,
                                            )

                                        identifier = resource["name"]
                                        # Add all the identifiers which don't clash with existing identifiers
                                        if not identifier in resource_identifiers:
                                            tracking = resource.get("trackingAdjust", None)
                                            regex = resource.get("characterRegex", None)
                                            compatibility = resource.get("compatibility", None)

                                            ResourceIdentifier.objects.create(
                                                resource_file=resources_files[root_file_name],
                                                resource_id=identifier,
                                                character_regex=regex,
                                                tracking=tracking,
                                                compatibility=compatibility,
                                            )
                                            resource_identifiers[identifier] = resources_files[root_file_name]

                                        # At the moment, only add > 1 identifier for fonts.
                                        if not is_font:
                                            break

                                    print "Adding variant %s with tags [%s]" % (root_file_name, tags_string)
                                    actual_file_name = resource["file"]
                                    resource_variants[actual_file_name] = ResourceVariant.objects.create(
                                        resource_file=resources_files[root_file_name], tags=tags_string
                                    )
                                    resource_variants[actual_file_name].save_file(extracted)
                                    file_exists_for_root[root_file_name] = True

                            # Check that at least one variant of each specified resource exists.
                            for root_file_name, loaded in file_exists_for_root.iteritems():
                                if not loaded:
                                    raise KeyError(
                                        "No file was found to satisfy the manifest filename: {}".format(root_file_name)
                                    )

                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith(".")) and (
                                filename.endswith(".c") or filename.endswith(".h") or filename.endswith(".js")
                            ):
                                base_filename = filename[len(SRC_DIR) :]
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode("utf-8"))
                    project.save()
                    send_keen_event("cloudpebble", "cloudpebble_zip_import_succeeded", project=project)

        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event(
            "cloudpebble", "cloudpebble_zip_import_failed", user=project.owner, data={"data": {"reason": e.message}}
        )
        raise
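
do_import_archive depends on get_filename_variant to split tagged resource filenames (e.g. 'icon~bw.png') into variant tags and a root filename. A hedged sketch, assuming tag_map maps tag strings to numeric IDs, could be:

import os

def get_filename_variant(file_name, tag_map):
    # Split "name~tag1~tag2.ext" into ([tag ids], "name.ext").
    # Unknown tag strings raise a KeyError.
    dirname, basename = os.path.split(file_name)
    name, ext = os.path.splitext(basename)
    parts = name.split('~')
    tags = [tag_map[t] for t in parts[1:]]
    root_file_name = os.path.join(dirname, parts[0] + ext)
    return tags, root_file_name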
Example #5
def github_push(user, commit_message, repo_name, project):
    g = Github(user.github.token,
               client_id=settings.GITHUB_CLIENT_ID,
               client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = [x.path for x in tree.tree]

    next_tree = {
        x.path: InputGitTreeElement(path=x.path,
                                    mode=x.mode,
                                    type=x.type,
                                    sha=x.sha)
        for x in tree.tree
    }

    try:
        root = find_project_root(paths)
    except:
        root = ''

    src_root = root + 'src/'
    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        repo_path = src_root + source.file_name
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(
                path=repo_path,
                mode='100644',
                type='blob',
                content=source.get_contents())
            print "New file: %s" % repo_path
        else:
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                print "Updated file: %s" % repo_path
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[
                    repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    expected_source_files = [src_root + x.file_name for x in project_sources]
    for path in next_tree.keys():
        if not path.startswith(src_root):
            continue
        if path not in expected_source_files:
            del next_tree[path]
            print "Deleted file: %s" % path
            has_changed = True

    # Now try handling resource files.

    resources = project.resources.all()

    resource_root = root + 'resources/'

    for res in resources:

        repo_path = resource_root + res.path
        if repo_path in next_tree:
            content = res.get_contents()
            if git_sha(
                    content) != next_tree[repo_path]._InputGitTreeElement__sha:
                print "Changed resource: %s" % repo_path
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(content),
                                            'base64')
                print "Created blob %s" % blob.sha
                next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
        else:
            print "New resource: %s" % repo_path
            blob = repo.create_git_blob(base64.b64encode(res.get_contents()),
                                        'base64')
            print "Created blob %s" % blob.sha
            next_tree[repo_path] = InputGitTreeElement(path=repo_path,
                                                       mode='100644',
                                                       type='blob',
                                                       sha=blob.sha)

    remote_manifest_path = root + 'appinfo.json'
    remote_wscript_path = root + 'wscript'

    remote_manifest_sha = next_tree[
        remote_manifest_path]._InputGitTreeElement__sha if remote_manifest_path in next_tree else None
    if remote_manifest_sha is not None:
        their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha))
        their_res_dict = their_manifest_dict['resources']
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}

    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict['resources']

    if our_res_dict != their_res_dict:
        print "Resources mismatch."
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(
            x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                print "Deleted resource: %s" % repo_path
                del next_tree[repo_path]

    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[
                remote_manifest_path]._InputGitTreeElement__content = generate_manifest(
                    project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(
                path=remote_manifest_path,
                mode='100644',
                type='blob',
                content=generate_manifest(project, resources))

    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(
            path=remote_wscript_path,
            mode='100644',
            type='blob',
            content=generate_wscript_file(project, True))
        has_changed = True

    # Commit the new tree.
    if has_changed:
        print "Has changed; committing"
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                print "removing subtree node %s" % x

        print [x._InputGitTreeElement__mode for x in next_tree.values()]
        git_tree = repo.create_git_tree(next_tree.values())
        print "Created tree %s" % git_tree.sha
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        print "Created commit %s" % git_commit.sha
        git_ref = repo.get_git_ref(
            'heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        print "Updated ref %s" % git_ref.ref
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    send_keen_event('cloudpebble',
                    'cloudpebble_github_push',
                    user=user,
                    data={'data': {
                        'repo': project.github_repo
                    }})

    return False
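
A hypothetical caller for github_push (push_project and the default commit message are illustrative, not part of the original code) might look like:

def push_project(user, project, message="Update from CloudPebble"):
    # Push the project's current sources and resources to its configured repo.
    if project.github_repo is None:
        raise Exception("No GitHub repo defined.")
    return github_push(user, message, project.github_repo, project)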
Example #6
def do_import_archive(project_id, archive, delete_project=False):
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                #   - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'
                WORKER_SRC_DIR = 'worker_src/'
                RES_PATH = 'resources'

                if len(contents) > 400:
                    raise Exception("Too many files in zip file.")
                file_list = [x.filename for x in contents]


                base_dir = find_project_root(file_list)
                dir_end = len(base_dir)

                def make_valid_filename(zip_entry):
                    entry_filename = zip_entry.filename
                    if entry_filename[:dir_end] != base_dir:
                        return False
                    entry_filename = entry_filename[dir_end:]
                    if entry_filename == '':
                        return False
                    if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % entry_filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                        raise SuspiciousOperation("Invalid zip file contents.")
                    if zip_entry.file_size > 5242880:  # 5 MB
                        raise Exception("Excessively large compressed file.")
                    return entry_filename

                # Now iterate over the things we found
                with transaction.atomic():
                    for entry in contents:
                        filename = make_valid_filename(entry)
                        if not filename:
                            continue

                        if filename == MANIFEST:
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())

                            project.app_uuid = m['uuid']
                            project.app_short_name = m['shortName']
                            project.app_long_name = m['longName']
                            project.app_company_name = m['companyName']
                            project.app_version_label = m['versionLabel']
                            project.sdk_version = m.get('sdkVersion', '2')
                            project.app_is_watchface = m.get('watchapp', {}).get('watchface', False)
                            project.app_is_hidden = m.get('watchapp', {}).get('hiddenApp', False)
                            project.app_is_shown_on_communication = m.get('watchapp', {}).get('onlyShownOnCommunication', False)
                            project.app_capabilities = ','.join(m.get('capabilities', []))
                            project.app_modern_multi_js = m.get('enableMultiJS', False)
                            if 'targetPlatforms' in m:
                                project.app_platforms = ','.join(m['targetPlatforms'])
                            project.app_keys = dict_to_pretty_json(m.get('appKeys', {}))
                            project.project_type = m.get('projectType', 'native')
                            if project.project_type not in [x[0] for x in Project.PROJECT_TYPES]:
                                raise Exception("Illegal project type %s" % project.project_type)
                            media_map = m['resources']['media']

                            tag_map = {v: k for k, v in ResourceVariant.VARIANT_STRINGS.iteritems() if v}

                            desired_resources = {}
                            resources_files = {}
                            resource_variants = {}
                            file_exists_for_root = {}

                            # Go through the media map and look for resources
                            for resource in media_map:
                                file_name = resource['file']
                                identifier = resource['name']
                                # Pebble.js and simply.js both have some internal resources that we don't import.
                                if project.project_type in {'pebblejs', 'simplyjs'}:
                                    if identifier in {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}:
                                        continue
                                tags, root_file_name = get_filename_variant(file_name, tag_map)
                                if (len(tags) != 0):
                                    raise ValueError("Generic resource filenames cannot contain a tilde (~)")
                                if file_name not in desired_resources:
                                    desired_resources[root_file_name] = []
                                print "Desired resource: %s" % root_file_name
                                desired_resources[root_file_name].append(resource)
                                file_exists_for_root[root_file_name] = False

                            for zipitem in contents:
                                # Let's just try opening the file
                                filename = make_valid_filename(zipitem)
                                if filename is False or not filename.startswith(RES_PATH):
                                    continue
                                filename = filename[len(RES_PATH)+1:]
                                try:
                                    extracted = z.open("%s%s/%s"%(base_dir, RES_PATH, filename))
                                except KeyError:
                                    print "Failed to open %s" % filename
                                    continue

                                # Now we know the file exists and is in the resource directory - is it one we want?
                                tags, root_file_name = get_filename_variant(filename, tag_map)
                                tags_string = ",".join(str(int(t)) for t in tags)

                                print "Importing file %s with root %s " % (zipitem.filename, root_file_name)

                                if root_file_name in desired_resources:
                                    medias = desired_resources[root_file_name]
                                    print "Looking for variants of %s" % root_file_name

                                    # Because 'kind' and 'is_menu_icons' are properties of ResourceFile in the database,
                                    # we just use the first one.
                                    resource = medias[0]
                                    # Make only one resource file per base resource.
                                    if root_file_name not in resources_files:
                                        kind = resource['type']
                                        is_menu_icon = resource.get('menuIcon', False)
                                        resources_files[root_file_name] = ResourceFile.objects.create(
                                            project=project,
                                            file_name=os.path.basename(root_file_name),
                                            kind=kind,
                                            is_menu_icon=is_menu_icon)

                                    # But add a resource variant for every file
                                    print "Adding variant %s with tags [%s]" % (root_file_name, tags_string)
                                    actual_file_name = resource['file']
                                    resource_variants[actual_file_name] = ResourceVariant.objects.create(resource_file=resources_files[root_file_name], tags=tags_string)
                                    resource_variants[actual_file_name].save_file(extracted)
                                    file_exists_for_root[root_file_name] = True

                            # Now add all the resource identifiers
                            for root_file_name in desired_resources:
                                for resource in desired_resources[root_file_name]:
                                    target_platforms = json.dumps(resource['targetPlatforms']) if 'targetPlatforms' in resource else None
                                    ResourceIdentifier.objects.create(
                                        resource_file=resources_files[root_file_name],
                                        resource_id=resource['name'],
                                        target_platforms=target_platforms,
                                        # Font options
                                        character_regex=resource.get('characterRegex', None),
                                        tracking=resource.get('trackingAdjust', None),
                                        compatibility=resource.get('compatibility', None),
                                        # Bitmap options
                                        memory_format=resource.get('memoryFormat', None),
                                        storage_format=resource.get('storageFormat', None),
                                        space_optimisation=resource.get('spaceOptimization', None)
                                    )

                            # Check that at least one variant of each specified resource exists.
                            for root_file_name, loaded in file_exists_for_root.iteritems():
                                if not loaded:
                                    raise KeyError("No file was found to satisfy the manifest filename: {}".format(root_file_name))

                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                base_filename = filename[len(SRC_DIR):]
                                if project.app_modern_multi_js and filename.endswith('.js') and filename.startswith('js/'):
                                    base_filename = base_filename[len('js/'):]
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))
                        elif filename.startswith(WORKER_SRC_DIR):
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                base_filename = filename[len(WORKER_SRC_DIR):]
                                source = SourceFile.objects.create(project=project, file_name=base_filename, target='worker')
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))
                    project.save()
                    send_td_event('cloudpebble_zip_import_succeeded', project=project)

        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_td_event('cloudpebble_zip_import_failed', data={
            'data': {
                'reason': e.message
            }
        }, user=project.owner)
        raise
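
Several of these examples call find_project_root; in this non-versioned form it only needs to locate a prefix containing both a manifest and a src/ directory. A rough sketch, not the exact original implementation:

def find_project_root(paths):
    # Treat any prefix holding an appinfo.json plus a src/ directory with at
    # least one C or JS file as the project root.
    for path in paths:
        if path.endswith('appinfo.json'):
            root = path[:-len('appinfo.json')]
            src = root + 'src/'
            if any(p.startswith(src) and p.endswith(('.c', '.js')) for p in paths):
                return root
    raise Exception("No project root found.")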
Example #7
def github_pull(user, project):
    g = get_github(user)
    repo_name = project.github_repo
    if repo_name is None:
        raise Exception("No GitHub repo defined.")
    repo = g.get_repo(repo_name)
    # If somehow we don't have a branch set, this will use the "master_branch"
    branch_name = project.github_branch or repo.master_branch
    try:
        branch = repo.get_branch(branch_name)
    except GithubException:
        raise Exception("Unable to get the branch.")

    if project.github_last_commit == branch.commit.sha:
        # Nothing to do.
        return False

    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = {x.path: x for x in tree.tree}

    root = find_project_root(paths)

    # First try finding the resource map so we don't fail out part-done later.
    # TODO: transaction support for file contents would be nice...

    resource_root = root + 'resources/'
    manifest_path = root + 'appinfo.json'
    if manifest_path in paths:
        manifest_sha = paths[manifest_path].sha
        manifest = json.loads(git_blob(repo, manifest_sha))
        media = manifest.get('resources', {}).get('media', [])
    else:
        raise Exception("appinfo.json not found")

    project_type = manifest.get('projectType', 'native')

    for resource in media:
        path = resource_root + resource['file']
        if project_type == 'pebblejs' and resource['name'] in {
                'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH',
                'IMAGE_TILE_SPLASH'
        }:
            continue
        if path not in paths:
            raise Exception("Resource %s not found in repo." % path)

    # Now we grab the zip.
    zip_url = repo.get_archive_link('zipball', branch_name)
    u = urllib2.urlopen(zip_url)

    # And wipe the project!
    project.source_files.all().delete()
    project.resources.all().delete()

    # This must happen before do_import_archive or we'll stamp on its results.
    project.github_last_commit = branch.commit.sha
    project.github_last_sync = now()
    project.save()

    import_result = do_import_archive(project.id, u.read())

    send_keen_event('cloudpebble',
                    'cloudpebble_github_pull',
                    user=user,
                    data={'data': {
                        'repo': project.github_repo
                    }})

    return import_result
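
github_pull obtains its client via get_github(user); given how the push examples construct theirs inline, a matching sketch would be:

from django.conf import settings
from github import Github

def get_github(user):
    # Build a PyGithub client from the user's stored OAuth token.
    return Github(user.github.token,
                  client_id=settings.GITHUB_CLIENT_ID,
                  client_secret=settings.GITHUB_CLIENT_SECRET)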
Example #8
def do_import_archive(project_id, archive, delete_project=False):
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                #   - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                RESOURCE_MAP = 'resources/src/resource_map.json'
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'
                if len(contents) > 200:
                    raise Exception("Too many files in zip file.")
                file_list = [x.filename for x in contents]

                version, base_dir = find_project_root(file_list)
                dir_end = len(base_dir)
                project.sdk_version = version

                # Now iterate over the things we found
                with transaction.commit_on_success():
                    for entry in contents:
                        filename = entry.filename
                        if filename[:dir_end] != base_dir:
                            continue
                        filename = filename[dir_end:]
                        if filename == '':
                            continue
                        if not os.path.normpath(
                                '/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' %
                                filename).startswith(
                                    '/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'
                                ):
                            raise SuspiciousOperation(
                                "Invalid zip file contents.")
                        if entry.file_size > 5242880:  # 5 MB
                            raise Exception(
                                "Excessively large compressed file.")

                        if (filename == RESOURCE_MAP
                                and version == '1') or (filename == MANIFEST
                                                        and version == '2'):
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())

                            if version == '1':
                                project.version_def_name = m['versionDefName']
                                media_map = m['media']
                            elif version == '2':
                                project.app_uuid = m['uuid']
                                project.app_short_name = m['shortName']
                                project.app_long_name = m['longName']
                                project.app_company_name = m['companyName']
                                project.app_version_code = m['versionCode']
                                project.app_version_label = m['versionLabel']
                                project.app_is_watchface = m.get(
                                    'watchapp', {}).get('watchface', False)
                                project.app_capabilities = ','.join(
                                    m.get('capabilities', []))
                                project.app_keys = dict_to_pretty_json(
                                    m.get('appKeys', {}))
                                media_map = m['resources']['media']

                            resources = {}
                            for resource in media_map:
                                kind = resource['type']
                                def_name = resource[
                                    'defName'] if version == '1' else resource[
                                        'name']
                                file_name = resource['file']
                                regex = resource.get('characterRegex', None)
                                tracking = resource.get('trackingAdjust', None)
                                is_menu_icon = resource.get('menuIcon', False)
                                if file_name not in resources:
                                    resources[
                                        file_name] = ResourceFile.objects.create(
                                            project=project,
                                            file_name=os.path.basename(
                                                file_name),
                                            kind=kind,
                                            is_menu_icon=is_menu_icon)
                                    res_path = 'resources/src' if version == '1' else 'resources'
                                    resources[file_name].save_file(
                                        z.open(
                                            '%s%s/%s' %
                                            (base_dir, res_path, file_name)))
                                ResourceIdentifier.objects.create(
                                    resource_file=resources[file_name],
                                    resource_id=def_name,
                                    character_regex=regex,
                                    tracking=tracking)

                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith('.')) and (
                                    filename.endswith('.c')
                                    or filename.endswith('.h')
                                    or filename.endswith('.js')):
                                base_filename = os.path.basename(
                                    filename) if not filename.endswith(
                                        'js/pebble-js-app.js'
                                    ) else 'js/pebble-js-app.js'
                                source = SourceFile.objects.create(
                                    project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))
                    project.save()
                    send_keen_event('cloudpebble',
                                    'cloudpebble_zip_import_succeeded',
                                    project=project)

        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event('cloudpebble',
                        'cloudpebble_zip_import_failed',
                        user=project.owner,
                        data={'data': {
                            'reason': e.message
                        }})
        raise
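
A note on the sentinel check used in the import loop above: entry names come straight from the uploaded archive, so each one is anchored under a throwaway directory and normalised before it is used; if the normalised path escapes that directory, the import is aborted with SuspiciousOperation. A minimal standalone sketch of the same guard (the helper name is_safe_zip_path is ours, not part of the original code):

import os


def is_safe_zip_path(filename):
    # Anchor the entry under a sentinel directory and normalise it.  If the
    # normalised path no longer starts with the sentinel, the entry used '..'
    # segments to climb out of the extraction root, so it must be rejected.
    sentinel = '/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'
    return os.path.normpath(sentinel + filename).startswith(sentinel)


# is_safe_zip_path('src/main.c')        -> True
# is_safe_zip_path('../../etc/passwd')  -> False
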
Example #9
def do_import_archive(project_id, archive, delete_project=False):
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                #   - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                RESOURCE_MAP = 'resources/src/resource_map.json'
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'
                if len(contents) > 200:
                    raise Exception("Too many files in zip file.")
                file_list = [x.filename for x in contents]

                version, base_dir = find_project_root(file_list)
                dir_end = len(base_dir)
                project.sdk_version = version

                # Now iterate over the things we found
                with transaction.commit_on_success():
                    for entry in contents:
                        filename = entry.filename
                        if filename[:dir_end] != base_dir:
                            continue
                        filename = filename[dir_end:]
                        if filename == '':
                            continue
                        if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                            raise SuspiciousOperation("Invalid zip file contents.")
                        if entry.file_size > 5242880:  # 5 MB
                            raise Exception("Excessively large compressed file.")

                        if (filename == RESOURCE_MAP and version == '1') or (filename == MANIFEST and version == '2'):
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())

                            if version == '1':
                                project.version_def_name = m['versionDefName']
                                media_map = m['media']
                            elif version == '2':
                                project.app_uuid = m['uuid']
                                project.app_short_name = m['shortName']
                                project.app_long_name = m['longName']
                                project.app_company_name = m['companyName']
                                project.app_version_code = m['versionCode']
                                project.app_version_label = m['versionLabel']
                                project.app_is_watchface = m.get('watchapp', {}).get('watchface', False)
                                project.app_capabilities = ','.join(m.get('capabilities', []))
                                project.app_keys = dict_to_pretty_json(m.get('appKeys', {}))
                                media_map = m['resources']['media']

                            resources = {}
                            for resource in media_map:
                                kind = resource['type']
                                def_name = resource['defName'] if version == '1' else resource['name']
                                file_name = resource['file']
                                regex = resource.get('characterRegex', None)
                                tracking = resource.get('trackingAdjust', None)
                                is_menu_icon = resource.get('menuIcon', False)
                                if file_name not in resources:
                                    resources[file_name] = ResourceFile.objects.create(project=project, file_name=os.path.basename(file_name), kind=kind, is_menu_icon=is_menu_icon)
                                    res_path = 'resources/src' if version == '1' else 'resources'
                                    resources[file_name].save_file(z.open('%s%s/%s' % (base_dir, res_path, file_name)))
                                ResourceIdentifier.objects.create(
                                    resource_file=resources[file_name],
                                    resource_id=def_name,
                                    character_regex=regex,
                                    tracking=tracking
                                )

                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                base_filename = os.path.basename(filename) if not filename.endswith('js/pebble-js-app.js') else 'js/pebble-js-app.js'
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))
                    project.save()
                    send_keen_event('cloudpebble', 'cloudpebble_zip_import_succeeded', project=project)

        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event('cloudpebble', 'cloudpebble_zip_import_failed', user=project.owner, data={
            'data': {
                'reason': e.message
            }
        })
        raise
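
Example #9 depends on find_project_root returning both the detected SDK version and the base directory inside the archive, but the helper itself is not shown. The sketch below is only a plausible reconstruction of that contract, inferred from the strategy comments above; the real implementation presumably also verifies that a src/ directory with source files sits alongside the manifest.

MANIFESTS = {'appinfo.json': '2', 'resources/src/resource_map.json': '1'}


def find_project_root(contents):
    # Look for an SDK 2 manifest (appinfo.json) or an SDK 1 resource map and
    # treat the directory containing it as the project root, returned with a
    # trailing slash (or '' when the project sits at the archive root).
    for path in contents:
        for manifest, version in MANIFESTS.items():
            if path == manifest or path.endswith('/' + manifest):
                return version, path[:-len(manifest)]
    raise Exception("Couldn't find a project root.")
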
Example #10
def github_push(user, commit_message, repo_name, project):
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = [x.path for x in tree.tree]

    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}

    try:
        root = find_project_root(paths)
    except:
        root = ''
    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    src_root = root + 'src/'
    worker_src_root = root + 'worker_src/'
    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        repo_path = src_root + source.file_name
        if project.project_type == 'native':
            if source.target == 'worker':
                repo_path = worker_src_root + source.file_name
            elif project.app_modern_multi_js and source.file_name.endswith('.js'):
                repo_path = src_root + 'js/' + source.file_name

        update_expected_paths(repo_path)
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob',
                                                       content=source.get_contents())
            print "New file: %s" % repo_path
        else:
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                print "Updated file: %s" % repo_path
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    # Now try handling resource files.
    resources = project.resources.all()
    resource_root = root + 'resources/'
    for res in resources:
        for variant in res.variants.all():
            repo_path = resource_root + variant.path
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    print "Changed resource: %s" % repo_path
                    has_changed = True
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    print "Created blob %s" % blob.sha
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                print "New resource: %s" % repo_path
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                print "Created blob %s" % blob.sha
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)

    # Manage deleted files
    for path in next_tree.keys():
        if not (any(path.startswith(root) for root in (src_root, resource_root, worker_src_root))):
            continue
        if path not in expected_paths:
            del next_tree[path]
            print "Deleted file: %s" % path
            has_changed = True

    # Compare the resource dicts
    remote_manifest_path = root + 'appinfo.json'
    remote_wscript_path = root + 'wscript'

    remote_manifest_sha = next_tree[remote_manifest_path]._InputGitTreeElement__sha if remote_manifest_path in next_tree else None
    if remote_manifest_sha is not None:
        their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha))
        their_res_dict = their_manifest_dict['resources']
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}

    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict['resources']

    if our_res_dict != their_res_dict:
        print "Resources mismatch."
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                print "Deleted resource: %s" % repo_path
                del next_tree[repo_path]

    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob',
                                                                  content=generate_manifest(project, resources))

    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob',
                                                             content=generate_wscript_file(project, True))
        has_changed = True

    # Commit the new tree.
    if has_changed:
        print "Has changed; committing"
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                print "removing subtree node %s" % x

        print [x._InputGitTreeElement__mode for x in next_tree.values()]
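
github_push above decides whether a source file or resource changed by comparing the remote tree entry's SHA against git_sha(our_content). Git identifies a blob by the SHA-1 of a "blob <byte length>\0" header followed by the raw bytes, so a helper compatible with that comparison could look like the sketch below (assuming Python 2 byte strings, as in the rest of the example; this is not necessarily the project's actual implementation):

import hashlib


def git_sha(content):
    # Git blob object id: SHA-1 over the header "blob <size>\0" plus the content.
    return hashlib.sha1('blob %d\0%s' % (len(content), content)).hexdigest()
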
Example #11
def do_import_archive(project_id, archive, delete_project=False):
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                #   - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'
                if len(contents) > 400:
                    raise Exception("Too many files in zip file.")
                file_list = [x.filename for x in contents]

                base_dir = find_project_root(file_list)
                dir_end = len(base_dir)

                # Now iterate over the things we found
                with transaction.commit_on_success():
                    for entry in contents:
                        filename = entry.filename
                        if filename[:dir_end] != base_dir:
                            continue
                        filename = filename[dir_end:]
                        if filename == '':
                            continue
                        if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                            raise SuspiciousOperation("Invalid zip file contents.")
                        if entry.file_size > 5242880:  # 5 MB
                            raise Exception("Excessively large compressed file.")

                        if filename == MANIFEST:
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())

                            project.app_uuid = m['uuid']
                            project.app_short_name = m['shortName']
                            project.app_long_name = m['longName']
                            project.app_company_name = m['companyName']
                            project.app_version_label = m['versionLabel']
                            project.sdk_version = m.get('sdkVersion', '2')
                            project.app_is_watchface = m.get('watchapp', {}).get('watchface', False)
                            project.app_capabilities = ','.join(m.get('capabilities', []))
                            if 'targetPlatforms' in m:
                                project.app_platforms = ','.join(m['targetPlatforms'])
                            project.app_keys = dict_to_pretty_json(m.get('appKeys', {}))
                            project.project_type = m.get('projectType', 'native')
                            if project.project_type not in [x[0] for x in Project.PROJECT_TYPES]:
                                raise Exception("Illegal project type %s" % project.project_type)
                            media_map = m['resources']['media']

                            resources = {}
                            resource_files = {}
                            resource_suffix_map = {v: k for k, v in ResourceVariant.VARIANT_SUFFIXES.iteritems()}
                                del resource_suffix_map['']  # Drop the empty suffix (the default variant): every filename ends with '', so it would match every file.
                            for resource in media_map:
                                kind = resource['type']
                                def_name = resource['name']
                                file_name = resource['file']
                                # Pebble.js and simply.js both have some internal resources that we don't import.
                                if project.project_type in {'pebblejs', 'simplyjs'}:
                                    if def_name in {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}:
                                        continue
                                regex = resource.get('characterRegex', None)
                                tracking = resource.get('trackingAdjust', None)
                                is_menu_icon = resource.get('menuIcon', False)
                                compatibility = resource.get('compatibility', None)
                                if file_name not in resource_files:
                                    file_name_parts = os.path.splitext(file_name)
                                    for suffix in resource_suffix_map.iterkeys():
                                        if file_name_parts[0].endswith(suffix):
                                            # os.path.splitext keeps the leading '.' on the extension,
                                            # so re-joining the two parts directly gives the root file name.
                                            root_file_name = file_name_parts[0][:len(file_name_parts[0]) - len(suffix)] + file_name_parts[1]
                                            variant = resource_suffix_map[suffix]
                                            break
                                    else:
                                        root_file_name = file_name
                                        variant = ResourceVariant.VARIANT_DEFAULT
                                    if root_file_name not in resources:
                                        resources[root_file_name] = ResourceFile.objects.create(project=project, file_name=os.path.basename(root_file_name), kind=kind, is_menu_icon=is_menu_icon)
                                    res_path = 'resources'
                                    resource_files[file_name] = ResourceVariant.objects.create(resource_file=resources[root_file_name], variant=variant)
                                    resource_files[file_name].save_file(z.open('%s%s/%s' % (base_dir, res_path, file_name)))
                                # Resolve the owning ResourceFile through the variant record so that
                                # suffixed file names (e.g. 'foo~bw.png') map back to their root file.
                                ResourceIdentifier.objects.create(
                                    resource_file=resource_files[file_name].resource_file,
                                    resource_id=def_name,
                                    character_regex=regex,
                                    tracking=tracking,
                                    compatibility=compatibility
                                )

                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                base_filename = filename[len(SRC_DIR):]
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))
                    project.save()
                    send_keen_event('cloudpebble', 'cloudpebble_zip_import_succeeded', project=project)

        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event('cloudpebble', 'cloudpebble_zip_import_failed', user=project.owner, data={
            'data': {
                'reason': e.message
            }
        })
        raise
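
Example #11 splits each resource file name into a root name and a display variant by looking for a suffix such as '~bw' just before the extension. The standalone sketch below shows that split in isolation; the suffix table here is illustrative, whereas the real code derives it by inverting ResourceVariant.VARIANT_SUFFIXES.

import os

# Illustrative suffix table (suffix -> variant label); the project builds the
# real one from ResourceVariant.VARIANT_SUFFIXES.
SUFFIX_TO_VARIANT = {'~bw': 'monochrome', '~color': 'colour'}


def split_variant(file_name):
    stem, ext = os.path.splitext(file_name)
    for suffix, variant in SUFFIX_TO_VARIANT.items():
        if stem.endswith(suffix):
            return stem[:-len(suffix)] + ext, variant
    return file_name, 'default'


# split_variant('background~bw.png') -> ('background.png', 'monochrome')
# split_variant('background.png')    -> ('background.png', 'default')
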