def run_test(self, contents, expected_dir=None, expected_name=None):
    """Exercise find_project_root_and_manifest() and verify its result.

    :param contents: A list of strings and/or FakeProjectItems
    :param expected_dir: If given, assert the returned root dir equals this
    :param expected_name: If given, assert the returned manifest's name equals this
    """
    items = []
    for entry in contents:
        if isinstance(entry, FakeProjectItem):
            items.append(entry)
        else:
            items.append(FakeProjectItem(entry))
    root, found_manifest = find_project_root_and_manifest(items)
    if expected_dir:
        self.assertEqual(root, expected_dir)
    if expected_name:
        self.assertEqual(found_manifest.name, expected_name)
Example #2
0
 def run_test(self, contents, expected_dir=None, expected_name=None):
     """Run find_project_root_and_manifest() on the given items and check its output.

     :param contents: A list of strings and/or FakeProjectItems
     :param expected_dir: The root dir the function should return, if checked
     :param expected_name: The manifest filename the function should return, if checked
     """
     wrapped = [item if isinstance(item, FakeProjectItem) else FakeProjectItem(item)
                for item in contents]
     base_dir, manifest = find_project_root_and_manifest(wrapped)
     if expected_dir:
         self.assertEqual(base_dir, expected_dir)
     if expected_name:
         self.assertEqual(manifest.name, expected_name)
Example #3
0
def do_import_archive(project_id, archive, delete_project=False):
    """Import a zipped Pebble project into the (existing) Project with the given id.

    Strategy:
      - Locate the project root inside the zip (it may not be at the top level)
        along with its manifest file.
      - Apply the manifest's project options, then import source files and the
        resources referenced by the manifest's media map.

    :param project_id: Primary key of the Project to populate.
    :param archive: The zip archive contents as a byte string.
    :param delete_project: If True, delete the project whenever the import fails.
    :return: True once the import has completed successfully.
    :raises InvalidProjectArchiveException: zip has too many or too large files.
    :raises SuspiciousOperation: zip contains path-traversal filenames.
    :raises KeyError: a resource listed in the manifest has no matching file.
    """
    project = Project.objects.get(pk=project_id)
    try:
        # Spool the archive to disk so zipfile can seek within it.
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()

                if len(contents) > 400:
                    raise InvalidProjectArchiveException("Too many files in zip file.")

                archive_items = [ArchiveProjectItem(z, x) for x in contents]
                base_dir, manifest_item = find_project_root_and_manifest(archive_items)
                dir_end = len(base_dir)

                def make_valid_filename(zip_entry):
                    # Map a zip entry to its path relative to the project root.
                    # Returns False for entries outside the root (or the root
                    # itself); raises for path traversal or oversized files.
                    entry_filename = zip_entry.filename
                    if entry_filename[:dir_end] != base_dir:
                        return False
                    entry_filename = entry_filename[dir_end:]
                    if entry_filename == '':
                        return False
                    # After normalisation the path must still live under the
                    # sentinel directory, i.e. it contains no '..' escapes.
                    if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % entry_filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                        raise SuspiciousOperation("Invalid zip file contents.")
                    if zip_entry.file_size > 5242880:  # 5 MB per-file cap
                        raise InvalidProjectArchiveException("Excessively large compressed file.")
                    return entry_filename

                # The manifest's root-relative filename doubles as its kind
                # (e.g. 'package.json' vs 'appinfo.json') for load_manifest_dict.
                manifest_kind = make_valid_filename(manifest_item.entry)
                manifest_dict = json.loads(manifest_item.read())

                # Keep only valid project files, excluding the manifest itself.
                filtered_contents = []
                for entry in contents:
                    filename = make_valid_filename(entry)
                    if not filename or filename in MANIFEST_KINDS:
                        continue
                    else:
                        filtered_contents.append((filename, entry))

                with transaction.atomic():
                    # We have a resource map! We can now try importing things from it.
                    project_options, media_map, dependencies = load_manifest_dict(manifest_dict, manifest_kind)

                    for k, v in project_options.iteritems():
                        setattr(project, k, v)
                    project.full_clean()
                    project.set_dependencies(dependencies)

                    RES_PATH = project.resources_path

                    # Map variant suffix strings (e.g. '~bw') back to their tag ids.
                    tag_map = {v: k for k, v in ResourceVariant.VARIANT_STRINGS.iteritems() if v}

                    desired_resources = {}
                    resources_files = {}
                    resource_variants = {}
                    file_exists_for_root = {}

                    # Go through the media map and look for resources
                    for resource in media_map:
                        file_name = resource['file']
                        identifier = resource['name']
                        # Pebble.js and simply.js both have some internal resources that we don't import.
                        if project.project_type in {'pebblejs', 'simplyjs'}:
                            if identifier in {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}:
                                continue
                        tags, root_file_name = get_filename_variant(file_name, tag_map)
                        if tags:
                            raise ValueError("Generic resource filenames cannot contain a tilde (~)")
                        # BUGFIX: the membership test previously checked file_name,
                        # but the dict is keyed by root_file_name; use the same key.
                        if root_file_name not in desired_resources:
                            desired_resources[root_file_name] = []

                        desired_resources[root_file_name].append(resource)
                        file_exists_for_root[root_file_name] = False

                    # Go through the zip file process all resource and source files.
                    for filename, entry in filtered_contents:
                        if filename.startswith(RES_PATH):
                            base_filename = filename[len(RES_PATH) + 1:]
                            # Let's just try opening the file
                            try:
                                extracted = z.open("%s%s/%s" % (base_dir, RES_PATH, base_filename))
                            except KeyError:
                                logger.debug("Failed to open %s", base_filename)
                                continue

                            # Now we know the file exists and is in the resource directory - is it the one we want?
                            tags, root_file_name = get_filename_variant(base_filename, tag_map)
                            tags_string = ",".join(str(int(t)) for t in tags)

                            if root_file_name in desired_resources:
                                medias = desired_resources[root_file_name]

                                # Because 'kind' and 'is_menu_icons' are properties of ResourceFile in the database,
                                # we just use the first one.
                                resource = medias[0]
                                # Make only one resource file per base resource.
                                if root_file_name not in resources_files:
                                    kind = resource['type']
                                    is_menu_icon = resource.get('menuIcon', False)
                                    resources_files[root_file_name] = ResourceFile.objects.create(
                                        project=project,
                                        file_name=os.path.basename(root_file_name),
                                        kind=kind,
                                        is_menu_icon=is_menu_icon)

                                # But add a resource variant for every file
                                actual_file_name = resource['file']
                                resource_variants[actual_file_name] = ResourceVariant.objects.create(resource_file=resources_files[root_file_name], tags=tags_string)
                                resource_variants[actual_file_name].save_file(extracted)
                                file_exists_for_root[root_file_name] = True
                        else:
                            try:
                                base_filename, target = SourceFile.get_details_for_path(project.project_type, filename)
                            except ValueError:
                                # We'll just ignore any out of place files.
                                continue
                            source = SourceFile.objects.create(project=project, file_name=base_filename, target=target)

                            with z.open(entry.filename) as f:
                                source.save_text(f.read().decode('utf-8'))

                    # Now add all the resource identifiers
                    for root_file_name in desired_resources:
                        for resource in desired_resources[root_file_name]:
                            target_platforms = json.dumps(resource['targetPlatforms']) if 'targetPlatforms' in resource else None
                            ResourceIdentifier.objects.create(
                                resource_file=resources_files[root_file_name],
                                resource_id=resource['name'],
                                target_platforms=target_platforms,
                                # Font options
                                character_regex=resource.get('characterRegex', None),
                                tracking=resource.get('trackingAdjust', None),
                                compatibility=resource.get('compatibility', None),
                                # Bitmap options
                                memory_format=resource.get('memoryFormat', None),
                                storage_format=resource.get('storageFormat', None),
                                space_optimisation=resource.get('spaceOptimization', None)
                            )

                    # Check that at least one variant of each specified resource exists.
                    for root_file_name, loaded in file_exists_for_root.iteritems():
                        if not loaded:
                            raise KeyError("No file was found to satisfy the manifest filename: {}".format(root_file_name))
                    project.save()
                    send_td_event('cloudpebble_zip_import_succeeded', project=project)

        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            # Best-effort cleanup: ignore failures deleting the project, but
            # don't swallow KeyboardInterrupt/SystemExit (was a bare except).
            try:
                Project.objects.get(pk=project_id).delete()
            except Exception:
                pass
        send_td_event('cloudpebble_zip_import_failed', data={
            'data': {
                'reason': str(e)
            }
        }, user=project.owner)
        raise
Example #4
0
def github_push(user, commit_message, repo_name, project):
    """Push the project's current sources, resources and manifest to GitHub.

    Builds a new git tree from the remote branch's tree, overlays the local
    project state, and commits it if anything differs.

    :param user: The user whose GitHub token is used for the push.
    :param commit_message: Commit message for the new commit, if one is made.
    :param repo_name: 'owner/repo' name of the target repository.
    :param project: The project whose contents are pushed.
    :return: True if a commit was created and the branch ref updated, else False.
    :raises Exception: if the target branch cannot be fetched.
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    # Start from a copy of the remote tree; entries are mutated/added/removed
    # in place below to reflect the local project state.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}

    # No project root/manifest in the repo yet is fine: treat the repo root as
    # the project root and generate a fresh manifest later.
    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, x) for x in tree.tree])
    except InvalidProjectArchiveException:
        root = ''
        manifest_item = None

    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    # NOTE: root is expected to be '' or end with '/' for these concatenations
    # to produce valid paths — TODO confirm find_project_root_and_manifest's contract.
    src_root = root + 'src/'
    worker_src_root = root + 'worker_src/'
    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        # Default location; native projects may redirect worker/js sources below.
        repo_path = src_root + source.file_name
        if project.project_type == 'native':
            if source.target == 'worker':
                repo_path = worker_src_root + source.file_name
            elif project.app_modern_multi_js and source.file_name.endswith('.js'):
                repo_path = src_root + 'js/' + source.file_name

        update_expected_paths(repo_path)
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob',
                                                       content=source.get_contents())
            logger.debug("New file: %s", repo_path)
        else:
            # Compare git blob SHAs to detect content changes without downloading
            # the remote blob. We poke PyGithub's name-mangled private attributes
            # to edit the element in place (NOTE(review): relies on PyGithub internals).
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                logger.debug("Updated file: %s", repo_path)
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    # Now try handling resource files.
    resources = project.resources.all()
    resource_root = root + 'resources/'
    for res in resources:
        for variant in res.variants.all():
            repo_path = resource_root + variant.path
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    logger.debug("Changed resource: %s", repo_path)
                    has_changed = True
                    # Resources are binary; upload them as base64 blobs rather
                    # than inline content.
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    logger.debug("Created blob %s", blob.sha)
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                logger.debug("New resource: %s", repo_path)
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                logger.debug("Created blob %s", blob.sha)
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)

    # Manage deleted files
    # (Python 2: .keys() returns a list copy, so deleting while iterating is safe.
    # The generator's 'root' shadows the outer root but does not leak from a genexp.)
    for path in next_tree.keys():
        if not (any(path.startswith(root) for root in (src_root, resource_root, worker_src_root))):
            continue
        if path not in expected_paths:
            del next_tree[path]
            logger.debug("Deleted file: %s", path)
            has_changed = True

    # Compare the resource dicts
    remote_manifest_path = root + manifest_name_for_project(project)
    remote_wscript_path = root + 'wscript'

    if manifest_item:
        their_manifest_dict = json.loads(manifest_item.read())
        # The media map lives at 'resources' or, in older manifests, under 'pebble'.
        their_res_dict = their_manifest_dict.get('resources', their_manifest_dict.get('pebble', their_manifest_dict).get('resources', {'media': []}))
        # If the manifest needs a new path (e.g. it is now package.json), delete the old one
        if manifest_item.path != remote_manifest_path:
            del next_tree[manifest_item.path]
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}

    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict.get('resources', our_manifest_dict.get('pebble', our_manifest_dict).get('resources', {'media': []}))

    if our_res_dict != their_res_dict:
        logger.debug("Resources mismatch.")
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                logger.debug("Deleted resource: %s", repo_path)
                del next_tree[repo_path]

    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob',
                                                                  content=generate_manifest(project, resources))

    # Native projects always need a wscript; add one only if missing.
    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob',
                                                             content=generate_wscript_file(project, True))
        has_changed = True

    # Commit the new tree.
    if has_changed:
        logger.debug("Has changed; committing")
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                logger.debug("removing subtree node %s", x)

        logger.debug([x._InputGitTreeElement__mode for x in next_tree.values()])
        git_tree = repo.create_git_tree(next_tree.values())
        logger.debug("Created tree %s", git_tree.sha)
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        logger.debug("Created commit %s", git_commit.sha)
        git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        logger.debug("Updated ref %s", git_ref.ref)
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    send_td_event('cloudpebble_github_push', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)

    return False
Example #5
0
def github_pull(user, project):
    """Pull the project's configured GitHub branch and re-import it.

    Verifies the remote contains every resource the manifest mentions before
    wiping the project and re-importing the branch's zipball.

    :param user: The user whose GitHub credentials are used.
    :param project: The project to synchronise.
    :return: False if already up to date, otherwise do_import_archive's result.
    """
    github_api = get_github(user)
    repo_name = project.github_repo
    if repo_name is None:
        raise Exception("No GitHub repo defined.")
    repo = github_api.get_repo(repo_name)
    # If somehow we don't have a branch set, this will use the "master_branch"
    branch_name = project.github_branch or repo.master_branch
    try:
        branch = repo.get_branch(branch_name)
    except GithubException:
        raise Exception("Unable to get the branch.")

    if project.github_last_commit == branch.commit.sha:
        # Already synchronised with the remote head.
        return False

    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = {entry.path: entry for entry in tree.tree}
    paths_notags = {get_root_path(p) for p in paths}

    # First try finding the resource map so we don't fail out part-done later.
    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, entry) for entry in tree.tree])
    except ValueError as e:
        raise ValueError("In manifest file: %s" % str(e))
    resource_root = root + 'resources/'
    manifest = json.loads(manifest_item.read())

    media = manifest.get('resources', {}).get('media', [])
    project_type = manifest.get('projectType', 'native')

    # Pebble.js ships these resources itself, so their absence is fine.
    builtin_identifiers = {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}
    for resource in media:
        path = resource_root + resource['file']
        if project_type == 'pebblejs' and resource['name'] in builtin_identifiers:
            continue
        if path not in paths_notags:
            raise Exception("Resource %s not found in repo." % path)

    # Now we grab the zip.
    zip_url = repo.get_archive_link('zipball', branch_name)
    archive_handle = urllib2.urlopen(zip_url)

    # And wipe the project!
    # TODO: transaction support for file contents would be nice...
    project.source_files.all().delete()
    project.resources.all().delete()

    # This must happen before do_import_archive or we'll stamp on its results.
    project.github_last_commit = branch.commit.sha
    project.github_last_sync = now()
    project.save()

    import_result = do_import_archive(project.id, archive_handle.read())

    send_td_event('cloudpebble_github_pull', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)

    return import_result
Example #6
0
def github_push(user, commit_message, repo_name, project):
    """Push the project's current sources, resources and manifest to GitHub.

    Builds a new git tree from the remote branch's tree, overlays the local
    project state, and commits it if anything differs.

    :param user: The user whose GitHub token is used for the push.
    :param commit_message: Commit message for the new commit, if one is made.
    :param repo_name: 'owner/repo' name of the target repository.
    :param project: The project whose contents are pushed.
    :return: True if a commit was created and the branch ref updated, else False.
    :raises Exception: if the target branch cannot be fetched.
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    # Start from a copy of the remote tree; entries are mutated/added/removed
    # in place below to reflect the local project state.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}

    # No project root/manifest in the repo yet is fine: treat the repo root as
    # the project root and generate a fresh manifest later.
    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, x) for x in tree.tree])
    except InvalidProjectArchiveException:
        root = ''
        manifest_item = None

    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        # source.project_path is the repo-relative location of this source file.
        repo_path = os.path.join(root, source.project_path)

        update_expected_paths(repo_path)
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob',
                                                       content=source.get_contents())
            logger.debug("New file: %s", repo_path)
        else:
            # Compare git blob SHAs to detect content changes without downloading
            # the remote blob. We poke PyGithub's name-mangled private attributes
            # to edit the element in place (NOTE(review): relies on PyGithub internals).
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                logger.debug("Updated file: %s", repo_path)
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    # Now try handling resource files.
    resources = project.resources.all()
    # NOTE(review): unlike src_root below, resource_root is not joined with
    # root here — confirm project.resources_path is already root-relative.
    resource_root = project.resources_path
    for res in resources:
        for variant in res.variants.all():
            repo_path = os.path.join(resource_root, variant.path)
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    logger.debug("Changed resource: %s", repo_path)
                    has_changed = True
                    # Resources are binary; upload them as base64 blobs rather
                    # than inline content.
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    logger.debug("Created blob %s", blob.sha)
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                logger.debug("New resource: %s", repo_path)
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                logger.debug("Created blob %s", blob.sha)
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)

    # Manage deleted files
    # (Python 2: .keys() returns a list copy, so deleting while iterating is safe.
    # The generator's 'root' shadows the outer root but does not leak from a genexp.)
    src_root = os.path.join(root, 'src')
    worker_src_root = os.path.join(root, 'worker_src')
    for path in next_tree.keys():
        if not (any(path.startswith(root+'/') for root in (src_root, resource_root, worker_src_root))):
            continue
        if path not in expected_paths:
            del next_tree[path]
            logger.debug("Deleted file: %s", path)
            has_changed = True

    # Compare the resource dicts
    # NOTE(review): plain concatenation here vs os.path.join above — assumes
    # root is '' or ends with '/'; confirm find_project_root_and_manifest's contract.
    remote_manifest_path = root + manifest_name_for_project(project)
    remote_wscript_path = root + 'wscript'

    if manifest_item:
        their_manifest_dict = json.loads(manifest_item.read())
        # The media map lives at 'resources' or, in older manifests, under 'pebble'.
        their_res_dict = their_manifest_dict.get('resources', their_manifest_dict.get('pebble', their_manifest_dict).get('resources', {'media': []}))
        # If the manifest needs a new path (e.g. it is now package.json), delete the old one
        if manifest_item.path != remote_manifest_path:
            del next_tree[manifest_item.path]
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}

    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict.get('resources', our_manifest_dict.get('pebble', our_manifest_dict).get('resources', {'media': []}))

    if our_res_dict != their_res_dict:
        logger.debug("Resources mismatch.")
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                logger.debug("Deleted resource: %s", repo_path)
                del next_tree[repo_path]

    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob',
                                                                  content=generate_manifest(project, resources))

    # Native projects always need a wscript; add one only if missing.
    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob',
                                                             content=generate_wscript_file(project, True))
        has_changed = True

    # Commit the new tree.
    if has_changed:
        logger.debug("Has changed; committing")
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                logger.debug("removing subtree node %s", x)

        logger.debug([x._InputGitTreeElement__mode for x in next_tree.values()])
        git_tree = repo.create_git_tree(next_tree.values())
        logger.debug("Created tree %s", git_tree.sha)
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        logger.debug("Created commit %s", git_commit.sha)
        git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        logger.debug("Updated ref %s", git_ref.ref)
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    send_td_event('cloudpebble_github_push', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)

    return False
Example #7
0
def github_pull(user, project):
    """Pull the project's configured GitHub branch and re-import it.

    Verifies the remote contains every resource the manifest mentions before
    wiping the project and re-importing the branch's zipball.

    :param user: The user whose GitHub credentials are used.
    :param project: The project to synchronise.
    :return: False if already up to date, otherwise do_import_archive's result.
    """
    github_api = get_github(user)
    repo_name = project.github_repo
    if repo_name is None:
        raise Exception("No GitHub repo defined.")
    repo = github_api.get_repo(repo_name)
    # If somehow we don't have a branch set, this will use the "master_branch"
    branch_name = project.github_branch or repo.master_branch
    try:
        branch = repo.get_branch(branch_name)
    except GithubException:
        raise Exception("Unable to get the branch.")

    if project.github_last_commit == branch.commit.sha:
        # Already synchronised with the remote head.
        return False

    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    paths = {entry.path: entry for entry in tree.tree}
    paths_notags = {get_root_path(p) for p in paths}

    # First try finding the resource map so we don't fail out part-done later.
    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, entry) for entry in tree.tree])
    except ValueError as e:
        raise ValueError("In manifest file: %s" % str(e))
    resource_root = root + project.resources_path + '/'
    manifest = json.loads(manifest_item.read())

    media = manifest.get('resources', {}).get('media', [])
    project_type = manifest.get('projectType', 'native')

    # Pebble.js ships these resources itself, so their absence is fine.
    builtin_identifiers = {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}
    for resource in media:
        path = resource_root + resource['file']
        if project_type == 'pebblejs' and resource['name'] in builtin_identifiers:
            continue
        if path not in paths_notags:
            raise Exception("Resource %s not found in repo." % path)

    # Now we grab the zip.
    zip_url = repo.get_archive_link('zipball', branch_name)
    archive_handle = urllib2.urlopen(zip_url)

    # And wipe the project!
    # TODO: transaction support for file contents would be nice...
    project.source_files.all().delete()
    project.resources.all().delete()

    # This must happen before do_import_archive or we'll stamp on its results.
    project.github_last_commit = branch.commit.sha
    project.github_last_sync = now()
    project.save()

    import_result = do_import_archive(project.id, archive_handle.read())

    send_td_event('cloudpebble_github_pull', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)

    return import_result
Example #8
0
def do_import_archive(project_id, archive, delete_project=False):
    """Import a zipped project archive into an existing Project.

    Locates the project root and manifest inside the zip, applies the
    manifest's settings/dependencies to the project, and creates
    SourceFile / ResourceFile / ResourceVariant / ResourceIdentifier
    rows from the archive contents inside a single DB transaction.

    :param project_id: Primary key of the Project to import into.
    :param archive: Raw bytes of the zip archive.
    :param delete_project: If True, delete the project when the import
        fails (best-effort; deletion errors are swallowed).
    :returns: True on success.
    :raises InvalidProjectArchiveException: too many files or an
        over-large (>5 MB) member.
    :raises SuspiciousOperation: a zip entry path escapes the project
        root (zip-slip style).
    :raises KeyError: a manifest-declared resource has no matching file.
    :raises Exception: any other failure is re-raised after reporting.
    """
    project = Project.objects.get(pk=project_id)
    try:
        # Spool the archive bytes to a real temp file so zipfile can seek it.
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                #   - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                SRC_DIR = 'src/'
                WORKER_SRC_DIR = 'worker_src/'
                INCLUDE_SRC_DIR = 'include/'

                # Hard cap on archive size to bound import work.
                if len(contents) > 400:
                    raise InvalidProjectArchiveException("Too many files in zip file.")

                archive_items = [ArchiveProjectItem(z, x) for x in contents]
                base_dir, manifest_item = find_project_root_and_manifest(archive_items)
                dir_end = len(base_dir)

                def make_valid_filename(zip_entry):
                    # Return the entry's path relative to the project root, or
                    # False if the entry lies outside the root / is the root itself.
                    # Raises on path-traversal attempts and oversized members.
                    entry_filename = zip_entry.filename
                    if entry_filename[:dir_end] != base_dir:
                        return False
                    entry_filename = entry_filename[dir_end:]
                    if entry_filename == '':
                        return False
                    # Zip-slip guard: after normalisation the path must still sit
                    # under the sentinel prefix, i.e. contain no escaping '..'.
                    if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % entry_filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                        raise SuspiciousOperation("Invalid zip file contents.")
                    if zip_entry.file_size > 5242880:  # 5 MB
                        raise InvalidProjectArchiveException("Excessively large compressed file.")
                    return entry_filename

                # The manifest's root-relative name doubles as its "kind"
                # (e.g. which manifest flavour it is) for load_manifest_dict.
                manifest_kind = make_valid_filename(manifest_item.entry)
                manifest_dict = json.loads(manifest_item.read())

                # Now iterate over the things we found, filter out invalid files and look for the manifest.
                filtered_contents = []
                for entry in contents:
                    filename = make_valid_filename(entry)
                    if not filename or filename in MANIFEST_KINDS:
                        continue
                    else:
                        filtered_contents.append((filename, entry))

                with transaction.atomic():
                    # We have a resource map! We can now try importing things from it.
                    project_options, media_map, dependencies = load_manifest_dict(manifest_dict, manifest_kind)

                    # Copy manifest-derived settings onto the project and validate.
                    for k, v in project_options.iteritems():
                        setattr(project, k, v)
                    project.full_clean()
                    project.set_dependencies(dependencies)

                    RES_PATH = project.resources_path

                    # Map variant suffix strings (e.g. '~bw') back to their tag ids.
                    tag_map = {v: k for k, v in ResourceVariant.VARIANT_STRINGS.iteritems() if v}

                    desired_resources = {}       # root filename -> list of manifest media entries
                    resources_files = {}         # root filename -> ResourceFile row
                    resource_variants = {}       # actual filename -> ResourceVariant row
                    file_exists_for_root = {}    # root filename -> was any variant found in the zip?

                    # Go through the media map and look for resources
                    for resource in media_map:
                        file_name = resource['file']
                        identifier = resource['name']
                        # Pebble.js and simply.js both have some internal resources that we don't import.
                        if project.project_type in {'pebblejs', 'simplyjs'}:
                            if identifier in {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}:
                                continue
                        tags, root_file_name = get_filename_variant(file_name, tag_map)
                        # Manifest entries must name the generic (untagged) file.
                        if (len(tags) != 0):
                            raise ValueError("Generic resource filenames cannot contain a tilde (~)")
                        # NOTE(review): membership is checked with file_name but the key
                        # written is root_file_name; these only coincide because tagged
                        # names raise above — confirm they are always equal here.
                        if file_name not in desired_resources:
                            desired_resources[root_file_name] = []

                        desired_resources[root_file_name].append(resource)
                        file_exists_for_root[root_file_name] = False

                    # Go through the zip file process all resource and source files.
                    for filename, entry in filtered_contents:
                        if filename.startswith(RES_PATH):
                            base_filename = filename[len(RES_PATH) + 1:]
                            # Let's just try opening the file
                            try:
                                extracted = z.open("%s%s/%s" % (base_dir, RES_PATH, base_filename))
                            except KeyError:
                                logger.debug("Failed to open %s", base_filename)
                                continue

                            # Now we know the file exists and is in the resource directory - is it the one we want?
                            tags, root_file_name = get_filename_variant(base_filename, tag_map)
                            tags_string = ",".join(str(int(t)) for t in tags)

                            if root_file_name in desired_resources:
                                medias = desired_resources[root_file_name]

                                # Because 'kind' and 'is_menu_icons' are properties of ResourceFile in the database,
                                # we just use the first one.
                                resource = medias[0]
                                # Make only one resource file per base resource.
                                if root_file_name not in resources_files:
                                    kind = resource['type']
                                    is_menu_icon = resource.get('menuIcon', False)
                                    resources_files[root_file_name] = ResourceFile.objects.create(
                                        project=project,
                                        file_name=os.path.basename(root_file_name),
                                        kind=kind,
                                        is_menu_icon=is_menu_icon)

                                # But add a resource variant for every file
                                actual_file_name = resource['file']
                                resource_variants[actual_file_name] = ResourceVariant.objects.create(resource_file=resources_files[root_file_name], tags=tags_string)
                                resource_variants[actual_file_name].save_file(extracted)
                                file_exists_for_root[root_file_name] = True
                        else:
                            # Not under the resource path: treat as a source file.
                            try:
                                base_filename, target = SourceFile.get_details_for_path(project.project_type, filename)
                            except ValueError:
                                # We'll just ignore any out of place files.
                                continue
                            source = SourceFile.objects.create(project=project, file_name=base_filename, target=target)

                            with z.open(entry.filename) as f:
                                source.save_text(f.read().decode('utf-8'))

                    # Now add all the resource identifiers
                    for root_file_name in desired_resources:
                        for resource in desired_resources[root_file_name]:
                            target_platforms = json.dumps(resource['targetPlatforms']) if 'targetPlatforms' in resource else None
                            ResourceIdentifier.objects.create(
                                resource_file=resources_files[root_file_name],
                                resource_id=resource['name'],
                                target_platforms=target_platforms,
                                # Font options
                                character_regex=resource.get('characterRegex', None),
                                tracking=resource.get('trackingAdjust', None),
                                compatibility=resource.get('compatibility', None),
                                # Bitmap options
                                memory_format=resource.get('memoryFormat', None),
                                storage_format=resource.get('storageFormat', None),
                                space_optimisation=resource.get('spaceOptimization', None)
                            )

                    # Check that at least one variant of each specified resource exists.
                    for root_file_name, loaded in file_exists_for_root.iteritems():
                        if not loaded:
                            raise KeyError("No file was found to satisfy the manifest filename: {}".format(root_file_name))
                    project.save()
                    send_td_event('cloudpebble_zip_import_succeeded', project=project)

        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        # Optionally clean up the half-imported project, report the failure,
        # then re-raise the original exception for the caller.
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_td_event('cloudpebble_zip_import_failed', data={
            'data': {
                'reason': str(e)
            }
        }, user=project.owner)
        raise