def make_appinfo(options=None):
    """Build the JSON content of an appinfo.json file.

    :param options: Custom options to overwrite the defaults
    :return: A pretty-printed JSON string
    """
    manifest = {
        "appKeys": {},
        "capabilities": [""],
        "companyName": "test",
        "enableMultiJS": True,
        "longName": "test",
        "projectType": "native",
        "resources": {"media": []},
        "sdkVersion": "3",
        "shortName": "test",
        "uuid": "123e4567-e89b-42d3-a456-426655440000",
        "versionLabel": "1.0",
        "watchapp": {"watchface": False},
    }
    # Caller-supplied options win over the defaults above.
    manifest.update(options or {})
    return dict_to_pretty_json(manifest)
def make_package(package_options=None, pebble_options=None, no_pebble=False):
    """Build the JSON content of a package.json file.

    :param package_options: Custom options to override the root-level keys
    :param pebble_options: Custom options to override keys in the 'pebble' object
    :param no_pebble: True if there should be no pebble object at all
    :return: pretty-printed JSON string
    """
    pebble_defaults = {
        "messageKeys": [],
        "displayName": "test",
        "enableMultiJS": True,
        "projectType": "native",
        "resources": {"media": []},
        "sdkVersion": "3",
        "uuid": '123e4567-e89b-42d3-a456-426655440000',
        "watchapp": {"watchface": False},
    }
    package = {
        "author": "test",
        "dependencies": {},
        "keywords": [],
        "name": "test",
        "pebble": pebble_defaults,
        "version": "1.0.0",
    }
    if package_options:
        package.update(package_options)
    # Update through the package so a 'pebble' replaced by package_options is respected.
    if pebble_options:
        package['pebble'].update(pebble_options)
    if no_pebble:
        package.pop('pebble')
    return dict_to_pretty_json(package)
def make_appinfo(options=None):
    """Make an appinfo.json file.

    :param options: Custom options to overwrite the defaults
    :return: A pretty-printed JSON string
    """
    # Baseline manifest; every key can be overridden via `options`.
    appinfo = dict(
        appKeys={},
        capabilities=[""],
        companyName="test",
        enableMultiJS=True,
        longName="test",
        projectType="native",
        resources={"media": []},
        sdkVersion="3",
        shortName="test",
        uuid="123e4567-e89b-42d3-a456-426655440000",
        versionLabel="1.0",
        watchapp={"watchface": False},
    )
    if options:
        appinfo.update(options)
    return dict_to_pretty_json(appinfo)
def import_gist(user_id, gist_id):
    """Import a GitHub gist as a new CloudPebble project owned by the given user.

    :param user_id: pk of the User who will own the new project
    :param gist_id: id of the (public) gist to import
    :return: the id of the newly created Project
    :raises Exception: if the gist cannot be found
    """
    user = User.objects.get(pk=user_id)
    # Anonymous client: public gists need no authentication.
    g = github.Github()
    try:
        gist = g.get_gist(gist_id)
    except github.UnknownObjectException:
        send_keen_event('cloudpebble', 'cloudpebble_gist_not_found', user=user, data={'data': {'gist_id': gist_id}})
        raise Exception("Couldn't find gist to import.")
    files = gist.files
    default_name = gist.description or 'Sample project'
    is_native = True
    if 'appinfo.json' in files:
        settings = json.loads(files['appinfo.json'].content)
        # Exactly appinfo.json + simply.js means a simply.js (non-native) project.
        if len(files) == 2 and 'simply.js' in files:
            is_native = False
    else:
        settings = {}
        # A lone simply.js with no manifest is also a simply.js project.
        if len(files) == 1 and 'simply.js' in files:
            is_native = False
    project_settings = {
        'name': settings.get('longName', default_name),
        'owner': user,
        # NOTE(review): int 2 here, while other importers in this module use the
        # string '2' for sdk_version — confirm the model field's expected type.
        'sdk_version': 2,
        'app_uuid': generate_half_uuid(),
        'app_short_name': settings.get('shortName', default_name),
        'app_long_name': settings.get('longName', default_name),
        'app_company_name': settings.get('companyName', user.username),
        'app_version_code': 1,
        'app_version_label': settings.get('versionLabel', '1.0'),
        'app_is_watchface': settings.get('watchapp', {}).get('watchface', False),
        'app_capabilities': ','.join(settings.get('capabilities', [])),
        'app_keys': dict_to_pretty_json(settings.get('appKeys', {})),
        'project_type': 'native' if is_native else 'simplyjs'
    }
    # Create the project and its source files atomically.
    with transaction.commit_on_success():
        project = Project.objects.create(**project_settings)
        if is_native:
            for filename in gist.files:
                # Only C sources/headers and the single JS app file are imported.
                if filename.endswith('.c') or filename.endswith('.h') or filename == 'pebble-js-app.js':
                    # Because gists can't have subdirectories.
                    if filename == 'pebble-js-app.js':
                        cp_filename = 'js/pebble-js-app.js'
                    else:
                        cp_filename = filename
                    source_file = SourceFile.objects.create(project=project, file_name=cp_filename)
                    source_file.save_file(gist.files[filename].content)
        else:
            # simply.js projects get their single source imported as app.js.
            source_file = SourceFile.objects.create(project=project, file_name='app.js')
            source_file.save_file(gist.files['simply.js'].content)
    send_keen_event('cloudpebble', 'cloudpebble_gist_import', project=project, data={'data': {'gist_id': gist_id}})
    return project.id
def make_package(package_options=None, pebble_options=None, no_pebble=False):
    """Make a package.json file.

    :param package_options: Custom options to override the root-level keys
    :param pebble_options: Custom options to override keys in the 'pebble' object
    :param no_pebble: True if there should be no pebble object at all
    :return: pretty-printed JSON string
    """
    # The shape of messageKeys depends on whether NPM manifest support is enabled.
    message_keys = [] if settings.NPM_MANIFEST_SUPPORT else {}
    pebble_section = {
        "messageKeys": message_keys,
        "capabilities": [""],
        "displayName": "test",
        "enableMultiJS": True,
        "projectType": "native",
        "resources": {"media": []},
        "sdkVersion": "3",
        "uuid": '666x6666-x66x-66x6-x666-666666666666',
        "watchapp": {"watchface": False},
    }
    package = {
        "author": "test",
        "dependencies": {},
        "keywords": [],
        "name": "test",
        "pebble": pebble_section,
        "version": "1.0.0",
    }
    if package_options:
        package.update(package_options)
    # Go through the package so a 'pebble' replaced by package_options is the one updated.
    if pebble_options:
        package['pebble'].update(pebble_options)
    if no_pebble:
        del package['pebble']
    return dict_to_pretty_json(package)
def do_import_archive(project_id, archive, delete_project=False):
    """Import a zipped Pebble project archive into an existing (empty) project.

    :param project_id: pk of the Project to populate
    :param archive: raw bytes of the uploaded zip archive
    :param delete_project: if True, delete the project when the import fails
    :return: True on success; any failure is re-raised after event logging
    """
    project = Project.objects.get(pk=project_id)
    try:
        # zipfile needs a named, seekable file, so spool the bytes to disk first.
        with tempfile.NamedTemporaryFile(suffix=".zip") as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), "r") as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                # - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                MANIFEST = "appinfo.json"
                SRC_DIR = "src/"
                RES_PATH = "resources"
                if len(contents) > 400:
                    raise Exception("Too many files in zip file.")
                file_list = [x.filename for x in contents]
                base_dir = find_project_root(file_list)
                dir_end = len(base_dir)

                def make_valid_filename(zip_entry):
                    # Return the entry's path relative to the project root, or False if
                    # the entry is outside the project directory / is the root itself.
                    entry_filename = zip_entry.filename
                    if entry_filename[:dir_end] != base_dir:
                        return False
                    entry_filename = entry_filename[dir_end:]
                    if entry_filename == "":
                        return False
                    # Path-traversal guard: normalising against a sentinel prefix must
                    # keep the path under that prefix, else the name escapes the root.
                    if not os.path.normpath("/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s" % entry_filename).startswith(
                        "/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/"
                    ):
                        raise SuspiciousOperation("Invalid zip file contents.")
                    if zip_entry.file_size > 5242880:  # 5 MB
                        raise Exception("Excessively large compressed file.")
                    return entry_filename

                # Now iterate over the things we found
                with transaction.atomic():
                    for entry in contents:
                        filename = make_valid_filename(entry)
                        if not filename:
                            continue
                        if filename == MANIFEST:
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())
                            project.app_uuid = m["uuid"]
                            project.app_short_name = m["shortName"]
                            project.app_long_name = m["longName"]
                            project.app_company_name = m["companyName"]
                            project.app_version_label = m["versionLabel"]
                            project.sdk_version = m.get("sdkVersion", "2")
                            project.app_is_watchface = m.get("watchapp", {}).get("watchface", False)
                            project.app_is_hidden = m.get("watchapp", {}).get("hiddenApp", False)
                            project.app_is_shown_on_communication = m.get("watchapp", {}).get(
                                "onlyShownOnCommunication", False
                            )
                            project.app_capabilities = ",".join(m.get("capabilities", []))
                            if "targetPlatforms" in m:
                                project.app_platforms = ",".join(m["targetPlatforms"])
                            project.app_keys = dict_to_pretty_json(m.get("appKeys", {}))
                            project.project_type = m.get("projectType", "native")
                            if project.project_type not in [x[0] for x in Project.PROJECT_TYPES]:
                                raise Exception("Illegal project type %s" % project.project_type)
                            media_map = m["resources"]["media"]
                            # Map variant suffix strings (e.g. '~bw') back to their tag constants.
                            tag_map = {v: k for k, v in ResourceVariant.VARIANT_STRINGS.iteritems() if v}
                            desired_resources = {}
                            resources_files = {}
                            resource_identifiers = {}
                            resource_variants = {}
                            file_exists_for_root = {}
                            # Go through the media map and look for resources
                            for resource in media_map:
                                file_name = resource["file"]
                                identifier = resource["name"]
                                # Pebble.js and simply.js both have some internal resources that we don't import.
                                if project.project_type in {"pebblejs", "simplyjs"}:
                                    if identifier in {
                                        "MONO_FONT_14",
                                        "IMAGE_MENU_ICON",
                                        "IMAGE_LOGO_SPLASH",
                                        "IMAGE_TILE_SPLASH",
                                    }:
                                        continue
                                tags, root_file_name = get_filename_variant(file_name, tag_map)
                                if len(tags) != 0:
                                    raise ValueError("Generic resource filenames cannot contain a tilde (~)")
                                # NOTE(review): this checks file_name but the dict is keyed by
                                # root_file_name; presumably equal here because tilde names were
                                # rejected above — verify against get_filename_variant.
                                if file_name not in desired_resources:
                                    desired_resources[root_file_name] = []
                                print "Desired resource: %s" % root_file_name
                                desired_resources[root_file_name].append(resource)
                                file_exists_for_root[root_file_name] = False
                            for zipitem in contents:
                                # Let's just try opening the file
                                filename = make_valid_filename(zipitem)
                                if filename is False or not filename.startswith(RES_PATH):
                                    continue
                                filename = filename[len(RES_PATH) + 1 :]
                                try:
                                    extracted = z.open("%s%s/%s" % (base_dir, RES_PATH, filename))
                                except KeyError:
                                    print "Failed to open %s" % filename
                                    continue
                                # Now we know the file exists and is in the resource directory - is it one we want?
                                tags, root_file_name = get_filename_variant(filename, tag_map)
                                tags_string = ",".join(str(int(t)) for t in tags)
                                print "Importing file %s with root %s " % (zipitem.filename, root_file_name)
                                if root_file_name in desired_resources:
                                    """ FIXME: targetPlatforms is currently stored in resourceFile, but it *should* be in ResourceIdentifier. Until that is fixed, we cannot support multiple identifiers linked to a single file compiling for different platforms. When the bug is fixed, this will need to be changed. Until then, we just pick the first file on the list of desired_resources."""
                                    medias = desired_resources[root_file_name]
                                    is_font = False
                                    print "Looking for variants of %s" % root_file_name
                                    # An exception to the above warning is made for fonts, where multiple identifiers is
                                    # already implemented in the UI.
                                    if len(medias) > 1:
                                        if set(r["type"] for r in medias) != {"font"}:
                                            raise NotImplementedError(
                                                "You cannot currently import a project with multiple identifiers for a single non-font file"
                                            )
                                        else:
                                            is_font = True
                                            # NOTE(review): immediately overwritten by the loop below;
                                            # looks redundant — confirm before removing.
                                            resource = medias[-1]
                                    for resource in medias:
                                        # Make only one resource file per base resource.
                                        if root_file_name not in resources_files:
                                            kind = resource["type"]
                                            is_menu_icon = resource.get("menuIcon", False)
                                            target_platforms = resource.get("targetPlatforms", None)
                                            target_platforms = (
                                                json.dumps(target_platforms) if target_platforms else None
                                            )
                                            resources_files[root_file_name] = ResourceFile.objects.create(
                                                project=project,
                                                file_name=os.path.basename(root_file_name),
                                                kind=kind,
                                                is_menu_icon=is_menu_icon,
                                                target_platforms=target_platforms,
                                            )
                                        identifier = resource["name"]
                                        # Add all the identifiers which don't clash with existing identifiers
                                        if not identifier in resource_identifiers:
                                            tracking = resource.get("trackingAdjust", None)
                                            regex = resource.get("characterRegex", None)
                                            compatibility = resource.get("compatibility", None)
                                            ResourceIdentifier.objects.create(
                                                resource_file=resources_files[root_file_name],
                                                resource_id=identifier,
                                                character_regex=regex,
                                                tracking=tracking,
                                                compatibility=compatibility,
                                            )
                                            resource_identifiers[identifier] = resources_files[root_file_name]
                                        # At the moment, only add > 1 identifier for fonts.
                                        if not is_font:
                                            break
                                    print "Adding variant %s with tags [%s]" % (root_file_name, tags_string)
                                    # `resource` here is the last one visited by the loop above.
                                    actual_file_name = resource["file"]
                                    resource_variants[actual_file_name] = ResourceVariant.objects.create(
                                        resource_file=resources_files[root_file_name], tags=tags_string
                                    )
                                    resource_variants[actual_file_name].save_file(extracted)
                                    file_exists_for_root[root_file_name] = True
                            # Check that at least one variant of each specified resource exists.
                            for root_file_name, loaded in file_exists_for_root.iteritems():
                                if not loaded:
                                    raise KeyError(
                                        "No file was found to satisfy the manifest filename: {}".format(root_file_name)
                                    )
                        elif filename.startswith(SRC_DIR):
                            # Import C sources/headers and JS files found under src/.
                            if (not filename.startswith(".")) and (
                                filename.endswith(".c") or filename.endswith(".h") or filename.endswith(".js")
                            ):
                                base_filename = filename[len(SRC_DIR) :]
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode("utf-8"))
                    project.save()
        send_keen_event("cloudpebble", "cloudpebble_zip_import_succeeded", project=project)
        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        # Best-effort cleanup: the project may already be gone.
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event(
            "cloudpebble", "cloudpebble_zip_import_failed", user=project.owner, data={"data": {"reason": e.message}}
        )
        raise
def github_push(user, commit_message, repo_name, project):
    """Push the project's sources, resources and manifest to its GitHub repo.

    Builds a replacement git tree from the remote branch and the local project
    state, uploading blobs for changed resources along the way.

    :param user: the User whose GitHub token is used
    :param commit_message: message for the commit (used later in the flow)
    :param repo_name: 'owner/repo' name of the target repository
    :param project: the Project being pushed
    :raises Exception: if the target branch cannot be resolved
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)
    paths = [x.path for x in tree.tree]
    # Start from a copy of the remote tree; we mutate this dict into the new tree.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}
    try:
        remote_version, root = find_project_root(paths)
    except:
        # No recognisable project in the repo yet: assume our SDK version at the root.
        remote_version, root = project.sdk_version, ''
    src_root = root + 'src/'
    project_sources = project.source_files.all()
    has_changed = False
    # Add new source files and update modified ones.
    for source in project_sources:
        repo_path = src_root + source.file_name
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents())
            print "New file: %s" % repo_path
        else:
            # Compare git blob SHAs to detect content changes without downloading.
            # (PyGithub offers no setters, hence the name-mangled attribute pokes.)
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                print "Updated file: %s" % repo_path
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True
    # Remove remote sources that no longer exist locally.
    expected_source_files = [src_root + x.file_name for x in project_sources]
    for path in next_tree.keys():
        if not path.startswith(src_root):
            continue
        if path not in expected_source_files:
            del next_tree[path]
            print "Deleted file: %s" % path
            has_changed = True
    # Now try handling resource files.
    resources = project.resources.all()
    # SDK 1 kept resources under resources/src/; SDK 2 uses resources/.
    old_resource_root = root + ("resources/src/" if remote_version == '1' else 'resources/')
    new_resource_root = root + ("resources/src/" if project.sdk_version == '1' else 'resources/')
    # Migrate all the resources so we can subsequently ignore the issue.
    if old_resource_root != new_resource_root:
        print "moving resources"
        new_next_tree = next_tree.copy()
        for path in next_tree:
            if path.startswith(old_resource_root) and not path.endswith('resource_map.json'):
                new_path = new_resource_root + path[len(old_resource_root):]
                print "moving %s to %s" % (path, new_path)
                next_tree[path]._InputGitTreeElement__path = new_path
                new_next_tree[new_path] = next_tree[path]
                del new_next_tree[path]
        next_tree = new_next_tree
    for res in resources:
        repo_path = new_resource_root + res.path
        if repo_path in next_tree:
            content = res.get_contents()
            if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                print "Changed resource: %s" % repo_path
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                print "Created blob %s" % blob.sha
                next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
        else:
            # NOTE(review): a brand-new resource does not set has_changed here,
            # unlike every other mutation above — confirm this is intentional.
            print "New resource: %s" % repo_path
            blob = repo.create_git_blob(base64.b64encode(res.get_contents()), 'base64')
            print "Created blob %s" % blob.sha
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)
    # Both of these are used regardless of version
    remote_map_path = root + 'resources/src/resource_map.json'
    remote_manifest_path = root + 'appinfo.json'
    remote_wscript_path = root + 'wscript'
    if remote_version == '1':
        remote_map_sha = next_tree[remote_map_path]._InputGitTreeElement__sha if remote_map_path in next_tree else None
        if remote_map_sha is not None:
            their_res_dict = json.loads(git_blob(repo, remote_map_sha))
        else:
            their_res_dict = {'friendlyVersion': 'VERSION', 'versionDefName': '', 'media': []}
        their_manifest_dict = {}
    else:
        # NOTE(review): guard tests remote_map_path but reads remote_manifest_path;
        # this looks like it should be `remote_manifest_path in next_tree` — confirm.
        remote_manifest_sha = next_tree[remote_manifest_path]._InputGitTreeElement__sha if remote_map_path in next_tree else None
        if remote_manifest_sha is not None:
            their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha))
            their_res_dict = their_manifest_dict['resources']
        else:
            their_manifest_dict = {}
            their_res_dict = {'media': []}
    if project.sdk_version == '1':
        our_res_dict = generate_resource_dict(project, resources)
    else:
        our_manifest_dict = generate_v2_manifest_dict(project, resources)
        our_res_dict = our_manifest_dict['resources']
    if our_res_dict != their_res_dict:
        print "Resources mismatch."
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = new_resource_root + path
            if repo_path in next_tree:
                print "Deleted resource: %s" % repo_path
                del next_tree[repo_path]
        # Update the stored resource map, if applicable.
        if project.sdk_version == '1':
            if remote_map_path in next_tree:
                next_tree[remote_map_path]._InputGitTreeElement__sha = NotSet
                next_tree[remote_map_path]._InputGitTreeElement__content = dict_to_pretty_json(our_res_dict)
            else:
                next_tree[remote_map_path] = InputGitTreeElement(path=remote_map_path, mode='100644', type='blob', content=dict_to_pretty_json(our_res_dict))
            # Delete the v2 manifest, if one exists
            if remote_manifest_path in next_tree:
                del next_tree[remote_manifest_path]
    # This one is separate because there's more than just the resource map changing.
    if project.sdk_version == '2' and their_manifest_dict != our_manifest_dict:
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_v2_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_v2_manifest(project, resources))
        # Delete the v1 manifest, if one exists
        if remote_map_path in next_tree:
            del next_tree[remote_map_path]
    if project.sdk_version == '2':
        # SDK 2 projects need a wscript; create one if the repo lacks it.
        if remote_wscript_path not in next_tree:
            next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True))
            has_changed = True
    else:
        del next_tree[remote_wscript_path]
    # Commit the new tree.
    if has_changed:
        print "Has changed; committing"
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                print "removing subtree node %s" % x
        print [x._InputGitTreeElement__mode for x in next_tree.values()]
def do_import_archive(project_id, archive, delete_project=False):
    """Import a zipped Pebble project archive into an existing project.

    Newer variant: also imports worker sources (worker_src/), honours
    enableMultiJS, and creates resource identifiers in a separate pass.

    :param project_id: pk of the Project to populate
    :param archive: raw bytes of the uploaded zip archive
    :param delete_project: if True, delete the project when the import fails
    :return: True on success; any failure is re-raised after event logging
    """
    project = Project.objects.get(pk=project_id)
    try:
        # zipfile needs a named, seekable file, so spool the bytes to disk first.
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                # - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'
                WORKER_SRC_DIR = 'worker_src/'
                RES_PATH = 'resources'
                if len(contents) > 400:
                    raise Exception("Too many files in zip file.")
                file_list = [x.filename for x in contents]
                base_dir = find_project_root(file_list)
                dir_end = len(base_dir)

                def make_valid_filename(zip_entry):
                    # Return the entry's path relative to the project root, or False if
                    # the entry is outside the project directory / is the root itself.
                    entry_filename = zip_entry.filename
                    if entry_filename[:dir_end] != base_dir:
                        return False
                    entry_filename = entry_filename[dir_end:]
                    if entry_filename == '':
                        return False
                    # Path-traversal guard: normalising against a sentinel prefix must
                    # keep the path under that prefix, else the name escapes the root.
                    if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % entry_filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                        raise SuspiciousOperation("Invalid zip file contents.")
                    if zip_entry.file_size > 5242880:  # 5 MB
                        raise Exception("Excessively large compressed file.")
                    return entry_filename

                # Now iterate over the things we found
                with transaction.atomic():
                    for entry in contents:
                        filename = make_valid_filename(entry)
                        if not filename:
                            continue
                        if filename == MANIFEST:
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())
                            project.app_uuid = m['uuid']
                            project.app_short_name = m['shortName']
                            project.app_long_name = m['longName']
                            project.app_company_name = m['companyName']
                            project.app_version_label = m['versionLabel']
                            project.sdk_version = m.get('sdkVersion', '2')
                            project.app_is_watchface = m.get('watchapp', {}).get('watchface', False)
                            project.app_is_hidden = m.get('watchapp', {}).get('hiddenApp', False)
                            project.app_is_shown_on_communication = m.get('watchapp', {}).get('onlyShownOnCommunication', False)
                            project.app_capabilities = ','.join(m.get('capabilities', []))
                            project.app_modern_multi_js = m.get('enableMultiJS', False)
                            if 'targetPlatforms' in m:
                                project.app_platforms = ','.join(m['targetPlatforms'])
                            project.app_keys = dict_to_pretty_json(m.get('appKeys', {}))
                            project.project_type = m.get('projectType', 'native')
                            if project.project_type not in [x[0] for x in Project.PROJECT_TYPES]:
                                raise Exception("Illegal project type %s" % project.project_type)
                            media_map = m['resources']['media']
                            # Map variant suffix strings (e.g. '~bw') back to their tag constants.
                            tag_map = {v: k for k, v in ResourceVariant.VARIANT_STRINGS.iteritems() if v}
                            desired_resources = {}
                            resources_files = {}
                            resource_variants = {}
                            file_exists_for_root = {}
                            # Go through the media map and look for resources
                            for resource in media_map:
                                file_name = resource['file']
                                identifier = resource['name']
                                # Pebble.js and simply.js both have some internal resources that we don't import.
                                if project.project_type in {'pebblejs', 'simplyjs'}:
                                    if identifier in {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}:
                                        continue
                                tags, root_file_name = get_filename_variant(file_name, tag_map)
                                if (len(tags) != 0):
                                    raise ValueError("Generic resource filenames cannot contain a tilde (~)")
                                # NOTE(review): this checks file_name but the dict is keyed by
                                # root_file_name; presumably equal here because tilde names were
                                # rejected above — verify against get_filename_variant.
                                if file_name not in desired_resources:
                                    desired_resources[root_file_name] = []
                                print "Desired resource: %s" % root_file_name
                                desired_resources[root_file_name].append(resource)
                                file_exists_for_root[root_file_name] = False
                            for zipitem in contents:
                                # Let's just try opening the file
                                filename = make_valid_filename(zipitem)
                                if filename is False or not filename.startswith(RES_PATH):
                                    continue
                                filename = filename[len(RES_PATH)+1:]
                                try:
                                    extracted = z.open("%s%s/%s"%(base_dir, RES_PATH, filename))
                                except KeyError:
                                    print "Failed to open %s" % filename
                                    continue
                                # Now we know the file exists and is in the resource directory - is it one we want?
                                tags, root_file_name = get_filename_variant(filename, tag_map)
                                tags_string = ",".join(str(int(t)) for t in tags)
                                print "Importing file %s with root %s " % (zipitem.filename, root_file_name)
                                if root_file_name in desired_resources:
                                    medias = desired_resources[root_file_name]
                                    print "Looking for variants of %s" % root_file_name
                                    # Because 'kind' and 'is_menu_icons' are properties of ResourceFile in the database,
                                    # we just use the first one.
                                    resource = medias[0]
                                    # Make only one resource file per base resource.
                                    if root_file_name not in resources_files:
                                        kind = resource['type']
                                        is_menu_icon = resource.get('menuIcon', False)
                                        resources_files[root_file_name] = ResourceFile.objects.create(
                                            project=project,
                                            file_name=os.path.basename(root_file_name),
                                            kind=kind,
                                            is_menu_icon=is_menu_icon)
                                    # But add a resource variant for every file
                                    print "Adding variant %s with tags [%s]" % (root_file_name, tags_string)
                                    actual_file_name = resource['file']
                                    resource_variants[actual_file_name] = ResourceVariant.objects.create(resource_file=resources_files[root_file_name], tags=tags_string)
                                    resource_variants[actual_file_name].save_file(extracted)
                                    file_exists_for_root[root_file_name] = True
                            # Now add all the resource identifiers
                            # NOTE(review): if a manifest resource has no file in the archive,
                            # resources_files[root_file_name] raises KeyError here before the
                            # friendlier check below can fire — confirm intended ordering.
                            for root_file_name in desired_resources:
                                for resource in desired_resources[root_file_name]:
                                    target_platforms = json.dumps(resource['targetPlatforms']) if 'targetPlatforms' in resource else None
                                    ResourceIdentifier.objects.create(
                                        resource_file=resources_files[root_file_name],
                                        resource_id=resource['name'],
                                        target_platforms=target_platforms,
                                        # Font options
                                        character_regex=resource.get('characterRegex', None),
                                        tracking=resource.get('trackingAdjust', None),
                                        compatibility=resource.get('compatibility', None),
                                        # Bitmap options
                                        memory_format=resource.get('memoryFormat', None),
                                        storage_format=resource.get('storageFormat', None),
                                        space_optimisation=resource.get('spaceOptimization', None)
                                    )
                            # Check that at least one variant of each specified resource exists.
                            for root_file_name, loaded in file_exists_for_root.iteritems():
                                if not loaded:
                                    raise KeyError("No file was found to satisfy the manifest filename: {}".format(root_file_name))
                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                base_filename = filename[len(SRC_DIR):]
                                # NOTE(review): filename always starts with 'src/' inside this branch,
                                # so filename.startswith('js/') can never be true; this looks like it
                                # should test base_filename — confirm.
                                if project.app_modern_multi_js and filename.endswith('.js') and filename.startswith('js/'):
                                    base_filename = base_filename[len('js/'):]
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))
                        elif filename.startswith(WORKER_SRC_DIR):
                            # Worker sources go into the same project tagged with target='worker'.
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                base_filename = filename[len(WORKER_SRC_DIR):]
                                source = SourceFile.objects.create(project=project, file_name=base_filename, target='worker')
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))
                    project.save()
        send_td_event('cloudpebble_zip_import_succeeded', project=project)
        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        # Best-effort cleanup: the project may already be gone.
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_td_event('cloudpebble_zip_import_failed', data={
            'data': {
                'reason': e.message
            }
        }, user=project.owner)
        raise
def import_gist(user_id, gist_id):
    """Import a GitHub gist as a new CloudPebble project owned by the given user.

    This variant understands native, simplyjs and pebblejs gists and also
    imports resources referenced by the gist's appinfo.json.

    :param user_id: pk of the User who will own the new project
    :param gist_id: id of the (public) gist to import
    :return: the id of the newly created Project
    :raises Exception: if the gist cannot be found
    """
    user = User.objects.get(pk=user_id)
    # Anonymous client: public gists need no authentication.
    g = github.Github()
    try:
        gist = g.get_gist(gist_id)
    except github.UnknownObjectException:
        send_keen_event('cloudpebble', 'cloudpebble_gist_not_found', user=user, data={'data': {
            'gist_id': gist_id
        }})
        raise Exception("Couldn't find gist to import.")
    files = gist.files
    default_name = gist.description or 'Sample project'
    project_type = 'native'
    if 'appinfo.json' in files:
        settings = json.loads(files['appinfo.json'].content)
        if 'projectType' in settings:
            project_type = settings['projectType']
        elif len(files) == 2:
            # Manifest plus exactly one JS file implies a JS-only project type.
            if 'simply.js' in files:
                project_type = 'simplyjs'
            elif 'app.js' in files:
                project_type = 'pebblejs'
    else:
        settings = {}
        if len(files) == 1:
            # A lone JS file with no manifest also implies a JS-only project.
            if 'simply.js' in files:
                project_type = 'simplyjs'
            elif 'app.js' in files:
                project_type = 'pebblejs'
    project_settings = {
        'name': settings.get('longName', default_name),
        'owner': user,
        'app_uuid': generate_half_uuid(),
        'app_short_name': settings.get('shortName', default_name),
        'app_long_name': settings.get('longName', default_name),
        'app_company_name': settings.get('companyName', user.username),
        'app_version_code': 1,
        'app_version_label': settings.get('versionLabel', '1.0'),
        'app_is_watchface': settings.get('watchapp', {}).get('watchface', False),
        'app_capabilities': ','.join(settings.get('capabilities', [])),
        'app_keys': dict_to_pretty_json(settings.get('appKeys', {})),
        'project_type': project_type
    }
    # Create the project, its sources and resources atomically.
    with transaction.commit_on_success():
        project = Project.objects.create(**project_settings)
        if project_type != 'simplyjs':
            for filename in gist.files:
                # NOTE(review): `and` binds tighter than `or`, so .h files are imported
                # for every project type, not just native — confirm whether parentheses
                # were meant around the .c/.h alternatives.
                if (project_type == 'native' and filename.endswith('.c') or filename.endswith('.h')) or filename.endswith('.js'):
                    # Because gists can't have subdirectories.
                    if filename == 'pebble-js-app.js':
                        cp_filename = 'js/pebble-js-app.js'
                    else:
                        cp_filename = filename
                    source_file = SourceFile.objects.create(project=project, file_name=cp_filename)
                    source_file.save_file(gist.files[filename].content)
            media = settings.get('resources', {}).get('media', [])
            resources = {}
            for resource in media:
                kind = resource['type']
                def_name = resource['name']
                filename = resource['file']
                regex = resource.get('characterRegex', None)
                tracking = resource.get('trackingAdjust', None)
                is_menu_icon = resource.get('menuIcon', False)
                compatibility = resource.get('compatibility', None)
                # Skip manifest entries whose file is not actually in the gist.
                if filename not in gist.files:
                    continue
                if filename not in resources:
                    resources[filename] = ResourceFile.objects.create(project=project, file_name=filename, kind=kind, is_menu_icon=is_menu_icon)
                    # We already have this as a unicode string in .content, but it shouldn't have become unicode
                    # in the first place.
                    resources[filename].save_file(urllib2.urlopen(gist.files[filename].raw_url))
                ResourceIdentifier.objects.create(
                    resource_file=resources[filename],
                    resource_id=def_name,
                    character_regex=regex,
                    tracking=tracking,
                    compatibility=compatibility)
        else:
            # simply.js projects get their single source imported as app.js.
            source_file = SourceFile.objects.create(project=project, file_name='app.js')
            source_file.save_file(gist.files['simply.js'].content)
    send_keen_event('cloudpebble', 'cloudpebble_gist_import', project=project, data={'data': {
        'gist_id': gist_id
    }})
    return project.id
def import_gist(user_id, gist_id):
    """Import a GitHub gist as a new CloudPebble project (SDK3-aware variant).

    Determines the project type from appinfo.json (or infers it from a
    characteristic single JS file), creates the Project atomically, then
    copies sources and resources; each resource file is stored as the
    default ResourceVariant.

    :param user_id: pk of the User who will own the project
    :param gist_id: id of the gist to import
    :return: pk of the newly created Project
    :raises Exception: if the gist does not exist
    """
    user = User.objects.get(pk=user_id)
    g = github.Github()

    try:
        gist = g.get_gist(gist_id)
    except github.UnknownObjectException:
        send_keen_event('cloudpebble', 'cloudpebble_gist_not_found', user=user, data={'data': {'gist_id': gist_id}})
        raise Exception("Couldn't find gist to import.")

    files = gist.files
    default_name = gist.description or 'Sample project'

    # Project type: explicit in appinfo.json if present; otherwise a
    # two-file gist (appinfo + simply.js/app.js) or a lone simply.js/app.js
    # marks a simplyjs/pebblejs project.
    project_type = 'native'
    if 'appinfo.json' in files:
        settings = json.loads(files['appinfo.json'].content)
        if 'projectType' in settings:
            project_type = settings['projectType']
        elif len(files) == 2:
            if 'simply.js' in files:
                project_type = 'simplyjs'
            elif 'app.js' in files:
                project_type = 'pebblejs'
    else:
        settings = {}
        if len(files) == 1:
            if 'simply.js' in files:
                project_type = 'simplyjs'
            elif 'app.js' in files:
                project_type = 'pebblejs'

    project_settings = {
        'name': settings.get('longName', default_name),
        'owner': user,
        'app_uuid': generate_half_uuid(),
        'app_short_name': settings.get('shortName', default_name),
        'app_long_name': settings.get('longName', default_name),
        'app_company_name': settings.get('companyName', user.username),
        'app_version_label': settings.get('versionLabel', '1.0'),
        'app_is_watchface': settings.get('watchapp', {}).get('watchface', False),
        'app_is_hidden': settings.get('watchapp', {}).get('hiddenApp', False),
        'app_is_shown_on_communication': settings.get('watchapp', {}).get('onlyShownOnCommunication', False),
        'app_capabilities': ','.join(settings.get('capabilities', [])),
        'app_keys': dict_to_pretty_json(settings.get('appKeys', {})),
        'project_type': project_type,
        'sdk_version': settings.get('sdkVersion', '2'),
    }

    with transaction.atomic():
        project = Project.objects.create(**project_settings)

        if project_type != 'simplyjs':
            for filename in gist.files:
                # BUG FIX: 'and' binds tighter than 'or', so the original condition
                # admitted .h files for every project type. Only native projects
                # should pick up .c/.h files; all types keep .js files.
                if (project_type == 'native' and (filename.endswith('.c') or filename.endswith('.h'))) or filename.endswith('.js'):
                    # Because gists can't have subdirectories.
                    if filename == 'pebble-js-app.js':
                        cp_filename = 'js/pebble-js-app.js'
                    else:
                        cp_filename = filename
                    source_file = SourceFile.objects.create(project=project, file_name=cp_filename)
                    source_file.save_file(gist.files[filename].content)

            media = settings.get('resources', {}).get('media', [])
            resources = {}
            for resource in media:
                kind = resource['type']
                def_name = resource['name']
                filename = resource['file']
                regex = resource.get('characterRegex', None)
                tracking = resource.get('trackingAdjust', None)
                memory_format = resource.get('memoryFormat', None)
                storage_format = resource.get('storageFormat', None)
                space_optimisation = resource.get('spaceOptimization', None)
                is_menu_icon = resource.get('menuIcon', False)
                compatibility = resource.get('compatibility', None)
                if filename not in gist.files:
                    # Manifest references a file the gist doesn't contain; skip it.
                    continue
                if filename not in resources:
                    resources[filename] = ResourceFile.objects.create(project=project, file_name=filename, kind=kind, is_menu_icon=is_menu_icon)
                    # We already have this as a unicode string in .content, but it shouldn't have become unicode
                    # in the first place.
                    default_variant = ResourceVariant.objects.create(resource_file=resources[filename], tags=ResourceVariant.TAGS_DEFAULT)
                    default_variant.save_file(urllib2.urlopen(gist.files[filename].raw_url))
                ResourceIdentifier.objects.create(
                    resource_file=resources[filename],
                    resource_id=def_name,
                    character_regex=regex,
                    tracking=tracking,
                    compatibility=compatibility,
                    memory_format=memory_format,
                    storage_format=storage_format,
                    space_optimisation=space_optimisation
                )
        else:
            source_file = SourceFile.objects.create(project=project, file_name='app.js')
            source_file.save_file(gist.files['simply.js'].content)

    send_keen_event('cloudpebble', 'cloudpebble_gist_import', project=project, data={'data': {'gist_id': gist_id}})
    return project.id
def import_gist(user_id, gist_id):
    """Create a CloudPebble project from a GitHub gist (SDK2-era variant).

    A gist counts as a SimplyJS project when it consists solely of
    simply.js (plus, optionally, an appinfo.json); anything else is
    imported as a native project.

    :param user_id: pk of the importing User
    :param gist_id: id of the gist to copy
    :return: pk of the newly created Project
    :raises Exception: when the gist cannot be found
    """
    user = User.objects.get(pk=user_id)
    api = github.Github()

    try:
        gist = api.get_gist(gist_id)
    except github.UnknownObjectException:
        send_keen_event('cloudpebble', 'cloudpebble_gist_not_found', user=user, data={'data': {'gist_id': gist_id}})
        raise Exception("Couldn't find gist to import.")

    files = gist.files
    default_name = gist.description or 'Sample project'

    # simplyjs detection: exactly one extra file besides an optional
    # appinfo.json, and that file is simply.js.
    have_manifest = 'appinfo.json' in files
    settings = json.loads(files['appinfo.json'].content) if have_manifest else {}
    expected_count = 2 if have_manifest else 1
    is_native = not (len(files) == expected_count and 'simply.js' in files)

    watchapp = settings.get('watchapp', {})
    project_settings = {
        'name': settings.get('longName', default_name),
        'owner': user,
        'sdk_version': 2,
        'app_uuid': generate_half_uuid(),
        'app_short_name': settings.get('shortName', default_name),
        'app_long_name': settings.get('longName', default_name),
        'app_company_name': settings.get('companyName', user.username),
        'app_version_code': 1,
        'app_version_label': settings.get('versionLabel', '1.0'),
        'app_is_watchface': watchapp.get('watchface', False),
        'app_capabilities': ','.join(settings.get('capabilities', [])),
        'app_keys': dict_to_pretty_json(settings.get('appKeys', {})),
        'project_type': 'native' if is_native else 'simplyjs'
    }

    with transaction.commit_on_success():
        project = Project.objects.create(**project_settings)

        if is_native:
            for filename in gist.files:
                if filename.endswith(('.c', '.h')) or filename == 'pebble-js-app.js':
                    # Because gists can't have subdirectories.
                    cp_filename = 'js/pebble-js-app.js' if filename == 'pebble-js-app.js' else filename
                    imported = SourceFile.objects.create(project=project, file_name=cp_filename)
                    imported.save_file(gist.files[filename].content)
        else:
            imported = SourceFile.objects.create(project=project, file_name='app.js')
            imported.save_file(gist.files['simply.js'].content)

    send_keen_event('cloudpebble', 'cloudpebble_gist_import', project=project, data={'data': {'gist_id': gist_id}})
    return project.id
def do_import_archive(project_id, archive, delete_project=False):
    """Populate an existing Project from an uploaded zip archive.

    :param project_id: pk of the Project to fill in
    :param archive: raw bytes of the uploaded zip file
    :param delete_project: when True, delete the project again if the import fails
    :return: True on success
    :raises: re-raises any import error after reporting it
    """
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                # - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                RESOURCE_MAP = 'resources/src/resource_map.json'
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'

                if len(contents) > 200:
                    raise Exception("Too many files in zip file.")

                file_list = [x.filename for x in contents]
                version, base_dir = find_project_root(file_list)
                dir_end = len(base_dir)
                project.sdk_version = version

                # Now iterate over the things we found
                with transaction.commit_on_success():
                    for entry in contents:
                        filename = entry.filename
                        if filename[:dir_end] != base_dir:
                            continue
                        filename = filename[dir_end:]
                        if filename == '':
                            continue
                        # Reject entries that would escape the extraction root (e.g. via '..').
                        if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                            raise SuspiciousOperation("Invalid zip file contents.")
                        if entry.file_size > 5242880:  # 5 MB
                            raise Exception("Excessively large compressed file.")

                        is_map = (filename == RESOURCE_MAP and version == '1') or (filename == MANIFEST and version == '2')
                        if is_map:
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())

                            if version == '1':
                                project.version_def_name = m['versionDefName']
                                media_map = m['media']
                            elif version == '2':
                                project.app_uuid = m['uuid']
                                project.app_short_name = m['shortName']
                                project.app_long_name = m['longName']
                                project.app_company_name = m['companyName']
                                project.app_version_code = m['versionCode']
                                project.app_version_label = m['versionLabel']
                                project.app_is_watchface = m.get('watchapp', {}).get('watchface', False)
                                project.app_capabilities = ','.join(m.get('capabilities', []))
                                project.app_keys = dict_to_pretty_json(m.get('appKeys', {}))
                                media_map = m['resources']['media']

                            resources = {}
                            for resource in media_map:
                                kind = resource['type']
                                def_name = resource['defName'] if version == '1' else resource['name']
                                file_name = resource['file']
                                regex = resource.get('characterRegex', None)
                                tracking = resource.get('trackingAdjust', None)
                                is_menu_icon = resource.get('menuIcon', False)

                                if file_name not in resources:
                                    resources[file_name] = ResourceFile.objects.create(project=project, file_name=os.path.basename(file_name), kind=kind, is_menu_icon=is_menu_icon)
                                    res_path = 'resources/src' if version == '1' else 'resources'
                                    resources[file_name].save_file(z.open('%s%s/%s' % (base_dir, res_path, file_name)))

                                ResourceIdentifier.objects.create(
                                    resource_file=resources[file_name],
                                    resource_id=def_name,
                                    character_regex=regex,
                                    tracking=tracking
                                )
                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                # Flatten paths; only js/pebble-js-app.js keeps its directory.
                                base_filename = os.path.basename(filename) if not filename.endswith('js/pebble-js-app.js') else 'js/pebble-js-app.js'
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))

                    project.save()

        send_keen_event('cloudpebble', 'cloudpebble_zip_import_succeeded', project=project)
        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event('cloudpebble', 'cloudpebble_zip_import_failed', user=project.owner, data={'data': {'reason': e.message}})
        raise
def do_import_archive(project_id, archive, delete_project=False):
    """Fill an existing Project with the contents of an uploaded zip archive.

    :param project_id: pk of the Project to populate
    :param archive: raw bytes of the uploaded zip file
    :param delete_project: when True, remove the project if the import fails
    :return: True on success
    :raises: re-raises any import error after reporting it
    """
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                # - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                RESOURCE_MAP = 'resources/src/resource_map.json'
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'
                MAX_FILE_SIZE = 5242880  # 5 MB

                if len(contents) > 200:
                    raise Exception("Too many files in zip file.")

                version, base_dir = find_project_root([x.filename for x in contents])
                dir_end = len(base_dir)
                project.sdk_version = version

                # Now iterate over the things we found
                with transaction.commit_on_success():
                    for entry in contents:
                        if entry.filename[:dir_end] != base_dir:
                            continue
                        filename = entry.filename[dir_end:]
                        if filename == '':
                            continue
                        # Path-traversal guard: normalised path must stay under the sentinel root.
                        if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                            raise SuspiciousOperation("Invalid zip file contents.")
                        if entry.file_size > MAX_FILE_SIZE:
                            raise Exception("Excessively large compressed file.")

                        if (filename == RESOURCE_MAP and version == '1') or (filename == MANIFEST and version == '2'):
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as fp:
                                manifest = json.loads(fp.read())

                            if version == '1':
                                project.version_def_name = manifest['versionDefName']
                                media_map = manifest['media']
                            elif version == '2':
                                project.app_uuid = manifest['uuid']
                                project.app_short_name = manifest['shortName']
                                project.app_long_name = manifest['longName']
                                project.app_company_name = manifest['companyName']
                                project.app_version_code = manifest['versionCode']
                                project.app_version_label = manifest['versionLabel']
                                project.app_is_watchface = manifest.get('watchapp', {}).get('watchface', False)
                                project.app_capabilities = ','.join(manifest.get('capabilities', []))
                                project.app_keys = dict_to_pretty_json(manifest.get('appKeys', {}))
                                media_map = manifest['resources']['media']

                            res_path = 'resources/src' if version == '1' else 'resources'
                            resources = {}
                            for resource in media_map:
                                def_name = resource['defName'] if version == '1' else resource['name']
                                file_name = resource['file']

                                if file_name not in resources:
                                    resources[file_name] = ResourceFile.objects.create(
                                        project=project,
                                        file_name=os.path.basename(file_name),
                                        kind=resource['type'],
                                        is_menu_icon=resource.get('menuIcon', False))
                                    resources[file_name].save_file(z.open('%s%s/%s' % (base_dir, res_path, file_name)))

                                ResourceIdentifier.objects.create(
                                    resource_file=resources[file_name],
                                    resource_id=def_name,
                                    character_regex=resource.get('characterRegex', None),
                                    tracking=resource.get('trackingAdjust', None)
                                )
                        elif filename.startswith(SRC_DIR):
                            wanted = (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js'))
                            if wanted:
                                # Paths are flattened; only js/pebble-js-app.js keeps its directory.
                                if filename.endswith('js/pebble-js-app.js'):
                                    base_filename = 'js/pebble-js-app.js'
                                else:
                                    base_filename = os.path.basename(filename)
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as fp:
                                    source.save_file(fp.read().decode('utf-8'))

                    project.save()

        send_keen_event('cloudpebble', 'cloudpebble_zip_import_succeeded', project=project)
        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event('cloudpebble', 'cloudpebble_zip_import_failed', user=project.owner, data={
            'data': {
                'reason': e.message
            }
        })
        raise
def do_import_archive(project_id, archive, delete_project=False):
    """Populate an existing Project from an uploaded zip archive (SDK3-aware).

    Reads appinfo.json for project metadata, imports everything under src/
    as source files, and imports the resources the manifest lists, creating
    one ResourceVariant per (possibly platform-suffixed) resource file.

    :param project_id: pk of the Project to fill in
    :param archive: raw bytes of the uploaded zip file
    :param delete_project: when True, delete the project again if the import fails
    :return: True on success
    :raises: re-raises any import error after reporting it
    """
    project = Project.objects.get(pk=project_id)
    try:
        with tempfile.NamedTemporaryFile(suffix='.zip') as archive_file:
            archive_file.write(archive)
            archive_file.flush()
            with zipfile.ZipFile(str(archive_file.name), 'r') as z:
                contents = z.infolist()
                # Requirements:
                # - Find the folder containing the project. This may or may not be at the root level.
                # - Read in the source files, resources and resource map.
                # Observations:
                # - Legal projects must keep their source in a directory called 'src' containing at least one *.c file.
                # - Legal projects must have a resource map at resources/src/resource_map.json
                # Strategy:
                # - Find the shortest common prefix for 'resources/src/resource_map.json' and 'src/'.
                # - This is taken to be the project directory.
                # - Import every file in 'src/' with the extension .c or .h as a source file
                # - Parse resource_map.json and import files it references
                MANIFEST = 'appinfo.json'
                SRC_DIR = 'src/'

                if len(contents) > 400:
                    raise Exception("Too many files in zip file.")

                file_list = [x.filename for x in contents]
                base_dir = find_project_root(file_list)
                dir_end = len(base_dir)

                # Now iterate over the things we found
                with transaction.commit_on_success():
                    for entry in contents:
                        filename = entry.filename
                        if filename[:dir_end] != base_dir:
                            continue
                        filename = filename[dir_end:]
                        if filename == '':
                            continue
                        # Reject entries that would escape the extraction root (e.g. via '..').
                        if not os.path.normpath('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/%s' % filename).startswith('/SENTINEL_DO_NOT_ACTUALLY_USE_THIS_NAME/'):
                            raise SuspiciousOperation("Invalid zip file contents.")
                        if entry.file_size > 5242880:  # 5 MB
                            raise Exception("Excessively large compressed file.")

                        if filename == MANIFEST:
                            # We have a resource map! We can now try importing things from it.
                            with z.open(entry) as f:
                                m = json.loads(f.read())

                            project.app_uuid = m['uuid']
                            project.app_short_name = m['shortName']
                            project.app_long_name = m['longName']
                            project.app_company_name = m['companyName']
                            project.app_version_label = m['versionLabel']
                            project.sdk_version = m.get('sdkVersion', '2')
                            project.app_is_watchface = m.get('watchapp', {}).get('watchface', False)
                            project.app_capabilities = ','.join(m.get('capabilities', []))
                            if 'targetPlatforms' in m:
                                project.app_platforms = ','.join(m['targetPlatforms'])
                            project.app_keys = dict_to_pretty_json(m.get('appKeys', {}))
                            project.project_type = m.get('projectType', 'native')
                            if project.project_type not in [x[0] for x in Project.PROJECT_TYPES]:
                                raise Exception("Illegal project type %s" % project.project_type)

                            media_map = m['resources']['media']

                            resources = {}       # root file name -> ResourceFile
                            resource_files = {}  # manifest file name -> ResourceVariant
                            resource_suffix_map = {v: k for k, v in ResourceVariant.VARIANT_SUFFIXES.iteritems()}
                            del resource_suffix_map['']  # This mapping is confusing to keep around; everything is suffixed with nothing.

                            for resource in media_map:
                                kind = resource['type']
                                def_name = resource['name']
                                file_name = resource['file']
                                # Pebble.js and simply.js both have some internal resources that we don't import.
                                if project.project_type in {'pebblejs', 'simplyjs'}:
                                    if def_name in {'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}:
                                        continue
                                regex = resource.get('characterRegex', None)
                                tracking = resource.get('trackingAdjust', None)
                                is_menu_icon = resource.get('menuIcon', False)
                                compatibility = resource.get('compatibility', None)

                                if file_name not in resource_files:
                                    file_name_parts = os.path.splitext(file_name)
                                    for suffix in resource_suffix_map.iterkeys():
                                        if file_name_parts[0].endswith(suffix):
                                            # BUG FIX: os.path.splitext keeps the leading dot in the
                                            # extension, so the original's extra '+ "."' produced
                                            # names like 'image..png'. Just strip the variant suffix
                                            # and reattach the extension.
                                            root_file_name = file_name_parts[0][:len(file_name_parts[0]) - len(suffix)] + file_name_parts[1]
                                            variant = resource_suffix_map[suffix]
                                            break
                                    else:
                                        root_file_name = file_name
                                        variant = ResourceVariant.VARIANT_DEFAULT
                                    if root_file_name not in resources:
                                        resources[root_file_name] = ResourceFile.objects.create(project=project, file_name=os.path.basename(root_file_name), kind=kind, is_menu_icon=is_menu_icon)
                                    res_path = 'resources'
                                    resource_files[file_name] = ResourceVariant.objects.create(resource_file=resources[root_file_name], variant=variant)
                                    resource_files[file_name].save_file(z.open('%s%s/%s' % (base_dir, res_path, file_name)))

                                # BUG FIX: 'resources' is keyed by root_file_name, so
                                # resources[file_name] raised KeyError for any platform-suffixed
                                # file. Go through the variant, which is keyed by file_name.
                                ResourceIdentifier.objects.create(
                                    resource_file=resource_files[file_name].resource_file,
                                    resource_id=def_name,
                                    character_regex=regex,
                                    tracking=tracking,
                                    compatibility=compatibility
                                )
                        elif filename.startswith(SRC_DIR):
                            if (not filename.startswith('.')) and (filename.endswith('.c') or filename.endswith('.h') or filename.endswith('.js')):
                                # Keep the path relative to src/ (subdirectories preserved).
                                base_filename = filename[len(SRC_DIR):]
                                source = SourceFile.objects.create(project=project, file_name=base_filename)
                                with z.open(entry.filename) as f:
                                    source.save_file(f.read().decode('utf-8'))

                    project.save()

        send_keen_event('cloudpebble', 'cloudpebble_zip_import_succeeded', project=project)
        # At this point we're supposed to have successfully created the project.
        return True
    except Exception as e:
        if delete_project:
            try:
                Project.objects.get(pk=project_id).delete()
            except:
                pass
        send_keen_event('cloudpebble', 'cloudpebble_zip_import_failed', user=project.owner, data={
            'data': {
                'reason': e.message
            }
        })
        raise
def github_push(user, commit_message, repo_name, project): g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET) repo = g.get_repo(repo_name) try: branch = repo.get_branch(project.github_branch or repo.master_branch) except GithubException: raise Exception("Unable to get branch.") commit = repo.get_git_commit(branch.commit.sha) tree = repo.get_git_tree(commit.tree.sha, recursive=True) paths = [x.path for x in tree.tree] next_tree = { x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree } try: remote_version, root = find_project_root(paths) except: remote_version, root = project.sdk_version, '' src_root = root + 'src/' project_sources = project.source_files.all() has_changed = False for source in project_sources: repo_path = src_root + source.file_name if repo_path not in next_tree: has_changed = True next_tree[repo_path] = InputGitTreeElement( path=repo_path, mode='100644', type='blob', content=source.get_contents()) print "New file: %s" % repo_path else: sha = next_tree[repo_path]._InputGitTreeElement__sha our_content = source.get_contents() expected_sha = git_sha(our_content) if expected_sha != sha: print "Updated file: %s" % repo_path next_tree[repo_path]._InputGitTreeElement__sha = NotSet next_tree[ repo_path]._InputGitTreeElement__content = our_content has_changed = True expected_source_files = [src_root + x.file_name for x in project_sources] for path in next_tree.keys(): if not path.startswith(src_root): continue if path not in expected_source_files: del next_tree[path] print "Deleted file: %s" % path has_changed = True # Now try handling resource files. resources = project.resources.all() old_resource_root = root + ("resources/src/" if remote_version == '1' else 'resources/') new_resource_root = root + ("resources/src/" if project.sdk_version == '1' else 'resources/') # Migrate all the resources so we can subsequently ignore the issue. 
if old_resource_root != new_resource_root: print "moving resources" new_next_tree = next_tree.copy() for path in next_tree: if path.startswith(old_resource_root ) and not path.endswith('resource_map.json'): new_path = new_resource_root + path[len(old_resource_root):] print "moving %s to %s" % (path, new_path) next_tree[path]._InputGitTreeElement__path = new_path new_next_tree[new_path] = next_tree[path] del new_next_tree[path] next_tree = new_next_tree for res in resources: repo_path = new_resource_root + res.path if repo_path in next_tree: content = res.get_contents() if git_sha( content) != next_tree[repo_path]._InputGitTreeElement__sha: print "Changed resource: %s" % repo_path has_changed = True blob = repo.create_git_blob(base64.b64encode(content), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path]._InputGitTreeElement__sha = blob.sha else: print "New resource: %s" % repo_path blob = repo.create_git_blob(base64.b64encode(res.get_contents()), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha) # Both of these are used regardless of version remote_map_path = root + 'resources/src/resource_map.json' remote_manifest_path = root + 'appinfo.json' remote_wscript_path = root + 'wscript' if remote_version == '1': remote_map_sha = next_tree[ remote_map_path]._InputGitTreeElement__sha if remote_map_path in next_tree else None if remote_map_sha is not None: their_res_dict = json.loads(git_blob(repo, remote_map_sha)) else: their_res_dict = { 'friendlyVersion': 'VERSION', 'versionDefName': '', 'media': [] } their_manifest_dict = {} else: remote_manifest_sha = next_tree[ remote_manifest_path]._InputGitTreeElement__sha if remote_map_path in next_tree else None if remote_manifest_sha is not None: their_manifest_dict = json.loads( git_blob(repo, remote_manifest_sha)) their_res_dict = their_manifest_dict['resources'] else: their_manifest_dict = {} their_res_dict = 
{'media': []} if project.sdk_version == '1': our_res_dict = generate_resource_dict(project, resources) else: our_manifest_dict = generate_v2_manifest_dict(project, resources) our_res_dict = our_manifest_dict['resources'] if our_res_dict != their_res_dict: print "Resources mismatch." has_changed = True # Try removing things that we've deleted, if any to_remove = set(x['file'] for x in their_res_dict['media']) - set( x['file'] for x in our_res_dict['media']) for path in to_remove: repo_path = new_resource_root + path if repo_path in next_tree: print "Deleted resource: %s" % repo_path del next_tree[repo_path] # Update the stored resource map, if applicable. if project.sdk_version == '1': if remote_map_path in next_tree: next_tree[remote_map_path]._InputGitTreeElement__sha = NotSet next_tree[ remote_map_path]._InputGitTreeElement__content = dict_to_pretty_json( our_res_dict) else: next_tree[remote_map_path] = InputGitTreeElement( path=remote_map_path, mode='100644', type='blob', content=dict_to_pretty_json(our_res_dict)) # Delete the v2 manifest, if one exists if remote_manifest_path in next_tree: del next_tree[remote_manifest_path] # This one is separate because there's more than just the resource map changing. 
if project.sdk_version == '2' and their_manifest_dict != our_manifest_dict: if remote_manifest_path in next_tree: next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet next_tree[ remote_manifest_path]._InputGitTreeElement__content = generate_v2_manifest( project, resources) else: next_tree[remote_manifest_path] = InputGitTreeElement( path=remote_manifest_path, mode='100644', type='blob', content=generate_v2_manifest(project, resources)) # Delete the v1 manifest, if one exists if remote_map_path in next_tree: del next_tree[remote_map_path] if project.sdk_version == '2': if remote_wscript_path not in next_tree: next_tree[remote_wscript_path] = InputGitTreeElement( path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True)) has_changed = True else: del next_tree[remote_wscript_path] # Commit the new tree. if has_changed: print "Has changed; committing" # GitHub seems to choke if we pass the raw directory nodes off to it, # so we delete those. for x in next_tree.keys(): if next_tree[x]._InputGitTreeElement__mode == '040000': del next_tree[x] print "removing subtree node %s" % x print[x._InputGitTreeElement__mode for x in next_tree.values()] git_tree = repo.create_git_tree(next_tree.values()) print "Created tree %s" % git_tree.sha git_commit = repo.create_git_commit(commit_message, git_tree, [commit]) print "Created commit %s" % git_commit.sha git_ref = repo.get_git_ref( 'heads/%s' % (project.github_branch or repo.master_branch)) git_ref.edit(git_commit.sha) print "Updated ref %s" % git_ref.ref project.github_last_commit = git_commit.sha project.github_last_sync = now() project.save() return True send_keen_event('cloudpebble', 'cloudpebble_github_push', user=user, data={'data': { 'repo': project.github_repo }}) return False