def assemble_project(project, base_dir, build_result=None):
    """ Copy all files necessary to build a project into a directory.

    :param project: the Project whose files should be assembled.
    :param base_dir: existing directory to assemble into (mutated in place;
        for pebblejs projects it is deleted and recreated from a template).
    :param build_result: only used by the simplyjs path, which saves the
        combined JS onto it via assemble_simplyjs_sources.
    """
    resources = project.resources.all()
    if project.is_standard_project_type:
        # Write out the sources, resources, and wscript and jshint file
        assemble_source_files(project, base_dir)
        # Rocky projects have no resources to copy.
        if project.project_type != 'rocky':
            assemble_resource_directories(project, base_dir)
            assemble_resources(base_dir, project.resources_path, resources)
        with open(os.path.join(base_dir, 'wscript'), 'w') as wscript:
            wscript.write(generate_wscript_file(project))
        with open(os.path.join(base_dir, 'pebble-jshintrc'), 'w') as jshint:
            jshint.write(generate_jshint_file(project))
    elif project.project_type == 'simplyjs':
        # SimplyJS is a particularly special case
        assemble_simplyjs_sources(project, base_dir, build_result)
    elif project.project_type == 'pebblejs':
        # PebbleJS projects have to import the entire pebblejs library, including its wscript
        # NOTE(review): assemble_resource_directories is called *before* base_dir is
        # deleted and replaced by the pebblejs template tree, so its effect appears to
        # be wiped immediately — confirm whether this call is needed at all.
        assemble_resource_directories(project, base_dir)
        shutil.rmtree(base_dir)
        shutil.copytree(settings.PEBBLEJS_ROOT, base_dir)
        # Only image-like resources are supported by pebblejs.
        assemble_resources(base_dir, project.resources_path, resources, type_restrictions=('png', 'bitmap'))
        assemble_source_files(project, base_dir)
    # All projects have a manifest
    manifest_filename = manifest_name_for_project(project)
    manifest_dict = generate_manifest_dict(project, resources)
    with open(os.path.join(base_dir, manifest_filename), 'w') as f:
        f.write(json.dumps(manifest_dict))
def add_project_to_archive(z, project, prefix=''):
    """Write a project's sources, resources and manifest into an open zip.

    Everything lands under ``prefix`` plus a lowercased, underscore-sanitised
    copy of the project name. Mutates ``z``; returns nothing.
    """
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()
    # Native projects keep worker sources, and (optionally) modern JS,
    # in dedicated directories; everything else goes under src/.
    for src_file in SourceFile.objects.filter(project=project):
        if project.project_type == 'native' and src_file.target == 'worker':
            folder = 'worker_src'
        elif (project.project_type == 'native' and project.app_modern_multi_js
              and src_file.file_name.endswith('.js')):
            folder = 'src/js'
        else:
            folder = 'src'
        z.writestr('%s/%s/%s' % (prefix, folder, src_file.file_name), src_file.get_contents())
    # Every variant of every resource is written under resources/.
    all_resources = ResourceFile.objects.filter(project=project)
    for res in all_resources:
        for var in res.variants.all():
            z.writestr('%s/%s/%s' % (prefix, 'resources', var.path), var.get_contents())
    z.writestr('%s/appinfo.json' % prefix, generate_manifest(project, all_resources))
    if project.project_type == 'native':
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix=''):
    """Serialise a project into the open zip archive ``z``.

    The archive directory is ``prefix`` plus a sanitised (word characters
    only, lowercase) form of the project name. The manifest file name is
    chosen per-project by manifest_name_for_project.
    """
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()
    for src in SourceFile.objects.filter(project=project):
        # Native projects split worker sources and modern multi-file JS
        # into their own directories.
        folder = 'src'
        if project.project_type == 'native':
            if src.target == 'worker':
                folder = 'worker_src'
            elif project.app_modern_multi_js and src.file_name.endswith('.js'):
                folder = 'src/js'
        z.writestr('%s/%s/%s' % (prefix, folder, src.file_name), src.get_contents())
    resources = ResourceFile.objects.filter(project=project)
    for res in resources:
        for variant in res.variants.all():
            z.writestr('%s/%s/%s' % (prefix, 'resources', variant.path), variant.get_contents())
    manifest_name = manifest_name_for_project(project)
    z.writestr('%s/%s' % (prefix, manifest_name), generate_manifest(project, resources))
    if project.project_type == 'native':
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix=''):
    """Dump a project (sources, resources, appinfo.json) into zip ``z``.

    Files are rooted at ``prefix`` + a lowercase, underscore-only version of
    the project name. ``z`` is mutated; nothing is returned.
    """
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()
    sources = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)
    for src in sources:
        # Worker sources live beside, not inside, the app sources.
        if src.target == 'worker':
            folder = 'worker_src'
        else:
            folder = 'src'
        z.writestr('%s/%s/%s' % (prefix, folder, src.file_name), src.get_contents())
    for res in resources:
        z.writestr('%s/%s/%s' % (prefix, 'resources', res.path), res.get_contents())
    z.writestr('%s/appinfo.json' % prefix, generate_manifest(project, resources))
    if project.project_type == 'native':
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix=""):
    """Write the project's files into the open zip archive ``z``.

    All entries are placed under ``prefix`` followed by a sanitised
    (non-word characters collapsed to "_", lowercased) project name.
    """
    safe_name = re.sub(r"[^\w]+", "_", project.name).strip("_").lower()
    prefix = prefix + safe_name
    for source in SourceFile.objects.filter(project=project):
        folder = "worker_src" if source.target == "worker" else "src"
        z.writestr("%s/%s/%s" % (prefix, folder, source.file_name), source.get_contents())
    resources = ResourceFile.objects.filter(project=project)
    # Each resource may have several platform variants; archive them all.
    for resource in resources:
        for variant in resource.variants.all():
            z.writestr("%s/%s/%s" % (prefix, "resources", variant.path), variant.get_contents())
    z.writestr("%s/appinfo.json" % prefix, generate_manifest(project, resources))
    if project.project_type == "native":
        # This file is always the same, but needed to build.
        z.writestr("%s/wscript" % prefix, generate_wscript_file(project, for_export=True))
        z.writestr("%s/jshintrc" % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix=''):
    """Archive a project into ``z`` using the project's own path layout.

    Source files are placed at their project_path; resources under the
    project's resources_path; the manifest name comes from
    manifest_name_for_project.
    """
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()
    resources = ResourceFile.objects.filter(project=project)
    for src in SourceFile.objects.filter(project=project):
        z.writestr(os.path.join(prefix, src.project_path), src.get_contents())
    for res in resources:
        for variant in res.variants.all():
            z.writestr('%s/%s/%s' % (prefix, project.resources_path, variant.path), variant.get_contents())
    z.writestr('%s/%s' % (prefix, manifest_name_for_project(project)), generate_manifest(project, resources))
    if project.is_standard_project_type:
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix=''):
    """Write a project into zip ``z``, handling SDK 1 and SDK 2 layouts.

    SDK 1 keeps resources (and a resource_map.json) under resources/src/;
    SDK 2 uses resources/ plus an appinfo.json manifest.
    """
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()
    sources = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)
    for source in sources:
        z.writestr('%s/src/%s' % (prefix, source.file_name), source.get_contents())
    is_sdk1 = project.sdk_version == '1'
    res_path = 'resources/src' if is_sdk1 else 'resources'
    for resource in resources:
        z.writestr('%s/%s/%s' % (prefix, res_path, resource.path), resource.get_contents())
    if is_sdk1:
        z.writestr('%s/resources/src/resource_map.json' % prefix, generate_resource_map(project, resources))
    else:
        z.writestr('%s/appinfo.json' % prefix, generate_v2_manifest(project, resources))
    # This file is always the same, but needed to build.
    z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
    z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix='', suffix=''):
    """Archive a project into ``z`` under a name derived from the project.

    The root directory is ``prefix`` + sanitised project name + ``suffix``.
    Layout (project_path, resources_path, manifest name) is delegated to
    the project/model helpers.
    """
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower() + suffix
    resources = ResourceFile.objects.filter(project=project)
    for src in SourceFile.objects.filter(project=project):
        z.writestr(os.path.join(prefix, src.project_path), src.get_contents())
    for res in resources:
        for var in res.variants.all():
            z.writestr('%s/%s/%s' % (prefix, project.resources_path, var.path), var.get_contents())
    z.writestr('%s/%s' % (prefix, manifest_name_for_project(project)), generate_manifest(project, resources))
    if project.is_standard_project_type:
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def github_push(user, commit_message, repo_name, project): g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET) repo = g.get_repo(repo_name) try: branch = repo.get_branch(project.github_branch or repo.master_branch) except GithubException: raise Exception("Unable to get branch.") commit = repo.get_git_commit(branch.commit.sha) tree = repo.get_git_tree(commit.tree.sha, recursive=True) paths = [x.path for x in tree.tree] next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree} try: root = find_project_root(paths) except: root = '' src_root = root + 'src/' project_sources = project.source_files.all() has_changed = False for source in project_sources: repo_path = src_root + source.file_name if repo_path not in next_tree: has_changed = True next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents()) print "New file: %s" % repo_path else: sha = next_tree[repo_path]._InputGitTreeElement__sha our_content = source.get_contents() expected_sha = git_sha(our_content) if expected_sha != sha: print "Updated file: %s" % repo_path next_tree[repo_path]._InputGitTreeElement__sha = NotSet next_tree[repo_path]._InputGitTreeElement__content = our_content has_changed = True expected_source_files = [src_root + x.file_name for x in project_sources] for path in next_tree.keys(): if not path.startswith(src_root): continue if path not in expected_source_files: del next_tree[path] print "Deleted file: %s" % path has_changed = True # Now try handling resource files. 
resources = project.resources.all() resource_root = root + 'resources/' for res in resources: for variant in res.variants.all(): repo_path = resource_root + variant.path if repo_path in next_tree: content = variant.get_contents() if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha: print "Changed resource: %s" % repo_path has_changed = True blob = repo.create_git_blob(base64.b64encode(content), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path]._InputGitTreeElement__sha = blob.sha else: print "New resource: %s" % repo_path blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha) remote_manifest_path = root + 'appinfo.json' remote_wscript_path = root + 'wscript' remote_manifest_sha = next_tree[remote_manifest_path]._InputGitTreeElement__sha if remote_manifest_path in next_tree else None if remote_manifest_sha is not None: their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha)) their_res_dict = their_manifest_dict['resources'] else: their_manifest_dict = {} their_res_dict = {'media': []} our_manifest_dict = generate_manifest_dict(project, resources) our_res_dict = our_manifest_dict['resources'] if our_res_dict != their_res_dict: print "Resources mismatch." has_changed = True # Try removing things that we've deleted, if any to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media']) for path in to_remove: repo_path = resource_root + path if repo_path in next_tree: print "Deleted resource: %s" % repo_path del next_tree[repo_path] # This one is separate because there's more than just the resource map changing. 
if their_manifest_dict != our_manifest_dict: if remote_manifest_path in next_tree: next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources) else: next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_manifest(project, resources)) if project.project_type == 'native' and remote_wscript_path not in next_tree: next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True)) has_changed = True # Commit the new tree. if has_changed: print "Has changed; committing" # GitHub seems to choke if we pass the raw directory nodes off to it, # so we delete those. for x in next_tree.keys(): if next_tree[x]._InputGitTreeElement__mode == '040000': del next_tree[x] print "removing subtree node %s" % x print [x._InputGitTreeElement__mode for x in next_tree.values()]
def run_compile(build_result):
    """Build task: assemble a project into a temp directory, run the SDK's
    waf build, and record the outcome on the BuildResult row.

    :param build_result: primary key of the BuildResult to build; the row is
        loaded, updated (state, log, pbw, sizes, debug info) and saved.
    """
    build_result = BuildResult.objects.get(pk=build_result)
    project = build_result.project
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)
    # Assemble the project somewhere
    # (inside the chroot's tmp if a chroot is configured, so the build can see it)
    base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, 'tmp') if settings.CHROOT_ROOT else None)
    manifest_filename = manifest_name_for_project(project)
    try:
        # Resources
        resource_root = 'resources'
        os.makedirs(os.path.join(base_dir, resource_root, 'images'))
        os.makedirs(os.path.join(base_dir, resource_root, 'fonts'))
        os.makedirs(os.path.join(base_dir, resource_root, 'data'))
        if project.project_type == 'native':
            # Source code
            create_source_files(project, base_dir)
            manifest_dict = generate_manifest_dict(project, resources)
            open(os.path.join(base_dir, manifest_filename), 'w').write(json.dumps(manifest_dict))
            for f in resources:
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                # NOTE(review): abs_target is unused here — the pebblejs branch
                # below uses the equivalent value for a path-traversal check;
                # this branch computes it but never checks it. Confirm intent.
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                f.copy_all_variants_to_dir(target_dir)
            # Reconstitute the SDK
            open(os.path.join(base_dir, 'wscript'), 'w').write(generate_wscript_file(project))
            open(os.path.join(base_dir, 'pebble-jshintrc'), 'w').write(generate_jshint_file(project))
        elif project.project_type == 'simplyjs':
            # SimplyJS: replace the temp dir with the SimplyJS template tree,
            # then inject the user's combined JS as a final script.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.SIMPLYJS_ROOT, base_dir)
            manifest_dict = generate_simplyjs_manifest_dict(project)
            js = '\n\n'.join(x.get_contents() for x in source_files if x.file_name.endswith('.js'))
            escaped_js = json.dumps(js)
            build_result.save_simplyjs(js)
            open(os.path.join(base_dir, manifest_filename), 'w').write(json.dumps(manifest_dict))
            open(os.path.join(base_dir, 'src', 'js', 'zzz_userscript.js'), 'w').write("""
            (function() {
                simply.mainScriptSource = %s;
            })();
            """ % escaped_js)
        elif project.project_type == 'pebblejs':
            # PebbleJS: start from the PebbleJS template tree and overlay the
            # user's sources and image resources.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.PEBBLEJS_ROOT, base_dir)
            manifest_dict = generate_pebblejs_manifest_dict(project, resources)
            create_source_files(project, base_dir)
            for f in resources:
                # PebbleJS only supports image resources.
                if f.kind not in ('png', 'bitmap'):
                    continue
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                # Guard against path traversal via a crafted file name.
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.get_default_variant().copy_to_path(abs_target)
            open(os.path.join(base_dir, manifest_filename), 'w').write(json.dumps(manifest_dict))
        # Build the thing
        cwd = os.getcwd()
        success = False
        output = ''
        build_start_time = now()
        try:
            os.chdir(base_dir)
            # Install dependencies if there are any
            dependencies = project.get_dependencies()
            if dependencies:
                # Checking for path-based dependencies is performed by the database so in theory we shouldn't need to do
                # it here but we will do it anyway just to be extra safe.
                for version in dependencies.values():
                    validate_dependency_version(version)
                npm_command = [settings.NPM_BINARY, "install", "--ignore-scripts"]
                output = subprocess.check_output(npm_command, stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)
                subprocess.check_output([settings.NPM_BINARY, "dedupe"], stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)
            # Pick the SDK toolchain matching the project's SDK version.
            if project.sdk_version == '2':
                environ = os.environ.copy()
                environ['PATH'] = '{}:{}'.format(settings.ARM_CS_TOOLS, environ['PATH'])
                command = [settings.SDK2_PEBBLE_WAF, "configure", "build"]
            elif project.sdk_version == '3':
                environ = os.environ.copy()
                environ['PATH'] = '{}:{}'.format(settings.ARM_CS_TOOLS, environ['PATH'])
                command = [settings.SDK3_PEBBLE_WAF, "configure", "build"]
            else:
                raise Exception("invalid sdk version.")
            output += subprocess.check_output(command, stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits, env=environ)
        except subprocess.CalledProcessError as e:
            output = e.output
            logger.warning("Build command failed with error:\n%s\n", output)
            success = False
        except Exception as e:
            logger.exception("Unexpected exception during build")
            success = False
            output = str(e)
        else:
            success = True
            # The pbw is named after the (random) temp directory.
            temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir))
            if not os.path.exists(temp_file):
                success = False
                logger.warning("Success was a lie.")
        finally:
            build_end_time = now()
            os.chdir(cwd)
            if success:
                # Try reading file sizes out of it first.
                try:
                    s = os.stat(temp_file)
                    build_result.total_size = s.st_size
                    # Now peek into the zip to see the component parts
                    with zipfile.ZipFile(temp_file, 'r') as z:
                        store_size_info(project, build_result, 'aplite', z)
                        store_size_info(project, build_result, 'basalt', z)
                        store_size_info(project, build_result, 'chalk', z)
                except Exception as e:
                    logger.warning("Couldn't extract filesizes: %s", e)
                # Try pulling out debug information.
                # SDK 2 builds a single (aplite) binary; SDK 3 builds per-platform.
                if project.sdk_version == '2':
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'pebble-worker.elf'))
                else:
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-worker.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-worker.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-worker.elf'))
                build_result.save_pbw(temp_file)
            build_result.save_build_log(output or 'Failed to get output')
            build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED
            build_result.finished = now()
            build_result.save()
            data = {
                'data': {
                    'cloudpebble': {
                        'build_id': build_result.id,
                        'job_run_time': (build_result.finished - build_result.started).total_seconds(),
                    },
                    'build_time': (build_end_time - build_start_time).total_seconds(),
                }
            }
            event_name = 'app_build_succeeded' if success else 'app_build_failed'
            send_td_event(event_name, data, project=project)
    except Exception as e:
        # Internal failure (not a compiler error): mark the build failed and
        # best-effort record what happened.
        logger.exception("Build failed due to internal error: %s", e)
        build_result.state = BuildResult.STATE_FAILED
        build_result.finished = now()
        try:
            build_result.save_build_log("Something broke:\n%s" % e)
        except:
            pass
        build_result.save()
    finally:
        shutil.rmtree(base_dir)
def run_compile(build_result): build_result = BuildResult.objects.get(pk=build_result) project = build_result.project source_files = SourceFile.objects.filter(project=project) resources = ResourceFile.objects.filter(project=project) # Assemble the project somewhere base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, 'tmp') if settings.CHROOT_ROOT else None) try: # Resources resource_root = 'resources' os.makedirs(os.path.join(base_dir, resource_root, 'images')) os.makedirs(os.path.join(base_dir, resource_root, 'fonts')) os.makedirs(os.path.join(base_dir, resource_root, 'data')) if project.project_type == 'native': # Source code create_source_files(project, base_dir) manifest_dict = generate_manifest_dict(project, resources) open(os.path.join(base_dir, 'appinfo.json'), 'w').write(json.dumps(manifest_dict)) for f in resources: target_dir = os.path.abspath( os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind])) abs_target = os.path.abspath( os.path.join(target_dir, f.file_name)) f.copy_all_variants_to_dir(target_dir) # Reconstitute the SDK open(os.path.join(base_dir, 'wscript'), 'w').write(generate_wscript_file(project)) open(os.path.join(base_dir, 'pebble-jshintrc'), 'w').write(generate_jshint_file(project)) elif project.project_type == 'simplyjs': shutil.rmtree(base_dir) shutil.copytree(settings.SIMPLYJS_ROOT, base_dir) manifest_dict = generate_simplyjs_manifest_dict(project) js = '\n\n'.join(x.get_contents() for x in source_files if x.file_name.endswith('.js')) escaped_js = json.dumps(js) build_result.save_simplyjs(js) open(os.path.join(base_dir, 'appinfo.json'), 'w').write(json.dumps(manifest_dict)) open(os.path.join(base_dir, 'src', 'js', 'zzz_userscript.js'), 'w').write(""" (function() { simply.mainScriptSource = %s; })(); """ % escaped_js) elif project.project_type == 'pebblejs': shutil.rmtree(base_dir) shutil.copytree(settings.PEBBLEJS_ROOT, base_dir) manifest_dict = generate_pebblejs_manifest_dict(project, resources) 
create_source_files(project, base_dir) for f in resources: if f.kind != 'png': continue target_dir = os.path.abspath( os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind])) abs_target = os.path.abspath( os.path.join(target_dir, f.file_name)) if not abs_target.startswith(target_dir): raise Exception("Suspicious filename: %s" % f.file_name) f.copy_to_path(ResourceVariant.VARIANT_DEFAULT, abs_target) open(os.path.join(base_dir, 'appinfo.json'), 'w').write(json.dumps(manifest_dict)) # Build the thing cwd = os.getcwd() success = False output = 'Failed to get output' build_start_time = now() try: os.chdir(base_dir) if project.sdk_version == '2': tool = settings.SDK2_PEBBLE_TOOL elif project.sdk_version == '3': tool = settings.SDK3_PEBBLE_TOOL else: raise Exception("invalid sdk version.") output = subprocess.check_output([tool, "build"], stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits) except subprocess.CalledProcessError as e: output = e.output print output success = False except Exception as e: success = False output = str(e) else: success = True temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir)) if not os.path.exists(temp_file): success = False print "Success was a lie." finally: build_end_time = now() os.chdir(cwd) if success: # Try reading file sizes out of it first. try: s = os.stat(temp_file) build_result.total_size = s.st_size # Now peek into the zip to see the component parts with zipfile.ZipFile(temp_file, 'r') as z: store_size_info(build_result, 'aplite', z) store_size_info(build_result, 'basalt', z) except Exception as e: print "Couldn't extract filesizes: %s" % e # Try pulling out debug information. 
if project.sdk_version == '2': save_debug_info( base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'pebble-app.elf')) save_debug_info( base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'pebble-worker.elf')) else: save_debug_info( base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-app.elf')) save_debug_info( base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-worker.elf')) save_debug_info( base_dir, build_result, BuildResult.DEBUG_APP, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-app.elf')) save_debug_info( base_dir, build_result, BuildResult.DEBUG_WORKER, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-worker.elf')) build_result.save_pbw(temp_file) build_result.save_build_log(output) build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED build_result.finished = now() build_result.save() data = { 'data': { 'cloudpebble': { 'build_id': build_result.id, 'job_run_time': (build_result.finished - build_result.started).total_seconds(), }, 'build_time': (build_end_time - build_start_time).total_seconds(), } } event_name = 'app_build_succeeded' if success else 'app_build_failed' send_keen_event(['cloudpebble', 'sdk'], event_name, data, project=project) except Exception as e: print "Build failed due to internal error: %s" % e traceback.print_exc() build_result.state = BuildResult.STATE_FAILED build_result.finished = now() try: build_result.save_build_log("Something broke:\n%s" % e) except: pass build_result.save() finally: # shutil.rmtree(base_dir) print base_dir
def github_push(user, commit_message, repo_name, project):
    """Sync the project's sources, resources and manifest into a new git tree
    based on the head of the project's GitHub branch.

    Builds ``next_tree`` (path -> InputGitTreeElement) from the remote tree,
    overlays the local files, removes paths we no longer own, and tracks
    ``has_changed`` so no-op pushes can be skipped.
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)
    paths = [x.path for x in tree.tree]
    # Start from a copy of the remote tree; we mutate it to match local state.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}
    try:
        root = find_project_root(paths)
    except:
        root = ''
    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    src_root = root + 'src/'
    worker_src_root = root + 'worker_src/'
    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        repo_path = src_root + source.file_name
        # Native projects route worker sources and modern multi-file JS into
        # their own directories.
        if project.project_type == 'native':
            if source.target == 'worker':
                repo_path = worker_src_root + source.file_name
            elif project.app_modern_multi_js and source.file_name.endswith('.js'):
                repo_path = src_root + 'js/' + source.file_name
        update_expected_paths(repo_path)
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents())
            print "New file: %s" % repo_path
        else:
            # Compare our content's git blob sha against the remote one to
            # detect edits without downloading the remote blob.
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                print "Updated file: %s" % repo_path
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True
    # Now try handling resource files.
    resources = project.resources.all()
    resource_root = root + 'resources/'
    for res in resources:
        for variant in res.variants.all():
            repo_path = resource_root + variant.path
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    print "Changed resource: %s" % repo_path
                    has_changed = True
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    print "Created blob %s" % blob.sha
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                print "New resource: %s" % repo_path
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                print "Created blob %s" % blob.sha
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)
    # Manage deleted files
    # NOTE: `root` inside the generator expression shadows the outer `root`;
    # harmless in Python 2 (genexps have their own scope) but worth renaming.
    # NOTE: deleting from next_tree while iterating .keys() relies on Python 2
    # .keys() returning a list; this would raise under Python 3.
    for path in next_tree.keys():
        if not (any(path.startswith(root) for root in (src_root, resource_root, worker_src_root))):
            continue
        if path not in expected_paths:
            del next_tree[path]
            print "Deleted file: %s" % path
            has_changed = True
    # Compare the resource dicts
    remote_manifest_path = root + 'appinfo.json'
    remote_wscript_path = root + 'wscript'
    remote_manifest_sha = next_tree[remote_manifest_path]._InputGitTreeElement__sha if remote_manifest_path in next_tree else None
    if remote_manifest_sha is not None:
        their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha))
        their_res_dict = their_manifest_dict['resources']
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}
    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict['resources']
    if our_res_dict != their_res_dict:
        print "Resources mismatch."
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                print "Deleted resource: %s" % repo_path
                del next_tree[repo_path]
    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_manifest(project, resources))
    # Native projects need a wscript in the repo to be buildable.
    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True))
        has_changed = True
    # Commit the new tree.
    if has_changed:
        print "Has changed; committing"
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                print "removing subtree node %s" % x
        print [x._InputGitTreeElement__mode for x in next_tree.values()]
def github_push(user, commit_message, repo_name, project): g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET) repo = g.get_repo(repo_name) try: branch = repo.get_branch(project.github_branch or repo.master_branch) except GithubException: raise Exception("Unable to get branch.") commit = repo.get_git_commit(branch.commit.sha) tree = repo.get_git_tree(commit.tree.sha, recursive=True) paths = [x.path for x in tree.tree] next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree} try: remote_version, root = find_project_root(paths) except: remote_version, root = project.sdk_version, '' src_root = root + 'src/' project_sources = project.source_files.all() has_changed = False for source in project_sources: repo_path = src_root + source.file_name if repo_path not in next_tree: has_changed = True next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents()) print "New file: %s" % repo_path else: sha = next_tree[repo_path]._InputGitTreeElement__sha our_content = source.get_contents() expected_sha = git_sha(our_content) if expected_sha != sha: print "Updated file: %s" % repo_path next_tree[repo_path]._InputGitTreeElement__sha = NotSet next_tree[repo_path]._InputGitTreeElement__content = our_content has_changed = True expected_source_files = [src_root + x.file_name for x in project_sources] for path in next_tree.keys(): if not path.startswith(src_root): continue if path not in expected_source_files: del next_tree[path] print "Deleted file: %s" % path has_changed = True # Now try handling resource files. resources = project.resources.all() old_resource_root = root + ("resources/src/" if remote_version == '1' else 'resources/') new_resource_root = root + ("resources/src/" if project.sdk_version == '1' else 'resources/') # Migrate all the resources so we can subsequently ignore the issue. 
if old_resource_root != new_resource_root: print "moving resources" new_next_tree = next_tree.copy() for path in next_tree: if path.startswith(old_resource_root) and not path.endswith('resource_map.json'): new_path = new_resource_root + path[len(old_resource_root):] print "moving %s to %s" % (path, new_path) next_tree[path]._InputGitTreeElement__path = new_path new_next_tree[new_path] = next_tree[path] del new_next_tree[path] next_tree = new_next_tree for res in resources: repo_path = new_resource_root + res.path if repo_path in next_tree: content = res.get_contents() if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha: print "Changed resource: %s" % repo_path has_changed = True blob = repo.create_git_blob(base64.b64encode(content), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path]._InputGitTreeElement__sha = blob.sha else: print "New resource: %s" % repo_path blob = repo.create_git_blob(base64.b64encode(res.get_contents()), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha) # Both of these are used regardless of version remote_map_path = root + 'resources/src/resource_map.json' remote_manifest_path = root + 'appinfo.json' remote_wscript_path = root + 'wscript' if remote_version == '1': remote_map_sha = next_tree[remote_map_path]._InputGitTreeElement__sha if remote_map_path in next_tree else None if remote_map_sha is not None: their_res_dict = json.loads(git_blob(repo, remote_map_sha)) else: their_res_dict = {'friendlyVersion': 'VERSION', 'versionDefName': '', 'media': []} their_manifest_dict = {} else: remote_manifest_sha = next_tree[remote_manifest_path]._InputGitTreeElement__sha if remote_map_path in next_tree else None if remote_manifest_sha is not None: their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha)) their_res_dict = their_manifest_dict['resources'] else: their_manifest_dict = {} their_res_dict = {'media': []} 
if project.sdk_version == '1': our_res_dict = generate_resource_dict(project, resources) else: our_manifest_dict = generate_v2_manifest_dict(project, resources) our_res_dict = our_manifest_dict['resources'] if our_res_dict != their_res_dict: print "Resources mismatch." has_changed = True # Try removing things that we've deleted, if any to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media']) for path in to_remove: repo_path = new_resource_root + path if repo_path in next_tree: print "Deleted resource: %s" % repo_path del next_tree[repo_path] # Update the stored resource map, if applicable. if project.sdk_version == '1': if remote_map_path in next_tree: next_tree[remote_map_path]._InputGitTreeElement__sha = NotSet next_tree[remote_map_path]._InputGitTreeElement__content = dict_to_pretty_json(our_res_dict) else: next_tree[remote_map_path] = InputGitTreeElement(path=remote_map_path, mode='100644', type='blob', content=dict_to_pretty_json(our_res_dict)) # Delete the v2 manifest, if one exists if remote_manifest_path in next_tree: del next_tree[remote_manifest_path] # This one is separate because there's more than just the resource map changing. 
if project.sdk_version == '2' and their_manifest_dict != our_manifest_dict: if remote_manifest_path in next_tree: next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_v2_manifest(project, resources) else: next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_v2_manifest(project, resources)) # Delete the v1 manifest, if one exists if remote_map_path in next_tree: del next_tree[remote_map_path] if project.sdk_version == '2': if remote_wscript_path not in next_tree: next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True)) has_changed = True else: del next_tree[remote_wscript_path] # Commit the new tree. if has_changed: print "Has changed; committing" # GitHub seems to choke if we pass the raw directory nodes off to it, # so we delete those. for x in next_tree.keys(): if next_tree[x]._InputGitTreeElement__mode == '040000': del next_tree[x] print "removing subtree node %s" % x print [x._InputGitTreeElement__mode for x in next_tree.values()]
def run_compile(build_result):
    """Assemble a project in a temp directory, run the Pebble build, and
    record the outcome (pbw, sizes, debug info, log, analytics) on the
    BuildResult row.

    Args:
        build_result: primary key of the BuildResult to process; the row is
            re-fetched and its state/outputs are updated and saved.
    """
    build_result = BuildResult.objects.get(pk=build_result)
    project = build_result.project
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)
    # Assemble the project somewhere
    # (inside the chroot's tmp when sandboxed builds are configured)
    base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, 'tmp') if settings.CHROOT_ROOT else None)
    try:
        # Resources
        resource_root = 'resources'
        os.makedirs(os.path.join(base_dir, resource_root, 'images'))
        os.makedirs(os.path.join(base_dir, resource_root, 'fonts'))
        os.makedirs(os.path.join(base_dir, resource_root, 'data'))
        if project.project_type == 'native':
            # Source code
            src_dir = os.path.join(base_dir, 'src')
            os.mkdir(src_dir)
            create_source_files(source_files, src_dir)
            manifest_dict = generate_v2_manifest_dict(project, resources)
            open(os.path.join(base_dir, 'appinfo.json'), 'w').write(json.dumps(manifest_dict))
            for f in resources:
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                # Reject path-traversal attempts via crafted file names.
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.copy_to_path(abs_target)
            # Reconstitute the SDK
            open(os.path.join(base_dir, 'wscript'), 'w').write(generate_wscript_file(project))
            open(os.path.join(base_dir, 'pebble-jshintrc'), 'w').write(generate_jshint_file(project))
        elif project.project_type == 'simplyjs':
            # Simply.js: replace the scaffold dir with the SimplyJS tree and
            # inject the user's concatenated JS as a single script.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.SIMPLYJS_ROOT, base_dir)
            manifest_dict = generate_simplyjs_manifest_dict(project)
            js = '\n\n'.join(x.get_contents() for x in source_files if x.file_name.endswith('.js'))
            escaped_js = json.dumps(js)
            build_result.save_simplyjs(js)
            open(os.path.join(base_dir, 'appinfo.json'), 'w').write(json.dumps(manifest_dict))
            open(os.path.join(base_dir, 'src', 'js', 'zzz_userscript.js'), 'w').write("""
(function() {
    simply.mainScriptSource = %s;
})();
""" % escaped_js)
        elif project.project_type == 'pebblejs':
            # Pebble.js: copy the whole library tree, then add user JS and
            # png resources only.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.PEBBLEJS_ROOT, base_dir)
            manifest_dict = generate_pebblejs_manifest_dict(project, resources)
            create_source_files(source_files, os.path.join(base_dir, 'src', 'js'))
            for f in resources:
                if f.kind != 'png':
                    continue
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                # Reject path-traversal attempts via crafted file names.
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.copy_to_path(abs_target)
            open(os.path.join(base_dir, 'appinfo.json'), 'w').write(json.dumps(manifest_dict))
        # Build the thing
        cwd = os.getcwd()
        success = False
        output = 'Failed to get output'
        build_start_time = now()
        try:
            os.chdir(base_dir)
            # _set_resource_limits caps the child process; see its definition.
            output = subprocess.check_output([settings.PEBBLE_TOOL, "build"], stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)
        except subprocess.CalledProcessError as e:
            output = e.output
            print output
            success = False
        else:
            success = True
            temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir))
            # The tool can exit 0 without producing a pbw; treat that as failure.
            if not os.path.exists(temp_file):
                success = False
                print "Success was a lie."
        finally:
            build_end_time = now()
            os.chdir(cwd)
            if success:
                # Try reading file sizes out of it first.
                try:
                    s = os.stat(temp_file)
                    build_result.total_size = s.st_size
                    # Now peek into the zip to see the component parts
                    with zipfile.ZipFile(temp_file, 'r') as z:
                        build_result.binary_size = z.getinfo('pebble-app.bin').file_size
                        build_result.resource_size = z.getinfo('app_resources.pbpack').file_size
                except Exception as e:
                    # Size info is best-effort; a failure here doesn't fail the build.
                    print "Couldn't extract filesizes: %s" % e
                # Try pulling out debug information.
                elf_file = os.path.join(base_dir, 'build', 'pebble-app.elf')
                if os.path.exists(elf_file):
                    try:
                        debug_info = apptools.addr2lines.create_coalesced_group(elf_file)
                    except:
                        print traceback.format_exc()
                    else:
                        build_result.save_debug_info(debug_info)
                build_result.save_pbw(temp_file)
            build_result.save_build_log(output)
            build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED
            build_result.finished = now()
            build_result.save()
            data = {
                'data': {
                    'cloudpebble': {
                        'build_id': build_result.id,
                        'job_run_time': (build_result.finished - build_result.started).total_seconds(),
                    },
                    'build_time': (build_end_time - build_start_time).total_seconds(),
                }
            }
            event_name = 'app_build_succeeded' if success else 'app_build_failed'
            send_keen_event(['cloudpebble', 'sdk'], event_name, data, project=project)
    except Exception as e:
        # Internal (non-compiler) failure: record it on the BuildResult so the
        # user sees *something* rather than a stuck build.
        print "Build failed due to internal error: %s" % e
        traceback.print_exc()
        build_result.state = BuildResult.STATE_FAILED
        build_result.finished = now()
        try:
            build_result.save_build_log("Something broke:\n%s" % e)
        except:
            pass
        build_result.save()
    finally:
        # Always clean up the temporary build tree.
        shutil.rmtree(base_dir)
def run_compile(build_result): build_result = BuildResult.objects.get(pk=build_result) project = build_result.project source_files = SourceFile.objects.filter(project=project) resources = ResourceFile.objects.filter(project=project) # Assemble the project somewhere base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, "tmp") if settings.CHROOT_ROOT else None) try: # Resources resource_root = "resources" os.makedirs(os.path.join(base_dir, resource_root, "images")) os.makedirs(os.path.join(base_dir, resource_root, "fonts")) os.makedirs(os.path.join(base_dir, resource_root, "data")) if project.project_type == "native": # Source code create_source_files(project, base_dir) manifest_dict = generate_manifest_dict(project, resources) open(os.path.join(base_dir, "appinfo.json"), "w").write(json.dumps(manifest_dict)) for f in resources: target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind])) abs_target = os.path.abspath(os.path.join(target_dir, f.file_name)) f.copy_all_variants_to_dir(target_dir) # Reconstitute the SDK open(os.path.join(base_dir, "wscript"), "w").write(generate_wscript_file(project)) open(os.path.join(base_dir, "pebble-jshintrc"), "w").write(generate_jshint_file(project)) elif project.project_type == "simplyjs": shutil.rmtree(base_dir) shutil.copytree(settings.SIMPLYJS_ROOT, base_dir) manifest_dict = generate_simplyjs_manifest_dict(project) js = "\n\n".join(x.get_contents() for x in source_files if x.file_name.endswith(".js")) escaped_js = json.dumps(js) build_result.save_simplyjs(js) open(os.path.join(base_dir, "appinfo.json"), "w").write(json.dumps(manifest_dict)) open(os.path.join(base_dir, "src", "js", "zzz_userscript.js"), "w").write( """ (function() { simply.mainScriptSource = %s; })(); """ % escaped_js ) elif project.project_type == "pebblejs": shutil.rmtree(base_dir) shutil.copytree(settings.PEBBLEJS_ROOT, base_dir) manifest_dict = generate_pebblejs_manifest_dict(project, resources) 
create_source_files(project, base_dir) for f in resources: if f.kind != "png": continue target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind])) abs_target = os.path.abspath(os.path.join(target_dir, f.file_name)) if not abs_target.startswith(target_dir): raise Exception("Suspicious filename: %s" % f.file_name) f.copy_to_path(ResourceVariant.VARIANT_DEFAULT, abs_target) open(os.path.join(base_dir, "appinfo.json"), "w").write(json.dumps(manifest_dict)) # Build the thing cwd = os.getcwd() success = False output = "Failed to get output" build_start_time = now() try: os.chdir(base_dir) if project.sdk_version == "2": tool = settings.SDK2_PEBBLE_TOOL elif project.sdk_version == "3": tool = settings.SDK3_PEBBLE_TOOL else: raise Exception("invalid sdk version.") output = subprocess.check_output([tool, "build"], stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits) except subprocess.CalledProcessError as e: output = e.output print output success = False except Exception as e: success = False output = str(e) else: success = True temp_file = os.path.join(base_dir, "build", "%s.pbw" % os.path.basename(base_dir)) if not os.path.exists(temp_file): success = False print "Success was a lie." finally: build_end_time = now() os.chdir(cwd) if success: # Try reading file sizes out of it first. try: s = os.stat(temp_file) build_result.total_size = s.st_size # Now peek into the zip to see the component parts with zipfile.ZipFile(temp_file, "r") as z: store_size_info(build_result, "aplite", z) store_size_info(build_result, "basalt", z) except Exception as e: print "Couldn't extract filesizes: %s" % e # Try pulling out debug information. 
if project.sdk_version == "2": save_debug_info( base_dir, build_result, BuildResult.DEBUG_APP, "aplite", os.path.join(base_dir, "build", "pebble-app.elf"), ) save_debug_info( base_dir, build_result, BuildResult.DEBUG_WORKER, "aplite", os.path.join(base_dir, "build", "pebble-worker.elf"), ) else: save_debug_info( base_dir, build_result, BuildResult.DEBUG_APP, "aplite", os.path.join(base_dir, "build", "aplite/pebble-app.elf"), ) save_debug_info( base_dir, build_result, BuildResult.DEBUG_WORKER, "aplite", os.path.join(base_dir, "build", "aplite/pebble-worker.elf"), ) save_debug_info( base_dir, build_result, BuildResult.DEBUG_APP, "basalt", os.path.join(base_dir, "build", "basalt/pebble-app.elf"), ) save_debug_info( base_dir, build_result, BuildResult.DEBUG_WORKER, "basalt", os.path.join(base_dir, "build", "basalt/pebble-worker.elf"), ) build_result.save_pbw(temp_file) build_result.save_build_log(output) build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED build_result.finished = now() build_result.save() data = { "data": { "cloudpebble": { "build_id": build_result.id, "job_run_time": (build_result.finished - build_result.started).total_seconds(), }, "build_time": (build_end_time - build_start_time).total_seconds(), } } event_name = "app_build_succeeded" if success else "app_build_failed" send_keen_event(["cloudpebble", "sdk"], event_name, data, project=project) except Exception as e: print "Build failed due to internal error: %s" % e traceback.print_exc() build_result.state = BuildResult.STATE_FAILED build_result.finished = now() try: build_result.save_build_log("Something broke:\n%s" % e) except: pass build_result.save() finally: # shutil.rmtree(base_dir) print base_dir
def github_push(user, commit_message, repo_name, project): g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET) repo = g.get_repo(repo_name) try: branch = repo.get_branch(project.github_branch or repo.master_branch) except GithubException: raise Exception("Unable to get branch.") commit = repo.get_git_commit(branch.commit.sha) tree = repo.get_git_tree(commit.tree.sha, recursive=True) paths = [x.path for x in tree.tree] next_tree = { x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree } try: root = find_project_root(paths) except: root = '' src_root = root + 'src/' project_sources = project.source_files.all() has_changed = False for source in project_sources: repo_path = src_root + source.file_name if repo_path not in next_tree: has_changed = True next_tree[repo_path] = InputGitTreeElement( path=repo_path, mode='100644', type='blob', content=source.get_contents()) print "New file: %s" % repo_path else: sha = next_tree[repo_path]._InputGitTreeElement__sha our_content = source.get_contents() expected_sha = git_sha(our_content) if expected_sha != sha: print "Updated file: %s" % repo_path next_tree[repo_path]._InputGitTreeElement__sha = NotSet next_tree[ repo_path]._InputGitTreeElement__content = our_content has_changed = True expected_source_files = [src_root + x.file_name for x in project_sources] for path in next_tree.keys(): if not path.startswith(src_root): continue if path not in expected_source_files: del next_tree[path] print "Deleted file: %s" % path has_changed = True # Now try handling resource files. 
resources = project.resources.all() resource_root = root + 'resources/' for res in resources: repo_path = resource_root + res.path if repo_path in next_tree: content = res.get_contents() if git_sha( content) != next_tree[repo_path]._InputGitTreeElement__sha: print "Changed resource: %s" % repo_path has_changed = True blob = repo.create_git_blob(base64.b64encode(content), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path]._InputGitTreeElement__sha = blob.sha else: print "New resource: %s" % repo_path blob = repo.create_git_blob(base64.b64encode(res.get_contents()), 'base64') print "Created blob %s" % blob.sha next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha) remote_manifest_path = root + 'appinfo.json' remote_wscript_path = root + 'wscript' remote_manifest_sha = next_tree[ remote_manifest_path]._InputGitTreeElement__sha if remote_manifest_path in next_tree else None if remote_manifest_sha is not None: their_manifest_dict = json.loads(git_blob(repo, remote_manifest_sha)) their_res_dict = their_manifest_dict['resources'] else: their_manifest_dict = {} their_res_dict = {'media': []} our_manifest_dict = generate_manifest_dict(project, resources) our_res_dict = our_manifest_dict['resources'] if our_res_dict != their_res_dict: print "Resources mismatch." has_changed = True # Try removing things that we've deleted, if any to_remove = set(x['file'] for x in their_res_dict['media']) - set( x['file'] for x in our_res_dict['media']) for path in to_remove: repo_path = resource_root + path if repo_path in next_tree: print "Deleted resource: %s" % repo_path del next_tree[repo_path] # This one is separate because there's more than just the resource map changing. 
if their_manifest_dict != our_manifest_dict: if remote_manifest_path in next_tree: next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet next_tree[ remote_manifest_path]._InputGitTreeElement__content = generate_manifest( project, resources) else: next_tree[remote_manifest_path] = InputGitTreeElement( path=remote_manifest_path, mode='100644', type='blob', content=generate_manifest(project, resources)) if project.project_type == 'native' and remote_wscript_path not in next_tree: next_tree[remote_wscript_path] = InputGitTreeElement( path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True)) has_changed = True # Commit the new tree. if has_changed: print "Has changed; committing" # GitHub seems to choke if we pass the raw directory nodes off to it, # so we delete those. for x in next_tree.keys(): if next_tree[x]._InputGitTreeElement__mode == '040000': del next_tree[x] print "removing subtree node %s" % x print[x._InputGitTreeElement__mode for x in next_tree.values()] git_tree = repo.create_git_tree(next_tree.values()) print "Created tree %s" % git_tree.sha git_commit = repo.create_git_commit(commit_message, git_tree, [commit]) print "Created commit %s" % git_commit.sha git_ref = repo.get_git_ref( 'heads/%s' % (project.github_branch or repo.master_branch)) git_ref.edit(git_commit.sha) print "Updated ref %s" % git_ref.ref project.github_last_commit = git_commit.sha project.github_last_sync = now() project.save() return True send_keen_event('cloudpebble', 'cloudpebble_github_push', user=user, data={'data': { 'repo': project.github_repo }}) return False
def github_push(user, commit_message, repo_name, project):
    """Push a CloudPebble project to its linked GitHub repository.

    Rebuilds a git tree on top of the branch head so it mirrors the project's
    sources (including worker and multi-JS layouts), resource variants,
    manifest and (for native projects) wscript, then creates a commit and
    advances the branch ref.

    Args:
        user: user whose GitHub OAuth token authenticates the API calls.
        commit_message: message for the commit to create.
        repo_name: 'owner/repo' name of the target repository.
        project: the CloudPebble project to push.

    Returns:
        True if a commit was created and the ref updated; False when the
        remote already matched the project.

    Raises:
        Exception: if the target branch cannot be fetched.
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)
    # Mutable copy of the remote tree. PyGithub's InputGitTreeElement has no
    # setters, so the name-mangled private fields are poked directly below.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}
    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, x) for x in tree.tree])
    except InvalidProjectArchiveException:
        # No recognisable project in the repo yet; use the repo root.
        root = ''
        manifest_item = None

    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    src_root = root + 'src/'
    worker_src_root = root + 'worker_src/'
    project_sources = project.source_files.all()
    has_changed = False
    # Add new source files and update modified ones; native projects route
    # worker sources and (optionally) JS sources to their own directories.
    for source in project_sources:
        repo_path = src_root + source.file_name
        if project.project_type == 'native':
            if source.target == 'worker':
                repo_path = worker_src_root + source.file_name
            elif project.app_modern_multi_js and source.file_name.endswith('.js'):
                repo_path = src_root + 'js/' + source.file_name
        update_expected_paths(repo_path)
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents())
            logger.debug("New file: %s", repo_path)
        else:
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                logger.debug("Updated file: %s", repo_path)
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True
    # Now try handling resource files.
    resources = project.resources.all()
    resource_root = root + 'resources/'
    for res in resources:
        # Each resource may have several variants (e.g. per-platform files).
        for variant in res.variants.all():
            repo_path = resource_root + variant.path
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    logger.debug("Changed resource: %s", repo_path)
                    has_changed = True
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    logger.debug("Created blob %s", blob.sha)
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                logger.debug("New resource: %s", repo_path)
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                logger.debug("Created blob %s", blob.sha)
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)
    # Manage deleted files
    # (anything under our roots that we didn't just mark as expected)
    for path in next_tree.keys():
        if not (any(path.startswith(root) for root in (src_root, resource_root, worker_src_root))):
            continue
        if path not in expected_paths:
            del next_tree[path]
            logger.debug("Deleted file: %s", path)
            has_changed = True
    # Compare the resource dicts
    remote_manifest_path = root + manifest_name_for_project(project)
    remote_wscript_path = root + 'wscript'
    if manifest_item:
        their_manifest_dict = json.loads(manifest_item.read())
        # Resources may live at the top level (appinfo.json) or under a
        # 'pebble' key (package.json); fall back to an empty media list.
        their_res_dict = their_manifest_dict.get('resources', their_manifest_dict.get('pebble', their_manifest_dict).get('resources', {'media': []}))
        # If the manifest needs a new path (e.g. it is now package.json), delete the old one
        if manifest_item.path != remote_manifest_path:
            del next_tree[manifest_item.path]
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}
    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict.get('resources', our_manifest_dict.get('pebble', our_manifest_dict).get('resources', {'media': []}))
    if our_res_dict != their_res_dict:
        logger.debug("Resources mismatch.")
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                logger.debug("Deleted resource: %s", repo_path)
                del next_tree[repo_path]
    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_manifest(project, resources))
    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True))
        has_changed = True
    # Commit the new tree.
    if has_changed:
        logger.debug("Has changed; committing")
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                logger.debug("removing subtree node %s", x)
        logger.debug([x._InputGitTreeElement__mode for x in next_tree.values()])
        git_tree = repo.create_git_tree(next_tree.values())
        logger.debug("Created tree %s", git_tree.sha)
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        logger.debug("Created commit %s", git_commit.sha)
        git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        logger.debug("Updated ref %s", git_ref.ref)
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    # Nothing to push; record the (no-op) push attempt.
    send_td_event('cloudpebble_github_push', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)
    return False
def github_push(user, commit_message, repo_name, project):
    """Push a CloudPebble project to its linked GitHub repository.

    Rebuilds a git tree on top of the branch head so it mirrors the project's
    sources (via each file's project_path), resource variants, manifest and
    (for native projects) wscript, then creates a commit and advances the
    branch ref.

    Args:
        user: user whose GitHub OAuth token authenticates the API calls.
        commit_message: message for the commit to create.
        repo_name: 'owner/repo' name of the target repository.
        project: the CloudPebble project to push.

    Returns:
        True if a commit was created and the ref updated; False when the
        remote already matched the project.

    Raises:
        Exception: if the target branch cannot be fetched.
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)
    # Mutable copy of the remote tree. PyGithub's InputGitTreeElement has no
    # setters, so the name-mangled private fields are poked directly below.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}
    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, x) for x in tree.tree])
    except InvalidProjectArchiveException:
        # No recognisable project in the repo yet; use the repo root.
        root = ''
        manifest_item = None

    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    project_sources = project.source_files.all()
    has_changed = False
    # Add new source files and update modified ones; each source knows its own
    # repository-relative location via project_path.
    for source in project_sources:
        repo_path = os.path.join(root, source.project_path)
        update_expected_paths(repo_path)
        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents())
            logger.debug("New file: %s", repo_path)
        else:
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                logger.debug("Updated file: %s", repo_path)
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True
    # Now try handling resource files.
    resources = project.resources.all()
    resource_root = project.resources_path
    for res in resources:
        # Each resource may have several variants (e.g. per-platform files).
        for variant in res.variants.all():
            repo_path = os.path.join(resource_root, variant.path)
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    logger.debug("Changed resource: %s", repo_path)
                    has_changed = True
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    logger.debug("Created blob %s", blob.sha)
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                logger.debug("New resource: %s", repo_path)
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                logger.debug("Created blob %s", blob.sha)
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)
    # Manage deleted files
    # (anything under our roots that we didn't just mark as expected)
    src_root = os.path.join(root, 'src')
    worker_src_root = os.path.join(root, 'worker_src')
    for path in next_tree.keys():
        # NOTE: the genexp variable `root` shadows the outer project root here
        # (harmless in a generator expression, but easy to misread).
        if not (any(path.startswith(root+'/') for root in (src_root, resource_root, worker_src_root))):
            continue
        if path not in expected_paths:
            del next_tree[path]
            logger.debug("Deleted file: %s", path)
            has_changed = True
    # Compare the resource dicts
    remote_manifest_path = root + manifest_name_for_project(project)
    remote_wscript_path = root + 'wscript'
    if manifest_item:
        their_manifest_dict = json.loads(manifest_item.read())
        # Resources may live at the top level (appinfo.json) or under a
        # 'pebble' key (package.json); fall back to an empty media list.
        their_res_dict = their_manifest_dict.get('resources', their_manifest_dict.get('pebble', their_manifest_dict).get('resources', {'media': []}))
        # If the manifest needs a new path (e.g. it is now package.json), delete the old one
        if manifest_item.path != remote_manifest_path:
            del next_tree[manifest_item.path]
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}
    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict.get('resources', our_manifest_dict.get('pebble', our_manifest_dict).get('resources', {'media': []}))
    if our_res_dict != their_res_dict:
        logger.debug("Resources mismatch.")
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            # NOTE(review): plain concatenation here vs os.path.join above --
            # if resources_path has no trailing slash this may not match the
            # keys written earlier; confirm against project.resources_path.
            repo_path = resource_root + path
            if repo_path in next_tree:
                logger.debug("Deleted resource: %s", repo_path)
                del next_tree[repo_path]
    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_manifest(project, resources))
    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True))
        has_changed = True
    # Commit the new tree.
    if has_changed:
        logger.debug("Has changed; committing")
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        for x in next_tree.keys():
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                logger.debug("removing subtree node %s", x)
        logger.debug([x._InputGitTreeElement__mode for x in next_tree.values()])
        git_tree = repo.create_git_tree(next_tree.values())
        logger.debug("Created tree %s", git_tree.sha)
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        logger.debug("Created commit %s", git_commit.sha)
        git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        logger.debug("Updated ref %s", git_ref.ref)
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    # Nothing to push; record the (no-op) push attempt.
    send_td_event('cloudpebble_github_push', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)
    return False
def run_compile(build_result):
    """Assemble and build a project, recording the outcome on its BuildResult.

    Copies the project's sources and resources into a scratch directory,
    runs the SDK2/SDK3 waf build there, then stores the pbw, size info,
    debug info and build log back onto the BuildResult row.

    :param build_result: primary key of the BuildResult to process.
    """
    build_result = BuildResult.objects.get(pk=build_result)
    project = build_result.project
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)

    def _write_file(path, contents):
        # Write and close promptly. The original used open().write(),
        # which leaked the file handle until garbage collection.
        with open(path, 'w') as f:
            f.write(contents)

    # Assemble the project somewhere
    base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, 'tmp') if settings.CHROOT_ROOT else None)

    try:
        # Resources
        resource_root = 'resources'
        os.makedirs(os.path.join(base_dir, resource_root, 'images'))
        os.makedirs(os.path.join(base_dir, resource_root, 'fonts'))
        os.makedirs(os.path.join(base_dir, resource_root, 'data'))

        if project.project_type == 'native':
            # Source code
            create_source_files(project, base_dir)

            manifest_dict = generate_manifest_dict(project, resources)
            _write_file(os.path.join(base_dir, 'appinfo.json'), json.dumps(manifest_dict))

            for f in resources:
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                # Path-traversal guard: the pebblejs branch below checks
                # this, but the native branch computed abs_target and never
                # used it. Reject filenames that escape the resource dir.
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.copy_all_variants_to_dir(target_dir)

            # Reconstitute the SDK
            _write_file(os.path.join(base_dir, 'wscript'), generate_wscript_file(project))
            _write_file(os.path.join(base_dir, 'pebble-jshintrc'), generate_jshint_file(project))
        elif project.project_type == 'simplyjs':
            # SimplyJS: replace the scratch dir with a copy of the runtime
            # and inject the user's concatenated script.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.SIMPLYJS_ROOT, base_dir)
            manifest_dict = generate_simplyjs_manifest_dict(project)
            js = '\n\n'.join(x.get_contents() for x in source_files if x.file_name.endswith('.js'))
            escaped_js = json.dumps(js)
            build_result.save_simplyjs(js)

            _write_file(os.path.join(base_dir, 'appinfo.json'), json.dumps(manifest_dict))
            # 'zzz_' prefix sorts the userscript after the runtime scripts.
            _write_file(os.path.join(base_dir, 'src', 'js', 'zzz_userscript.js'), """
(function() {
    simply.mainScriptSource = %s;
})();
""" % escaped_js)
        elif project.project_type == 'pebblejs':
            # PebbleJS projects build on a full copy of the pebblejs tree.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.PEBBLEJS_ROOT, base_dir)
            manifest_dict = generate_pebblejs_manifest_dict(project, resources)
            create_source_files(project, base_dir)

            for f in resources:
                # Only image resources are supported by pebblejs.
                if f.kind not in ('png', 'bitmap'):
                    continue
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.get_default_variant().copy_to_path(abs_target)

            _write_file(os.path.join(base_dir, 'appinfo.json'), json.dumps(manifest_dict))

        # Build the thing
        cwd = os.getcwd()
        success = False
        output = 'Failed to get output'
        build_start_time = now()

        try:
            os.chdir(base_dir)
            if project.sdk_version == '2':
                waf = settings.SDK2_PEBBLE_WAF
            elif project.sdk_version == '3':
                waf = settings.SDK3_PEBBLE_WAF
            else:
                raise Exception("invalid sdk version.")
            # Run waf with the ARM toolchain on PATH. Always work on a COPY
            # of the environment: the original SDK2 branch mutated
            # os.environ in place, permanently growing PATH for the whole
            # worker process on every build.
            environ = os.environ.copy()
            environ['PATH'] = '{}:{}'.format(settings.ARM_CS_TOOLS, environ['PATH'])
            command = [waf, "configure", "build"]
            output = subprocess.check_output(command, stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits, env=environ)
        except subprocess.CalledProcessError as e:
            # Build tool ran but failed; its combined stdout/stderr is the log.
            output = e.output
            print(output)
            success = False
        except Exception as e:
            success = False
            output = str(e)
        else:
            success = True
            temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir))
            if not os.path.exists(temp_file):
                success = False
                print("Success was a lie.")
        finally:
            build_end_time = now()
            os.chdir(cwd)

            if success:
                # Try reading file sizes out of it first.
                try:
                    s = os.stat(temp_file)
                    build_result.total_size = s.st_size
                    # Now peek into the zip to see the component parts
                    with zipfile.ZipFile(temp_file, 'r') as z:
                        store_size_info(project, build_result, 'aplite', z)
                        store_size_info(project, build_result, 'basalt', z)
                        store_size_info(project, build_result, 'chalk', z)
                except Exception as e:
                    print("Couldn't extract filesizes: %s" % e)

                # Try pulling out debug information.
                if project.sdk_version == '2':
                    # SDK2 emits a single pair of ELFs at the build root.
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'pebble-worker.elf'))
                else:
                    # SDK3 emits one app/worker ELF pair per platform.
                    for platform in ('aplite', 'basalt', 'chalk'):
                        save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, platform, os.path.join(base_dir, 'build', '%s/pebble-app.elf' % platform))
                        save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, platform, os.path.join(base_dir, 'build', '%s/pebble-worker.elf' % platform))

                build_result.save_pbw(temp_file)
            build_result.save_build_log(output)
            build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED
            build_result.finished = now()
            build_result.save()

            data = {
                'data': {
                    'cloudpebble': {
                        'build_id': build_result.id,
                        'job_run_time': (build_result.finished - build_result.started).total_seconds(),
                    },
                    'build_time': (build_end_time - build_start_time).total_seconds(),
                }
            }

            event_name = 'app_build_succeeded' if success else 'app_build_failed'
            send_keen_event(['cloudpebble', 'sdk'], event_name, data, project=project)
    except Exception as e:
        print("Build failed due to internal error: %s" % e)
        traceback.print_exc()
        build_result.state = BuildResult.STATE_FAILED
        build_result.finished = now()
        try:
            # Best effort; the log store itself may be what broke.
            build_result.save_build_log("Something broke:\n%s" % e)
        except Exception:
            pass
        build_result.save()
    finally:
        # NOTE(review): cleanup is disabled, so every build leaks its temp
        # directory. This looks like a debugging leftover — restore the
        # rmtree once confirmed.
        # shutil.rmtree(base_dir)
        print(base_dir)
def run_compile(build_result):
    """Assemble and build a project (legacy SDK path) for a BuildResult id.

    SDK 1 projects are rejected immediately. Otherwise the project is
    assembled into a scratch directory and built either inside the chroot
    jail (when configured) or directly with the pebble tool; the pbw, size
    figures, debug info and log are stored on the BuildResult.

    :param build_result: primary key of the BuildResult to process.
    """
    build_result = BuildResult.objects.get(pk=build_result)
    project = build_result.project
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)

    if project.sdk_version == '1':
        # SDK 1 can no longer be built; fail fast without assembling anything.
        build_result.state = BuildResult.STATE_FAILED
        build_result.finished = now()
        build_result.save()
        return

    def _write_file(path, contents):
        # Write and close promptly. The original used open().write(),
        # which leaked the file handle until garbage collection.
        with open(path, 'w') as f:
            f.write(contents)

    # Assemble the project somewhere
    base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, 'tmp') if settings.CHROOT_ROOT else None)

    try:
        if project.project_type == 'native':
            # Source code
            src_dir = os.path.join(base_dir, 'src')
            os.mkdir(src_dir)
            create_source_files(source_files, src_dir)

            # Resources
            resource_root = 'resources'
            os.makedirs(os.path.join(base_dir, resource_root, 'images'))
            os.makedirs(os.path.join(base_dir, resource_root, 'fonts'))
            os.makedirs(os.path.join(base_dir, resource_root, 'data'))

            manifest_dict = generate_v2_manifest_dict(project, resources)
            _write_file(os.path.join(base_dir, 'appinfo.json'), json.dumps(manifest_dict))

            for f in resources:
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                # Reject resource filenames that escape the resource dir.
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.copy_to_path(abs_target)

            # Reconstitute the SDK
            _write_file(os.path.join(base_dir, 'wscript'), generate_wscript_file(project))
            _write_file(os.path.join(base_dir, 'pebble-jshintrc'), generate_jshint_file(project))
        elif project.project_type == 'simplyjs':
            # base_dir is still empty here, so a plain rmdir suffices before
            # cloning the SimplyJS runtime over it.
            os.rmdir(base_dir)
            shutil.copytree(settings.SIMPLYJS_ROOT, base_dir)
            manifest_dict = generate_simplyjs_manifest_dict(project)
            js = '\n\n'.join(x.get_contents() for x in source_files if x.file_name.endswith('.js'))
            escaped_js = json.dumps(js)
            build_result.save_simplyjs(js)

            _write_file(os.path.join(base_dir, 'appinfo.json'), json.dumps(manifest_dict))
            # 'zzz_' prefix sorts the userscript after the runtime scripts.
            _write_file(os.path.join(base_dir, 'src', 'js', 'zzz_userscript.js'), """
(function() {
    simply.mainScriptSource = %s;
})();
""" % escaped_js)
        elif project.project_type == 'pebblejs':
            os.rmdir(base_dir)
            shutil.copytree(settings.PEBBLEJS_ROOT, base_dir)
            manifest_dict = generate_pebblejs_manifest_dict(project)
            create_source_files(source_files, os.path.join(base_dir, 'src', 'js'))
            _write_file(os.path.join(base_dir, 'appinfo.json'), json.dumps(manifest_dict))

        # Build the thing
        cwd = os.getcwd()
        success = False
        output = 'Failed to get output'
        try:
            if settings.CHROOT_JAIL is not None:
                # Build inside the jail; pass the path relative to the
                # chroot root.
                output = subprocess.check_output([settings.CHROOT_JAIL, project.sdk_version, base_dir[len(settings.CHROOT_ROOT):]], stderr=subprocess.STDOUT)
            else:
                os.chdir(base_dir)
                output = subprocess.check_output([settings.PEBBLE_TOOL, "build"], stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)
        except subprocess.CalledProcessError as e:
            # Build tool ran but failed; its combined stdout/stderr is the log.
            output = e.output
            print(output)
            success = False
        else:
            success = True
            temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir))
            if not os.path.exists(temp_file):
                success = False
                print("Success was a lie.")
        finally:
            # Harmless when the jail branch ran and we never chdir'd away.
            os.chdir(cwd)

            if success:
                # Try reading file sizes out of it first.
                try:
                    s = os.stat(temp_file)
                    build_result.total_size = s.st_size
                    # Now peek into the zip to see the component parts
                    with zipfile.ZipFile(temp_file, 'r') as z:
                        build_result.binary_size = z.getinfo('pebble-app.bin').file_size
                        build_result.resource_size = z.getinfo('app_resources.pbpack').file_size
                except Exception as e:
                    print("Couldn't extract filesizes: %s" % e)

                # Try pulling out debug information.
                elf_file = os.path.join(base_dir, 'build', 'pebble-app.elf')
                if os.path.exists(elf_file):
                    try:
                        debug_info = apptools.addr2lines.create_coalesced_group(elf_file)
                    except Exception:
                        # Debug info is optional; log and carry on.
                        print(traceback.format_exc())
                    else:
                        build_result.save_debug_info(debug_info)

                build_result.save_pbw(temp_file)
                send_keen_event(['cloudpebble', 'sdk'], 'app_build_succeeded', data={'data': {'cloudpebble_build_id': build_result.id}}, project=project)
            else:
                send_keen_event(['cloudpebble', 'sdk'], 'app_build_failed', data={'data': {'cloudpebble_build_id': build_result.id}}, project=project)

            build_result.save_build_log(output)
            build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED
            build_result.finished = now()
            build_result.save()
    except Exception as e:
        print("Build failed due to internal error: %s" % e)
        traceback.print_exc()
        build_result.state = BuildResult.STATE_FAILED
        build_result.finished = now()
        try:
            # Best effort; the log store itself may be what broke.
            build_result.save_build_log("Something broke:\n%s" % e)
        except Exception:
            pass
        build_result.save()
    finally:
        shutil.rmtree(base_dir)