def add_project_to_archive(z, project, prefix=''):
    """Write a project's sources, resources and manifest into the open
    zip archive *z*, under a folder derived from the project's name.

    For native projects, worker sources go to ``worker_src`` and (when the
    modern multi-JS layout is enabled) JS sources go to ``src/js``; a
    ``wscript`` and ``jshintrc`` are also emitted so the export builds.
    """
    sources = SourceFile.objects.filter(project=project)
    resource_files = ResourceFile.objects.filter(project=project)
    # Sanitise the project name into a filesystem-friendly folder name.
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()

    is_native = project.project_type == 'native'
    for src in sources:
        if is_native and src.target == 'worker':
            folder = 'worker_src'
        elif is_native and project.app_modern_multi_js and src.file_name.endswith('.js'):
            folder = 'src/js'
        else:
            folder = 'src'
        z.writestr('%s/%s/%s' % (prefix, folder, src.file_name), src.get_contents())

    for resource_file in resource_files:
        for variant in resource_file.variants.all():
            z.writestr('%s/%s/%s' % (prefix, 'resources', variant.path), variant.get_contents())

    z.writestr('%s/%s' % (prefix, manifest_name_for_project(project)),
               generate_manifest(project, resource_files))

    if is_native:
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix=''):
    """Add every file of *project* to the open zip archive *z*.

    Sources land in ``src`` (or ``worker_src`` / ``src/js`` for native
    projects, depending on target and the multi-JS setting), resource
    variants in ``resources``, plus the generated manifest; native
    projects additionally get a ``wscript`` and ``jshintrc``.
    """
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)
    # Turn the project name into a safe lowercase folder name for the archive.
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()
    for source in source_files:
        src_dir = 'src'
        if project.project_type == 'native':
            # Native projects separate worker sources and, optionally, JS sources.
            if source.target == 'worker':
                src_dir = 'worker_src'
            elif project.app_modern_multi_js and source.file_name.endswith('.js'):
                src_dir = 'src/js'
        z.writestr('%s/%s/%s' % (prefix, src_dir, source.file_name), source.get_contents())
    for resource in resources:
        res_path = 'resources'
        # Each resource may have several variants; archive them all.
        for variant in resource.variants.all():
            z.writestr('%s/%s/%s' % (prefix, res_path, variant.path), variant.get_contents())
    manifest = generate_manifest(project, resources)
    manifest_name = manifest_name_for_project(project)
    z.writestr('%s/%s' % (prefix, manifest_name), manifest)
    if project.project_type == 'native':
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix=''):
    """Serialise a project's sources, resources and manifest into the open
    zip archive *z*, inside a directory named after the sanitised project
    name. Standard project types also get a ``wscript`` and ``jshintrc``.
    """
    project_sources = SourceFile.objects.filter(project=project)
    project_resources = ResourceFile.objects.filter(project=project)
    # Build a filesystem-safe, lowercase folder name from the project name.
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()

    for source_file in project_sources:
        z.writestr(os.path.join(prefix, source_file.project_path), source_file.get_contents())

    for resource_file in project_resources:
        for variant in resource_file.variants.all():
            archive_path = '%s/%s/%s' % (prefix, project.resources_path, variant.path)
            z.writestr(archive_path, variant.get_contents())

    z.writestr('%s/%s' % (prefix, manifest_name_for_project(project)),
               generate_manifest(project, project_resources))

    if project.is_standard_project_type:
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def add_project_to_archive(z, project, prefix='', suffix=''):
    """Add every file of *project* to the open zip archive *z*.

    Files are placed under ``prefix + <sanitised project name> + suffix``.
    Sources use their project-relative path, resource variants go under
    the project's resources path, and the manifest is generated; standard
    project types additionally get a ``wscript`` and ``jshintrc``.
    """
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)
    # Archive folder: sanitised lowercase project name, plus an optional suffix
    # (used e.g. to disambiguate multiple projects in one archive).
    prefix += re.sub(r'[^\w]+', '_', project.name).strip('_').lower()
    prefix += suffix
    for source in source_files:
        path = os.path.join(prefix, source.project_path)
        z.writestr(path, source.get_contents())
    for resource in resources:
        # Every variant of every resource is archived.
        for variant in resource.variants.all():
            z.writestr('%s/%s/%s' % (prefix, project.resources_path, variant.path), variant.get_contents())
    manifest = generate_manifest(project, resources)
    manifest_name = manifest_name_for_project(project)
    z.writestr('%s/%s' % (prefix, manifest_name), manifest)
    if project.is_standard_project_type:
        # This file is always the same, but needed to build.
        z.writestr('%s/wscript' % prefix, generate_wscript_file(project, for_export=True))
        z.writestr('%s/jshintrc' % prefix, generate_jshint_file(project))
def github_push(user, commit_message, repo_name, project):
    """Push the current state of *project* to the GitHub repo *repo_name*.

    Builds a new git tree from the remote branch's tree, overlaying the
    project's sources, resources and manifest, then commits and advances
    the branch ref if anything differs.

    Returns True if a commit was created and pushed, False if nothing
    changed.

    Raises Exception if the target branch cannot be fetched.
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    # Start from the remote tree; entries are mutated/replaced in place below.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}

    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, x) for x in tree.tree])
    except InvalidProjectArchiveException:
        root = ''
        manifest_item = None

    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    src_root = root + 'src/'
    worker_src_root = root + 'worker_src/'
    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        repo_path = src_root + source.file_name
        if project.project_type == 'native':
            # Native projects separate worker sources and (optionally) JS sources.
            if source.target == 'worker':
                repo_path = worker_src_root + source.file_name
            elif project.app_modern_multi_js and source.file_name.endswith('.js'):
                repo_path = src_root + 'js/' + source.file_name
        update_expected_paths(repo_path)

        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents())
            logger.debug("New file: %s", repo_path)
        else:
            # Compare our content's git sha with the remote blob's sha to detect edits.
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                logger.debug("Updated file: %s", repo_path)
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    # Now try handling resource files.
    resources = project.resources.all()
    resource_root = root + 'resources/'
    for res in resources:
        for variant in res.variants.all():
            repo_path = resource_root + variant.path
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    logger.debug("Changed resource: %s", repo_path)
                    has_changed = True
                    # Binary content must go up as a base64 blob, not inline text.
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    logger.debug("Created blob %s", blob.sha)
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                logger.debug("New resource: %s", repo_path)
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                logger.debug("Created blob %s", blob.sha)
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)

    # Manage deleted files.
    # Fix: snapshot the keys before deleting — mutating a dict while iterating
    # its live key view raises RuntimeError on Python 3. The generator variable
    # is also renamed so it no longer shadows the outer `root`.
    for path in list(next_tree.keys()):
        if not any(path.startswith(tree_root) for tree_root in (src_root, resource_root, worker_src_root)):
            continue
        if path not in expected_paths:
            del next_tree[path]
            logger.debug("Deleted file: %s", path)
            has_changed = True

    # Compare the resource dicts
    remote_manifest_path = root + manifest_name_for_project(project)
    remote_wscript_path = root + 'wscript'

    if manifest_item:
        their_manifest_dict = json.loads(manifest_item.read())
        their_res_dict = their_manifest_dict.get('resources', their_manifest_dict.get('pebble', their_manifest_dict).get('resources', {'media': []}))
        # If the manifest needs a new path (e.g. it is now package.json), delete the old one
        if manifest_item.path != remote_manifest_path:
            del next_tree[manifest_item.path]
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}

    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict.get('resources', our_manifest_dict.get('pebble', our_manifest_dict).get('resources', {'media': []}))

    if our_res_dict != their_res_dict:
        logger.debug("Resources mismatch.")
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                logger.debug("Deleted resource: %s", repo_path)
                del next_tree[repo_path]

    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_manifest(project, resources))

    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True))
        has_changed = True

    # Commit the new tree.
    if has_changed:
        logger.debug("Has changed; committing")
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        # Fix: iterate over a snapshot of the keys (see deleted-files loop above).
        for x in list(next_tree.keys()):
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                logger.debug("removing subtree node %s", x)

        logger.debug([x._InputGitTreeElement__mode for x in next_tree.values()])
        git_tree = repo.create_git_tree(next_tree.values())
        logger.debug("Created tree %s", git_tree.sha)
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        logger.debug("Created commit %s", git_commit.sha)
        git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        logger.debug("Updated ref %s", git_ref.ref)
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    # NOTE(review): this analytics event only fires when *no* commit was made
    # (the has_changed branch returns early) — confirm this is intentional.
    send_td_event('cloudpebble_github_push', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)

    return False
def github_push(user, commit_message, repo_name, project):
    """Push the current state of *project* to the GitHub repo *repo_name*.

    Builds a new git tree from the remote branch's tree, overlaying the
    project's sources (via each source's ``project_path``), resources and
    manifest, then commits and advances the branch ref if anything differs.

    Returns True if a commit was created and pushed, False if nothing
    changed.

    Raises Exception if the target branch cannot be fetched.
    """
    g = Github(user.github.token, client_id=settings.GITHUB_CLIENT_ID, client_secret=settings.GITHUB_CLIENT_SECRET)
    repo = g.get_repo(repo_name)
    try:
        branch = repo.get_branch(project.github_branch or repo.master_branch)
    except GithubException:
        raise Exception("Unable to get branch.")
    commit = repo.get_git_commit(branch.commit.sha)
    tree = repo.get_git_tree(commit.tree.sha, recursive=True)

    # Start from the remote tree; entries are mutated/replaced in place below.
    next_tree = {x.path: InputGitTreeElement(path=x.path, mode=x.mode, type=x.type, sha=x.sha) for x in tree.tree}

    try:
        root, manifest_item = find_project_root_and_manifest([GitProjectItem(repo, x) for x in tree.tree])
    except InvalidProjectArchiveException:
        root = ''
        manifest_item = None

    expected_paths = set()

    def update_expected_paths(new_path):
        # This adds the path *and* its parent directories to the list of expected paths.
        # The parent directories are already keys in next_tree, so if they aren't present in expected_paths
        # then, when iterating over next_tree to see which files have been deleted, we would have to treat
        # directories as special cases.
        split_path = new_path.split('/')
        expected_paths.update('/'.join(split_path[:p]) for p in range(2, len(split_path) + 1))

    project_sources = project.source_files.all()
    has_changed = False
    for source in project_sources:
        repo_path = os.path.join(root, source.project_path)
        update_expected_paths(repo_path)

        if repo_path not in next_tree:
            has_changed = True
            next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents())
            logger.debug("New file: %s", repo_path)
        else:
            # Compare our content's git sha with the remote blob's sha to detect edits.
            sha = next_tree[repo_path]._InputGitTreeElement__sha
            our_content = source.get_contents()
            expected_sha = git_sha(our_content)
            if expected_sha != sha:
                logger.debug("Updated file: %s", repo_path)
                next_tree[repo_path]._InputGitTreeElement__sha = NotSet
                next_tree[repo_path]._InputGitTreeElement__content = our_content
                has_changed = True

    # Now try handling resource files.
    resources = project.resources.all()
    resource_root = project.resources_path
    for res in resources:
        for variant in res.variants.all():
            repo_path = os.path.join(resource_root, variant.path)
            update_expected_paths(repo_path)
            if repo_path in next_tree:
                content = variant.get_contents()
                if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha:
                    logger.debug("Changed resource: %s", repo_path)
                    has_changed = True
                    # Binary content must go up as a base64 blob, not inline text.
                    blob = repo.create_git_blob(base64.b64encode(content), 'base64')
                    logger.debug("Created blob %s", blob.sha)
                    next_tree[repo_path]._InputGitTreeElement__sha = blob.sha
            else:
                logger.debug("New resource: %s", repo_path)
                has_changed = True
                blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64')
                logger.debug("Created blob %s", blob.sha)
                next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha)

    # Manage deleted files
    src_root = os.path.join(root, 'src')
    worker_src_root = os.path.join(root, 'worker_src')
    # Fix: snapshot the keys before deleting — mutating a dict while iterating
    # its live key view raises RuntimeError on Python 3. The generator variable
    # is also renamed so it no longer shadows the outer `root`.
    for path in list(next_tree.keys()):
        if not any(path.startswith(tree_root + '/') for tree_root in (src_root, resource_root, worker_src_root)):
            continue
        if path not in expected_paths:
            del next_tree[path]
            logger.debug("Deleted file: %s", path)
            has_changed = True

    # Compare the resource dicts
    remote_manifest_path = root + manifest_name_for_project(project)
    remote_wscript_path = root + 'wscript'

    if manifest_item:
        their_manifest_dict = json.loads(manifest_item.read())
        their_res_dict = their_manifest_dict.get('resources', their_manifest_dict.get('pebble', their_manifest_dict).get('resources', {'media': []}))
        # If the manifest needs a new path (e.g. it is now package.json), delete the old one
        if manifest_item.path != remote_manifest_path:
            del next_tree[manifest_item.path]
    else:
        their_manifest_dict = {}
        their_res_dict = {'media': []}

    our_manifest_dict = generate_manifest_dict(project, resources)
    our_res_dict = our_manifest_dict.get('resources', our_manifest_dict.get('pebble', our_manifest_dict).get('resources', {'media': []}))

    if our_res_dict != their_res_dict:
        logger.debug("Resources mismatch.")
        has_changed = True
        # Try removing things that we've deleted, if any
        to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media'])
        for path in to_remove:
            repo_path = resource_root + path
            if repo_path in next_tree:
                logger.debug("Deleted resource: %s", repo_path)
                del next_tree[repo_path]

    # This one is separate because there's more than just the resource map changing.
    if their_manifest_dict != our_manifest_dict:
        has_changed = True
        if remote_manifest_path in next_tree:
            next_tree[remote_manifest_path]._InputGitTreeElement__sha = NotSet
            next_tree[remote_manifest_path]._InputGitTreeElement__content = generate_manifest(project, resources)
        else:
            next_tree[remote_manifest_path] = InputGitTreeElement(path=remote_manifest_path, mode='100644', type='blob', content=generate_manifest(project, resources))

    if project.project_type == 'native' and remote_wscript_path not in next_tree:
        next_tree[remote_wscript_path] = InputGitTreeElement(path=remote_wscript_path, mode='100644', type='blob', content=generate_wscript_file(project, True))
        has_changed = True

    # Commit the new tree.
    if has_changed:
        logger.debug("Has changed; committing")
        # GitHub seems to choke if we pass the raw directory nodes off to it,
        # so we delete those.
        # Fix: iterate over a snapshot of the keys (see deleted-files loop above).
        for x in list(next_tree.keys()):
            if next_tree[x]._InputGitTreeElement__mode == '040000':
                del next_tree[x]
                logger.debug("removing subtree node %s", x)

        logger.debug([x._InputGitTreeElement__mode for x in next_tree.values()])
        git_tree = repo.create_git_tree(next_tree.values())
        logger.debug("Created tree %s", git_tree.sha)
        git_commit = repo.create_git_commit(commit_message, git_tree, [commit])
        logger.debug("Created commit %s", git_commit.sha)
        git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch))
        git_ref.edit(git_commit.sha)
        logger.debug("Updated ref %s", git_ref.ref)
        project.github_last_commit = git_commit.sha
        project.github_last_sync = now()
        project.save()
        return True

    # NOTE(review): this analytics event only fires when *no* commit was made
    # (the has_changed branch returns early) — confirm this is intentional.
    send_td_event('cloudpebble_github_push', data={
        'data': {
            'repo': project.github_repo
        }
    }, user=user)

    return False
def run_compile(build_result):
    """Assemble a project into a temp directory, run the SDK build, and
    record the outcome on the BuildResult row.

    *build_result* is a BuildResult primary key. The function writes the
    resulting .pbw, build log, sizes and debug info to the BuildResult,
    sets its state to STATE_SUCCEEDED/STATE_FAILED, and always removes
    the temp directory.
    """
    build_result = BuildResult.objects.get(pk=build_result)
    project = build_result.project
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)

    # Assemble the project somewhere
    # (inside the chroot's tmp dir when a chroot is configured).
    base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, 'tmp') if settings.CHROOT_ROOT else None)

    manifest_filename = manifest_name_for_project(project)

    try:
        # Resources
        resource_root = 'resources'
        os.makedirs(os.path.join(base_dir, resource_root, 'images'))
        os.makedirs(os.path.join(base_dir, resource_root, 'fonts'))
        os.makedirs(os.path.join(base_dir, resource_root, 'data'))

        if project.project_type == 'native':
            # Source code
            create_source_files(project, base_dir)

            manifest_dict = generate_manifest_dict(project, resources)
            # NOTE(review): open().write() relies on refcounting to close the
            # file — fine on CPython, but no explicit close on error paths.
            open(os.path.join(base_dir, manifest_filename), 'w').write(json.dumps(manifest_dict))

            for f in resources:
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                # NOTE(review): abs_target is computed but unused in this branch.
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                f.copy_all_variants_to_dir(target_dir)

            # Reconstitute the SDK
            open(os.path.join(base_dir, 'wscript'), 'w').write(generate_wscript_file(project))
            open(os.path.join(base_dir, 'pebble-jshintrc'), 'w').write(generate_jshint_file(project))
        elif project.project_type == 'simplyjs':
            # Simply.js projects build from a copy of the Simply.js tree with
            # the user's JS injected as a single script.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.SIMPLYJS_ROOT, base_dir)
            manifest_dict = generate_simplyjs_manifest_dict(project)

            js = '\n\n'.join(x.get_contents() for x in source_files if x.file_name.endswith('.js'))
            # json.dumps produces a valid JS string literal of the user code.
            escaped_js = json.dumps(js)
            build_result.save_simplyjs(js)

            open(os.path.join(base_dir, manifest_filename), 'w').write(json.dumps(manifest_dict))
            open(os.path.join(base_dir, 'src', 'js', 'zzz_userscript.js'), 'w').write("""
(function() {
    simply.mainScriptSource = %s;
})();
""" % escaped_js)
        elif project.project_type == 'pebblejs':
            shutil.rmtree(base_dir)
            shutil.copytree(settings.PEBBLEJS_ROOT, base_dir)
            manifest_dict = generate_pebblejs_manifest_dict(project, resources)
            create_source_files(project, base_dir)

            for f in resources:
                # Pebble.js only supports image resources.
                if f.kind not in ('png', 'bitmap'):
                    continue
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                # Guard against path traversal via a crafted file name.
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.get_default_variant().copy_to_path(abs_target)

            open(os.path.join(base_dir, manifest_filename), 'w').write(json.dumps(manifest_dict))

        # Build the thing
        cwd = os.getcwd()
        success = False
        output = ''
        build_start_time = now()

        try:
            os.chdir(base_dir)
            # Install dependencies if there are any
            dependencies = project.get_dependencies()
            if dependencies:
                # Checking for path-based dependencies is performed by the database so in theory we shouldn't need to do
                # it here but we will do it anyway just to be extra safe.
                for version in dependencies.values():
                    validate_dependency_version(version)
                npm_command = [settings.NPM_BINARY, "install", "--ignore-scripts"]
                output = subprocess.check_output(npm_command, stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)
                subprocess.check_output([settings.NPM_BINARY, "dedupe"], stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)

            # Select the waf entry point for the project's SDK version; the
            # ARM toolchain is prepended to PATH for the build.
            if project.sdk_version == '2':
                environ = os.environ.copy()
                environ['PATH'] = '{}:{}'.format(settings.ARM_CS_TOOLS, environ['PATH'])
                command = [settings.SDK2_PEBBLE_WAF, "configure", "build"]
            elif project.sdk_version == '3':
                environ = os.environ.copy()
                environ['PATH'] = '{}:{}'.format(settings.ARM_CS_TOOLS, environ['PATH'])
                command = [settings.SDK3_PEBBLE_WAF, "configure", "build"]
            else:
                raise Exception("invalid sdk version.")

            output += subprocess.check_output(command, stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits, env=environ)
        except subprocess.CalledProcessError as e:
            output = e.output
            logger.warning("Build command failed with error:\n%s\n", output)
            success = False
        except Exception as e:
            logger.exception("Unexpected exception during build")
            success = False
            output = str(e)
        else:
            success = True
            temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir))
            if not os.path.exists(temp_file):
                success = False
                logger.warning("Success was a lie.")
        finally:
            build_end_time = now()
            os.chdir(cwd)

            if success:
                # Try reading file sizes out of it first.
                try:
                    s = os.stat(temp_file)
                    build_result.total_size = s.st_size
                    # Now peek into the zip to see the component parts
                    with zipfile.ZipFile(temp_file, 'r') as z:
                        store_size_info(project, build_result, 'aplite', z)
                        store_size_info(project, build_result, 'basalt', z)
                        store_size_info(project, build_result, 'chalk', z)
                except Exception as e:
                    logger.warning("Couldn't extract filesizes: %s", e)

                # Try pulling out debug information.
                if project.sdk_version == '2':
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'pebble-worker.elf'))
                else:
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-worker.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-worker.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-worker.elf'))

                build_result.save_pbw(temp_file)
            build_result.save_build_log(output or 'Failed to get output')
            build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED
            build_result.finished = now()
            build_result.save()

            data = {
                'data': {
                    'cloudpebble': {
                        'build_id': build_result.id,
                        'job_run_time': (build_result.finished - build_result.started).total_seconds(),
                    },
                    'build_time': (build_end_time - build_start_time).total_seconds(),
                }
            }

            event_name = 'app_build_succeeded' if success else 'app_build_failed'
            send_td_event(event_name, data, project=project)
    except Exception as e:
        logger.exception("Build failed due to internal error: %s", e)
        build_result.state = BuildResult.STATE_FAILED
        build_result.finished = now()
        try:
            build_result.save_build_log("Something broke:\n%s" % e)
        # NOTE(review): bare except also swallows SystemExit/KeyboardInterrupt.
        except:
            pass
        build_result.save()
    finally:
        shutil.rmtree(base_dir)
def run_compile(build_result):
    """Assemble a project into a temp directory, run the SDK build, and
    record the outcome on the BuildResult row.

    *build_result* is a BuildResult primary key. The function writes the
    resulting .pbw, build log, sizes and debug info to the BuildResult,
    sets its state to STATE_SUCCEEDED/STATE_FAILED, and always removes
    the temp directory.

    Fixes vs. previous revision: files are written via `with` blocks so
    they are closed deterministically; the bare `except:` is narrowed to
    `except Exception:`; an unused local in the native resource loop is
    removed.
    """
    build_result = BuildResult.objects.get(pk=build_result)
    project = build_result.project
    source_files = SourceFile.objects.filter(project=project)
    resources = ResourceFile.objects.filter(project=project)

    # Assemble the project somewhere
    # (inside the chroot's tmp dir when a chroot is configured).
    base_dir = tempfile.mkdtemp(dir=os.path.join(settings.CHROOT_ROOT, 'tmp') if settings.CHROOT_ROOT else None)

    manifest_filename = manifest_name_for_project(project)

    try:
        # Resources
        resource_root = 'resources'
        os.makedirs(os.path.join(base_dir, resource_root, 'images'))
        os.makedirs(os.path.join(base_dir, resource_root, 'fonts'))
        os.makedirs(os.path.join(base_dir, resource_root, 'data'))

        if project.project_type == 'native':
            # Source code
            create_source_files(project, base_dir)

            manifest_dict = generate_manifest_dict(project, resources)
            with open(os.path.join(base_dir, manifest_filename), 'w') as manifest_file:
                manifest_file.write(json.dumps(manifest_dict))

            for f in resources:
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                f.copy_all_variants_to_dir(target_dir)

            # Reconstitute the SDK
            with open(os.path.join(base_dir, 'wscript'), 'w') as wscript_file:
                wscript_file.write(generate_wscript_file(project))
            with open(os.path.join(base_dir, 'pebble-jshintrc'), 'w') as jshint_file:
                jshint_file.write(generate_jshint_file(project))
        elif project.project_type == 'simplyjs':
            # Simply.js projects build from a copy of the Simply.js tree with
            # the user's JS injected as a single script.
            shutil.rmtree(base_dir)
            shutil.copytree(settings.SIMPLYJS_ROOT, base_dir)
            manifest_dict = generate_simplyjs_manifest_dict(project)

            js = '\n\n'.join(x.get_contents() for x in source_files if x.file_name.endswith('.js'))
            # json.dumps produces a valid JS string literal of the user code.
            escaped_js = json.dumps(js)
            build_result.save_simplyjs(js)

            with open(os.path.join(base_dir, manifest_filename), 'w') as manifest_file:
                manifest_file.write(json.dumps(manifest_dict))
            with open(os.path.join(base_dir, 'src', 'js', 'zzz_userscript.js'), 'w') as userscript_file:
                userscript_file.write("""
(function() {
    simply.mainScriptSource = %s;
})();
""" % escaped_js)
        elif project.project_type == 'pebblejs':
            shutil.rmtree(base_dir)
            shutil.copytree(settings.PEBBLEJS_ROOT, base_dir)
            manifest_dict = generate_pebblejs_manifest_dict(project, resources)
            create_source_files(project, base_dir)

            for f in resources:
                # Pebble.js only supports image resources.
                if f.kind not in ('png', 'bitmap'):
                    continue
                target_dir = os.path.abspath(os.path.join(base_dir, resource_root, ResourceFile.DIR_MAP[f.kind]))
                abs_target = os.path.abspath(os.path.join(target_dir, f.file_name))
                # Guard against path traversal via a crafted file name.
                if not abs_target.startswith(target_dir):
                    raise Exception("Suspicious filename: %s" % f.file_name)
                f.get_default_variant().copy_to_path(abs_target)

            with open(os.path.join(base_dir, manifest_filename), 'w') as manifest_file:
                manifest_file.write(json.dumps(manifest_dict))

        # Build the thing
        cwd = os.getcwd()
        success = False
        output = ''
        build_start_time = now()

        try:
            os.chdir(base_dir)
            # Install dependencies if there are any
            dependencies = project.get_dependencies()
            if dependencies:
                # Checking for path-based dependencies is performed by the database so in theory we shouldn't need to do
                # it here but we will do it anyway just to be extra safe.
                for version in dependencies.values():
                    validate_dependency_version(version)
                npm_command = [settings.NPM_BINARY, "install", "--ignore-scripts"]
                output = subprocess.check_output(npm_command, stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)
                subprocess.check_output([settings.NPM_BINARY, "dedupe"], stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits)

            # Select the waf entry point for the project's SDK version; the
            # ARM toolchain is prepended to PATH for the build.
            if project.sdk_version == '2':
                environ = os.environ.copy()
                environ['PATH'] = '{}:{}'.format(settings.ARM_CS_TOOLS, environ['PATH'])
                command = [settings.SDK2_PEBBLE_WAF, "configure", "build"]
            elif project.sdk_version == '3':
                environ = os.environ.copy()
                environ['PATH'] = '{}:{}'.format(settings.ARM_CS_TOOLS, environ['PATH'])
                command = [settings.SDK3_PEBBLE_WAF, "configure", "build"]
            else:
                raise Exception("invalid sdk version.")

            output += subprocess.check_output(command, stderr=subprocess.STDOUT, preexec_fn=_set_resource_limits, env=environ)
        except subprocess.CalledProcessError as e:
            output = e.output
            logger.warning("Build command failed with error:\n%s\n", output)
            success = False
        except Exception as e:
            logger.exception("Unexpected exception during build")
            success = False
            output = str(e)
        else:
            success = True
            temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir))
            if not os.path.exists(temp_file):
                success = False
                logger.warning("Success was a lie.")
        finally:
            build_end_time = now()
            os.chdir(cwd)

            if success:
                # Try reading file sizes out of it first.
                try:
                    s = os.stat(temp_file)
                    build_result.total_size = s.st_size
                    # Now peek into the zip to see the component parts
                    with zipfile.ZipFile(temp_file, 'r') as z:
                        store_size_info(project, build_result, 'aplite', z)
                        store_size_info(project, build_result, 'basalt', z)
                        store_size_info(project, build_result, 'chalk', z)
                except Exception as e:
                    logger.warning("Couldn't extract filesizes: %s", e)

                # Try pulling out debug information.
                if project.sdk_version == '2':
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'pebble-worker.elf'))
                else:
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'aplite', os.path.join(base_dir, 'build', 'aplite/pebble-worker.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'basalt', os.path.join(base_dir, 'build', 'basalt/pebble-worker.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-app.elf'))
                    save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-worker.elf'))

                build_result.save_pbw(temp_file)
            build_result.save_build_log(output or 'Failed to get output')
            build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED
            build_result.finished = now()
            build_result.save()

            data = {
                'data': {
                    'cloudpebble': {
                        'build_id': build_result.id,
                        'job_run_time': (build_result.finished - build_result.started).total_seconds(),
                    },
                    'build_time': (build_end_time - build_start_time).total_seconds(),
                }
            }

            event_name = 'app_build_succeeded' if success else 'app_build_failed'
            send_td_event(event_name, data, project=project)
    except Exception as e:
        logger.exception("Build failed due to internal error: %s", e)
        build_result.state = BuildResult.STATE_FAILED
        build_result.finished = now()
        try:
            build_result.save_build_log("Something broke:\n%s" % e)
        except Exception:
            # Best-effort only: the build is already marked failed; don't let
            # log persistence mask the original error (but never swallow
            # SystemExit/KeyboardInterrupt as the old bare except did).
            pass
        build_result.save()
    finally:
        shutil.rmtree(base_dir)