def save_log(self, project, data):
    logpath = os.path.join(
        self.config.logdir,
        project.path.replace(self.config.active_path, '').strip('\\/'),
        project.id + '.log')
    if not os.path.isdir(os.path.dirname(logpath)):
        shutil.mkdir(os.path.dirname(logpath))
    with open(logpath, 'wb') as f:
        f.write(data)
def _ensure_exists(filename, encoding='utf-8'):
    """Ensures that the configuration file exists and that it produces a
    correct empty configuration.
    """
    filename = os.path.abspath(filename)
    if not os.path.isfile(filename):
        dirname = os.path.dirname(filename)
        if not os.path.isdir(dirname):
            shutil.mkdir(dirname)
        with io.open(filename, 'w', encoding=encoding) as f:
            fmt = _get_format(filename)
            if fmt in (Config.JSON, Config.YAML):
                f.write('{}')
            f.write('\n')
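A minimal usage sketch of `_ensure_exists`, assuming the module-level `Config` and `_get_format` helpers from the snippet above; the concrete path is hypothetical:

# Hypothetical call: guarantees that ~/.myapp/config.json exists, creating any
# missing parent directory, and seeds it with an empty "{}" document (plus a
# trailing newline) because the extension maps to Config.JSON, so a later
# json.load() on the file succeeds.
_ensure_exists(os.path.expanduser('~/.myapp/config.json'))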
def handle(self, platform, configuration, build_number, no_deps, delete,
           *args, **kwargs):
    repos = []
    # Build dependency graph, or not
    if no_deps:
        for repo in self.config.repositories:
            b = utils.load_build(repo)
            repos.append(b)
    else:
        all_repos = {}
        for repo in self.config.all_repositories:
            # Remove explicitly skipped
            skipped = self.config.get('options.skipped_repositories') or []
            if repo.name not in skipped:
                b = utils.load_build(repo)
                all_repos[b.name] = b
        dg = DependencyGraph(all_repos.values())
        for repo in dg.get_in_order(
                map(lambda x: all_repos[x.name].id, self.config.repositories),
                None):
            repos.append(repo)
    config = utils.BuildConfig(self.config, platform, configuration,
                               build_number)
    # if --delete
    if delete and os.path.exists(config.output_path):
        if not shutil.remove(config.output_path):
            self.ui.error('Abort: "%s" could not be deleted' %
                          config.output_path)
            return
    # create output path if not exist
    if not os.path.exists(config.output_path):
        shutil.mkdir(config.output_path)
    # Build
    for repo in repos:
        self.ui.info(str(repo))
        for project in repo.projects:
            status, output, error = project.build(config)
            self.save_log(project, output + '\n\n\n' + error)
            self.ui.report(' %s' % project, status, {
                'output': output,
                'error': error,
            })
def get_key():
    from tea.system import get_appdata
    from tea.shutil import mkdir

    # Array, Byte, ProtectedData and DataProtectionScope are the .NET DPAPI
    # types (System / System.Security.Cryptography), assumed to be imported
    # from the CLR at module level.
    dir_path = os.path.join(get_appdata(), 'Tea')
    key_path = os.path.join(dir_path, 'key.bin')
    if os.path.exists(dir_path) and os.path.exists(key_path):
        # Key already stored: decrypt it for the current user and return it
        # as a list of integers.
        with open(key_path, 'rb') as f:
            cr_key = Array[Byte](map(ord, f.read()))
        key = ProtectedData.Unprotect(cr_key, None,
                                      DataProtectionScope.CurrentUser)
        return [int(k, 10) for k in key]
    else:
        # No key yet: generate one, encrypt it for the current user and
        # store it in the application data directory.
        mkdir(dir_path)
        key = _generate_key()
        arr_key = Array[Byte](key)
        cr_key = ProtectedData.Protect(arr_key, None,
                                       DataProtectionScope.CurrentUser)
        with open(key_path, 'wb') as f:
            f.write(cr_key)
        return key
def setup(module, target='zip', output_path=None, data_dir=None):
    dist = os.path.abspath('dist')
    try:
        if target == 'zip':
            assert er('setup.py', 'install', '--no-compile',
                      '--install-lib', os.path.join(dist, 'lib'),
                      '--install-scripts', os.path.join(dist),
                      *(['--install-data', os.path.join(dist, data_dir)]
                        if data_dir is not None else []))
            with shutil.goto(dist) as ok:
                assert ok
                assert compress.mkzip('%s.zip' % module,
                                      glob.glob(os.path.join('lib', '*')))
                assert shutil.remove('lib')
        elif target == 'exe':
            assert er('setup.py', 'install', '--no-compile',
                      '--install-lib', os.path.join(dist, 'lib', 'python'),
                      '--install-scripts', os.path.join(dist, 'scripts'),
                      *(['--install-data', os.path.join(dist, data_dir)]
                        if data_dir is not None else []))
            with shutil.goto(dist) as ok:
                assert ok
                modules = list(filter(os.path.exists, ['lib', 'scripts'] + (
                    [data_dir] if data_dir is not None else [])))
                assert compress.seven_zip('%s.exe' % module, modules,
                                          self_extracting=True)
                # Cleanup
                for module in modules:
                    assert shutil.remove(module)
        if output_path is not None:
            output_path = os.path.abspath(output_path)
            if output_path != dist:
                if not os.path.isdir(output_path):
                    assert shutil.mkdir(output_path)
                for filename in shutil.search(dist, '*'):
                    output = os.path.join(
                        output_path,
                        filename.replace(dist, '', 1).strip('\\/'))
                    assert shutil.move(filename, output)
        return 0
    except AssertionError as e:
        print(e)
        return 1
    finally:
        # Cleanup
        if output_path != dist:
            shutil.remove(dist)
        if os.path.isdir('build'):
            shutil.remove('build')
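A hedged example of how `setup` might be invoked from a package root; the module name, output directory and data directory below are placeholders, not values from the source:

# Build a self-extracting exe bundle for a hypothetical package "mypkg" and
# move the result out of the temporary dist/ directory into release/.
# Returns 0 on success, 1 if any step's assertion failed.
rc = setup('mypkg', target='exe', output_path='release', data_dir='data')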
def _save_repos(self):
    if not os.path.isdir(self.paths['config']):
        shutil.mkdir(self.paths['config'])
    with io.open(os.path.join(self.paths['config'], 'repos.json'),
                 'w+b') as f:
        json.dump(self.repos, f, indent=2, encoding='utf-8')
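All of these snippets share the same ensure-directory-then-write pattern around `tea.shutil.mkdir` (the `shutil` referenced throughout is `tea.shutil`, as the import in `get_key` shows). A minimal standalone sketch of that pattern, with a hypothetical target path and the assumption that `tea.shutil.mkdir` creates intermediate directories the way the snippets above rely on:

import os
from tea import shutil  # tea.shutil, not the standard-library shutil


def write_file(path, data):
    # Create any missing parent directories before writing the file.
    parent = os.path.dirname(path)
    if not os.path.isdir(parent):
        shutil.mkdir(parent)
    with open(path, 'wb') as f:
        f.write(data)


# Hypothetical usage: the nested directories are created on demand.
write_file(os.path.join('logs', 'nested', 'example.log'), b'hello')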