def compose(defs, target):
    '''Work through defs tree, building and assembling until target exists'''

    component = defs.get(target)
    if app.config.get('log-verbose'):
        app.log(target, "Composing", component['name'])

    # if we can't calculate cache key, we can't create this component
    if cache_key(defs, component) is False:
        return False

    # if this component is already cached, we're done
    if get_cache(defs, component):
        return cache_key(defs, component)

    # if we have a kbas, look there to see if this component exists
    if app.config.get('kbas-url'):
        with claim(defs, component):
            if get_remote(defs, component):
                app.config['counter'].increment()
                return cache_key(defs, component)

    if component.get('arch') and component['arch'] != app.config['arch']:
        return None

    with sandbox.setup(component):
        assemble(defs, component)
        if 'systems' not in component and not get_cache(defs, component):
            install_dependencies(defs, component)
            build(defs, component)

    return cache_key(defs, component)


def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.'''

    if cache.get_cache(defs, target):
        # needed for artifact splitting
        load_manifest(defs, target)
        return cache.cache_key(defs, target)

    random.seed(datetime.datetime.now())
    component = defs.get(target)

    if component.get('arch') and component['arch'] != app.config['arch']:
        app.log(target, 'Skipping assembly for', component.get('arch'))
        return None

    def assemble_system_recursively(system):
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble_system_recursively(subsystem)

    with app.timer(component, 'assembly'):
        sandbox.setup(component)

        systems = component.get('systems', [])
        random.shuffle(systems)
        for system in systems:
            assemble_system_recursively(system)

        dependencies = component.get('build-depends', [])
        random.shuffle(dependencies)
        for it in dependencies:
            dependency = defs.get(it)
            assemble(defs, dependency)
            sandbox.install(defs, component, dependency)

        contents = component.get('contents', [])
        random.shuffle(contents)
        for it in contents:
            subcomponent = defs.get(it)
            if subcomponent.get('build-mode') != 'bootstrap':
                assemble(defs, subcomponent)
                splits = None
                if component.get('kind') == 'system':
                    splits = subcomponent.get('artifacts')
                sandbox.install(defs, component, subcomponent, splits)

        app.config['counter'] += 1
        if 'systems' not in component:
            with app.timer(component, 'build'):
                build(defs, component)
        with app.timer(component, 'artifact creation'):
            do_manifest(defs, component)
            cache.cache(defs, component,
                        full_root=component.get('kind') == "system")
        sandbox.remove(component)

    return cache.cache_key(defs, component)


def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.'''

    component = defs.get(target)

    if cache_key(defs, component) is False:
        return False

    if get_cache(defs, component):
        return cache_key(defs, component)

    if app.config.get('kbas-url'):
        with claim(defs, component):
            if get_remote(defs, component):
                app.config['counter'].increment()
                return cache_key(defs, component)

    random.seed(datetime.datetime.now())

    if component.get('arch') and component['arch'] != app.config['arch']:
        return None

    sandbox.setup(component)

    systems = component.get('systems', [])
    random.shuffle(systems)
    for system in systems:
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble(defs, subsystem)

    dependencies = component.get('build-depends', [])
    for it in dependencies:
        preinstall(defs, component, it)

    contents = component.get('contents', [])
    random.shuffle(contents)
    for it in contents:
        subcomponent = defs.get(it)
        if subcomponent.get('build-mode', 'staging') != 'bootstrap':
            preinstall(defs, component, subcomponent)

    if 'systems' not in component and not get_cache(defs, component):
        if app.config.get('instances', 1) > 1:
            with claim(defs, component):
                # in here, exceptions get eaten
                do_build(defs, component)
        else:
            # in here, exceptions do not get eaten
            do_build(defs, component)

    app.remove_dir(component['sandbox'])

    return cache_key(defs, component)


def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.'''

    component = defs.get(target)

    if get_cache(defs, component) or get_remote(defs, component):
        return cache_key(defs, component)

    random.seed(datetime.datetime.now())

    if component.get('arch') and component['arch'] != app.config['arch']:
        app.log(target, 'Skipping assembly for', component.get('arch'))
        return None

    sandbox.setup(component)

    systems = component.get('systems', [])
    random.shuffle(systems)
    for system in systems:
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble(defs, subsystem)

    dependencies = component.get('build-depends', [])
    for it in dependencies:
        preinstall(defs, component, it)

    contents = component.get('contents', [])
    random.shuffle(contents)
    for it in contents:
        subcomponent = defs.get(it)
        if subcomponent.get('build-mode', 'staging') != 'bootstrap':
            preinstall(defs, component, subcomponent)

    if 'systems' not in component:
        if is_building(defs, component):
            import time
            time.sleep(10)
            raise Exception

        app.config['counter'] += 1
        if not get_cache(defs, component):
            with app.timer(component, 'build of %s' % component['cache']):
                with claim(defs, component):
                    build(defs, component)

    with app.timer(component, 'artifact creation'):
        do_manifest(component)
        cache(defs, component)
    sandbox.remove(component)
    return cache_key(defs, component)


def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.'''

    if cache.get_cache(defs, target):
        return cache.cache_key(defs, target)

    component = defs.get(target)

    if component.get('arch') and component['arch'] != app.settings['arch']:
        app.log(target, 'Skipping assembly for', component.get('arch'))
        return None

    def assemble_system_recursively(system):
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble_system_recursively(subsystem)

    with app.timer(component, 'Starting assembly'):
        sandbox.setup(component)

        for system_spec in component.get('systems', []):
            assemble_system_recursively(system_spec)

        dependencies = component.get('build-depends', [])
        random.shuffle(dependencies)
        for it in dependencies:
            dependency = defs.get(it)
            assemble(defs, dependency)
            sandbox.install(defs, component, dependency)

        contents = component.get('contents', [])
        random.shuffle(contents)
        for it in contents:
            subcomponent = defs.get(it)
            if subcomponent.get('build-mode') != 'bootstrap':
                assemble(defs, subcomponent)
                sandbox.install(defs, component, subcomponent)

        app.settings['counter'] += 1
        if 'systems' not in component:
            build(defs, component)

        do_manifest(component)
        cache.cache(defs, component,
                    full_root=component.get('kind') == "system")
        sandbox.remove(component)

    return cache.cache_key(defs, component)


def compose(dn):
    '''Work through defs tree, building and assembling until target exists'''

    if type(dn) is not dict:
        dn = app.defs.get(dn)

    # if we can't calculate cache key, we can't create this component
    if cache_key(dn) is False:
        if 'tried' not in dn:
            log(dn, 'No cache_key, so skipping compose')
            dn['tried'] = True
        return False

    # if dn is already cached, we're done
    if get_cache(dn):
        return cache_key(dn)

    log(dn, "Composing", dn['name'], verbose=True)

    # if we have a kbas, look there to see if this component exists
    if config.get('kbas-url') and not config.get('reproduce'):
        with claim(dn):
            if get_remote(dn):
                config['counter'].increment()
                return cache_key(dn)

    # we only work with user-specified arch
    if 'arch' in dn and dn['arch'] != config['arch']:
        return None

    # Create composite components (strata, systems, clusters)
    systems = dn.get('systems', [])
    shuffle(systems)
    for system in systems:
        for s in system.get('subsystems', []):
            subsystem = app.defs.get(s['path'])
            compose(subsystem)
        compose(system['path'])

    with sandbox.setup(dn):
        install_contents(dn)
        build(dn)  # bring in 'build-depends', and run make

    return cache_key(dn)


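# Illustrative usage sketch (not part of the original source): compose()
# normalises its argument, so a caller can pass either the name of a
# definition or the dict already returned by app.defs.get(). The driver
# lines below are an assumption modelled on the main-script snippets
# elsewhere in this file.
target = app.defs.get(config['target'])
compose(target)              # pass the parsed definition dict directly
compose(config['target'])    # or pass the name; compose() looks it up itself

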
def assemble(target):
    '''Assemble dependencies and contents recursively until target exists.'''

    if cache.get_cache(target):
        return cache.cache_key(target)

    defs = Definitions()
    this = defs.get(target)

    if this.get('arch') and this['arch'] != app.settings['arch']:
        app.log(target, 'Skipping assembly for', this['arch'])
        return None

    with app.timer(this, 'Starting assembly'):
        sandbox.setup(this)

        for it in this.get('systems', []):
            system = defs.get(it)
            assemble(system)
            for subsystem in this.get('subsystems', []):
                assemble(subsystem)

        dependencies = this.get('build-depends', [])
        random.shuffle(dependencies)
        for it in dependencies:
            dependency = defs.get(it)
            assemble(dependency)
            sandbox.install(this, dependency)

        contents = this.get('contents', [])
        random.shuffle(contents)
        for it in contents:
            component = defs.get(it)
            if component.get('build-mode') != 'bootstrap':
                assemble(component)
                sandbox.install(this, component)

        build(this)
        do_manifest(this)
        cache.cache(this, full_root=this.get('kind', None) == "system")
        sandbox.remove(this)

    return cache.cache_key(this)


if os.path.isdir(os.path.join(os.getcwd(), '..', 'definitions')):
    os.chdir(os.path.join(os.getcwd(), '..', 'definitions'))

app.setup(sys.argv)
app.cleanup(app.config['tmp'])

with app.timer('TOTAL'):
    tmp_lock = open(os.path.join(app.config['tmp'], 'lock'), 'r')
    fcntl.flock(tmp_lock, fcntl.LOCK_SH | fcntl.LOCK_NB)

    target = os.path.join(app.config['defdir'], app.config['target'])
    app.log('TARGET', 'Target is %s' % target, app.config['arch'])
    with app.timer('DEFINITIONS', 'parsing %s' % app.config['def-version']):
        defs = Definitions()
    with app.timer('CACHE-KEYS', 'cache-key calculations'):
        cache.cache_key(defs, app.config['target'])
    cache.cull(app.config['artifacts'])

    target = defs.get(app.config['target'])
    if app.config['total'] == 0 or (app.config['total'] == 1 and
                                    target.get('kind') == 'cluster'):
        app.exit('ARCH', 'ERROR: no definitions found for',
                 app.config['arch'])

    defs.save_trees()
    if app.config.get('mode', 'normal') == 'keys-only':
        with open(app.config['result-file'], 'w') as f:
            f.write(target['cache'] + '\n')
        app.log('RESULT', 'Cache-key for target is at',
                app.config['result-file'])
        os._exit(0)


if os.path.isdir(os.path.join(os.getcwd(), '..', 'definitions')):
    os.chdir(os.path.join(os.getcwd(), '..', 'definitions'))

app.setup(sys.argv)
app.cleanup(app.config['tmp'])

with app.timer('TOTAL'):
    tmp_lock = open(os.path.join(app.config['tmp'], 'lock'), 'r')
    fcntl.flock(tmp_lock, fcntl.LOCK_SH | fcntl.LOCK_NB)

    target = os.path.join(app.config['defdir'], app.config['target'])
    app.log('TARGET', 'Target is %s' % target, app.config['arch'])
    with app.timer('DEFINITIONS', 'parsing %s' % app.config['def-version']):
        defs = Definitions()
    with app.timer('CACHE-KEYS', 'cache-key calculations'):
        cache.cache_key(defs, app.config['target'])
    cache.cull(app.config['artifacts'])

    target = defs.get(app.config['target'])
    if app.config['total'] == 0 or (app.config['total'] == 1 and
                                    target.get('kind') == 'cluster'):
        app.exit('ARCH', 'ERROR: no definitions found for',
                 app.config['arch'])

    defs.save_trees()
    if app.config.get('mode', 'normal') == 'keys-only':
        with open('./ybd.result', 'w') as f:
            f.write(target['cache'] + '\n')
        os._exit(0)

    sandbox.executor = sandboxlib.executor_for_platform()
    app.log(app.config['target'], 'Sandbox using %s' % sandbox.executor)


def lockfile(defs, this):
    return os.path.join(app.config['tmp'], cache_key(defs, this) + '.lock')


def do_search_in_folder(self, folder):
    """List/search contents of a specific Smart Folder.

    Sends results to Alfred.

    :param folder: name or path of Smart Folder
    :type folder: ``unicode``

    """
    log.info(u'searching folder "%s" for "%s" ...', folder, self.query)
    files = []
    folder_path = None

    for name, path in self.folders:
        if path == folder:
            folder_path = path
            break
        elif name == folder:
            folder_path = path
            break
    else:
        return self._terminate_with_error(
            u"Unknown folder '{}'".format(folder),
            'Check your configuration with `smartfolders`')

    # Get contents of folder; update if necessary
    key = cache_key(folder_path)
    files = self.wf.cached_data(key, max_age=0)
    if files is None:
        files = []

    if not self.wf.cached_data_fresh(key, CACHE_AGE_CONTENTS):
        run_in_background(key,
                          ['/usr/bin/python',
                           self.wf.workflowfile('cache.py'),
                           '--folder', folder_path])

    if is_running(key):
        self.wf.rerun = 0.5

    if self.query:
        files = self.wf.filter(self.query, files, key=os.path.basename,
                               min_score=10)

    if not files:
        if not self.query:
            self._add_message('Empty Smart Folder', icon=ICON_WARNING)
        else:
            self._add_message('No matching results',
                              'Try a different query',
                              icon=ICON_WARNING)
    else:
        for i, path in enumerate(files):
            title = os.path.basename(path)
            subtitle = path.replace(os.getenv('HOME'), '~')
            self.wf.add_item(title, subtitle,
                             uid=path,
                             arg=path,
                             valid=True,
                             icon=path,
                             icontype='fileicon',
                             type='file')
            if (i + 1) == MAX_RESULTS:
                break

    self.wf.send_feedback()


with timer('TOTAL'):
    tmp_lock = open(os.path.join(config['tmp'], 'lock'), 'r')
    fcntl.flock(tmp_lock, fcntl.LOCK_SH | fcntl.LOCK_NB)

    target = os.path.join(config['defdir'], config['target'])
    log('TARGET', 'Target is %s' % target, config['arch'])
    with timer('DEFINITIONS', 'parsing %s' % config['def-version']):
        app.defs = Pots()

    target = app.defs.get(config['target'])
    if config.get('mode', 'normal') == 'parse-only':
        Pipeline(target)
        os._exit(0)

    with timer('CACHE-KEYS', 'cache-key calculations'):
        cache.cache_key(target)

    if 'release-note' in config:
        do_release_note(config['release-note'])

    if config['total'] == 0 or (config['total'] == 1 and
                                target.get('kind') == 'cluster'):
        log('ARCH', 'No definitions for', config['arch'], exit=True)

    app.defs.save_trees()
    if config.get('mode', 'normal') == 'keys-only':
        write_cache_key()
        os._exit(0)

    cache.cull(config['artifacts'])


def lockfile(dn):
    return os.path.join(config['tmp'], cache_key(dn) + '.lock')


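# Illustrative sketch only (an assumption, not the project's implementation):
# the claim() context manager used as "with claim(dn):" above is taken here
# to hold an exclusive, non-blocking flock on lockfile(dn) while a component
# is built, so parallel instances cannot build the same cache key twice.
from contextlib import contextmanager
import fcntl


@contextmanager
def claim(dn):
    with open(lockfile(dn), 'a') as lock:
        try:
            # LOCK_NB makes this fail immediately if another instance
            # already holds the lock for this cache key
            fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            raise Exception('%s is already being built elsewhere' %
                            dn.get('name', dn))
        try:
            yield
        finally:
            fcntl.flock(lock, fcntl.LOCK_UN)

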