def task_version():
    """update version on <pkg-name>/__init__.py and doc/conf.py"""

    def version_str2tuple(string):
        # numeric components become ints, the rest stay strings;
        # every component is then rendered through repr()
        pieces = [int(p) if p.isdigit() else p for p in string.split('.')]
        return tuple(repr(p) for p in pieces)

    def get_version():
        # the version must be set in setup.py with a plain (non-triple-quoted)
        # string literal; `setup.py --version` prints it for us
        raw = subprocess.check_output('python setup.py --version',
                                      shell=True, universal_newlines=True)
        release = raw.strip()
        as_tuple = version_str2tuple(release)
        return {
            'version': '.'.join(as_tuple[:2]),
            'release': release,
            'tuple': as_tuple,
        }

    # extract the version string from setup.py
    yield {
        'name': 'get_from_setup',
        'file_dep': ['setup.py'],
        'actions': [get_version],
    }

    sed = "sed --in-place --regexp-extended "

    # rewrite __version__ in the package __init__
    yield {
        'name': 'set_pkg',
        'uptodate': [result_dep('version:get_from_setup')],
        'getargs': {'version': ('version:get_from_setup', 'tuple')},
        'actions': [sed + r"'s/(__version__ = )(.*)/\1%(version)s/' " + init_file],
        'targets': [init_file],
    }

    # rewrite `version` and `release` in the Sphinx config
    doc_file = 'doc/conf.py'
    yield {
        'name': 'set_doc',
        'uptodate': [result_dep('version:get_from_setup')],
        'getargs': {
            'version': ('version:get_from_setup', 'version'),
            'release': ('version:get_from_setup', 'release'),
        },
        'actions': [
            sed + r""" "s/(version = )(.*)/\1'%(version)s'/" """ + doc_file,
            sed + r""" "s/(release = )(.*)/\1'%(release)s'/" """ + doc_file,
        ],
    }
def task_version():
    """update version on <pkg-name>/__init__.py and doc/conf.py"""
    # package version is taken from setup.py; it must be declared there with
    # a single/double-quoted string literal (triple-quotes are not supported)

    def version_str2tuple(string):
        # '1.2.dev' -> (repr(1), repr(2), repr('dev'))
        converted = []
        for chunk in string.split('.'):
            converted.append(int(chunk) if chunk.isdigit() else chunk)
        return tuple(repr(item) for item in converted)

    def get_version():
        cmd = 'python setup.py --version'
        version_str = subprocess.check_output(
            cmd, shell=True, universal_newlines=True).strip()
        version_tuple = version_str2tuple(version_str)
        return {'version': '.'.join(version_tuple[:2]),
                'release': version_str,
                'tuple': version_tuple}

    yield {'name': 'get_from_setup',
           'file_dep': ['setup.py'],
           'actions': [get_version]}

    sed = "sed --in-place --regexp-extended "

    yield {'name': 'set_pkg',
           'uptodate': [result_dep('version:get_from_setup')],
           'getargs': {'version': ('version:get_from_setup', 'tuple')},
           'actions': [
               sed + r"'s/(__version__ = )(.*)/\1%(version)s/' " + init_file],
           'targets': [init_file]}

    doc_file = 'doc/conf.py'
    yield {'name': 'set_doc',
           'uptodate': [result_dep('version:get_from_setup')],
           'getargs': {'version': ('version:get_from_setup', 'version'),
                       'release': ('version:get_from_setup', 'release')},
           'actions': [
               sed + r""" "s/(version = )(.*)/\1'%(version)s'/" """ + doc_file,
               sed + r""" "s/(release = )(.*)/\1'%(release)s'/" """ + doc_file,
           ]}
def test_single(self, depfile):
    # result_dep on a single (non-group) task: t1 re-runs whenever
    # the saved result of t2 changes.
    dep_manager = depfile
    tasks = {
        't1': task.Task("t1", None, uptodate=[tools.result_dep('t2')]),
        't2': task.Task("t2", None),
    }
    status = lambda: dep_manager.get_status(tasks['t1'], tasks)

    # result_dep's _config_task hook registered t2 as an implicit task_dep
    assert ['t2'] == tasks['t1'].task_dep

    # first t2 result -> t1 has never seen it, so it must run
    tasks['t2'].result = 'yes'
    dep_manager.save_success(tasks['t2'])
    assert 'run' == status()

    # after t1 records its extra values once, it is up-to-date
    tasks['t1'].save_extra_values()
    dep_manager.save_success(tasks['t1'])
    assert 'up-to-date' == status()

    # t2's result changes -> t1 must run again
    tasks['t2'].result = '222'
    dep_manager.save_success(tasks['t2'])
    tasks['t1'].save_extra_values()
    dep_manager.save_success(tasks['t1'])
    assert 'run' == status()

    tasks['t1'].save_extra_values()
    dep_manager.save_success(tasks['t1'])
    assert 'up-to-date' == status()
def test_group(self, depfile):
    # result_dep on a group task: t1 depends on the combined results of
    # t2's sub-tasks, so a change in any sub-task result forces a re-run.
    dep_manager = depfile
    tasks = {
        't1': task.Task("t1", None, uptodate=[tools.result_dep('t2')]),
        't2': task.Task("t2", None, task_dep=['t2:a', 't2:b'],
                        has_subtask=True),
        't2:a': task.Task("t2:a", None),
        't2:b': task.Task("t2:b", None),
    }
    save = dep_manager.save_success
    status = lambda: dep_manager.get_status(tasks['t1'], tasks)

    # result_dep's _config_task hook registered the group as task_dep
    assert ['t2'] == tasks['t1'].task_dep

    # first results for both sub-tasks -> t1 must run
    tasks['t2:a'].result = 'yes1'
    save(tasks['t2:a'])
    tasks['t2:b'].result = 'yes2'
    save(tasks['t2:b'])
    assert 'run' == status()

    # after t1 records its extra values once, it is up-to-date
    tasks['t1'].save_extra_values()
    save(tasks['t1'])
    assert 'up-to-date' == status()

    # one sub-task result changes -> t1 must run again
    tasks['t2:a'].result = '222'
    save(tasks['t2:a'])
    tasks['t1'].save_extra_values()
    save(tasks['t1'])
    assert 'run' == status()

    tasks['t1'].save_extra_values()
    save(tasks['t1'])
    assert 'up-to-date' == status()
def task_dist():
    """create distribution save files on `dist` folder.

    This folder should contain the branch `releases`.
    Tags for bower releases should be made from the `releases` branch.
    """
    # read the version number out of bower.json
    yield {
        'name': 'version',
        'actions': [get_bower_version],
        'file_dep': ['bower.json'],
    }

    # source distribution: concatenate the released sources
    # (hoe.app.js is unreleased and therefore left out)
    released_sources = ['src/hoe.js', 'src/hoe.model.js']
    yield {
        'name': 'dev',
        'actions': [
            'echo "// hoe.js version: %(version)s" > %(targets)s',
            'cat src/hoe.js >> %(targets)s',
            'cat src/hoe.model.js >> %(targets)s',
        ],
        'file_dep': released_sources,
        'getargs': {'version': ('dist:version', 'version')},
        'uptodate': [result_dep('dist:version')],
        'targets': ['dist/hoe.js'],
        'clean': True,
    }

    # minified file for distribution
    uglify = 'node_modules/uglify-js/bin/uglifyjs'
    yield {
        'name': 'min',
        'actions': [
            'echo "// hoe.js version: %(version)s" > %(targets)s',
            (uglify + ' %(dependencies)s '
             + '--mangle --compress --comments >> %(targets)s'),
        ],
        'file_dep': ['dist/hoe.js'],
        'getargs': {'version': ('dist:version', 'version')},
        'targets': ['dist/hoe.min.js'],
        'clean': True,
    }

    # extra files shipped inside the bower package
    for fname in ('README.md', 'LICENSE', 'CHANGES'):
        yield {
            'name': 'cp-{}'.format(fname),
            'actions': ['cp %(dependencies)s %(targets)s'],
            'file_dep': [fname],
            'targets': ['dist/{}'.format(fname)],
        }
def gen_tasks(self):
    # Build the tipuesearch JSON index for every translation, then copy
    # the plugin's static assets into the output tree.
    self.site.scan_posts()
    kw = {
        "translations": self.site.config['TRANSLATIONS'],
        "output_folder": self.site.config['OUTPUT_FOLDER'],
    }
    posts = self.site.timeline[:]
    dst_path = os.path.join(kw["output_folder"], "assets", "js",
                            "tipuesearch_content.json")

    def save_data():
        # One search record per (language, post) pair; unpublished posts
        # are skipped (Issue #387).
        pages = []
        for lang in kw["translations"]:
            for post in posts:
                if post.is_draft or post.is_retired or post.publish_later:
                    continue
                # '^' is stripped because tipuesearch treats it specially
                body = post.text(lang, strip_html=True).replace('^', '')
                pages.append({
                    "title": post.title(lang),
                    "text": body,
                    "tags": ",".join(post.tags),
                    "loc": post.permalink(lang),
                })
        output = json.dumps({"pages": pages}, indent=2)
        makedirs(os.path.dirname(dst_path))
        with codecs.open(dst_path, "wb+", "utf8") as fd:
            fd.write(output)

    # Note: The task should run everytime a new file is added or a file is
    # changed. We cheat, and depend on the sitemap task, to run everytime
    # a new file is added.
    yield {
        "basename": str(self.name),
        "name": dst_path,
        "targets": [dst_path],
        "actions": [(save_data, [])],
        'uptodate': [config_changed(kw), result_dep('sitemap')],
    }

    # Copy all the assets to the right places
    asset_folder = os.path.join(os.path.dirname(__file__), "files")
    for asset_task in copy_tree(asset_folder, kw["output_folder"]):
        asset_task["basename"] = str(self.name)
        yield asset_task
def gen_tasks(self):
    # Generate the tipuesearch JSON index for each translation and copy
    # the plugin's static assets into the output folder.
    self.site.scan_posts()
    kw = {
        "translations": self.site.config['TRANSLATIONS'],
        "output_folder": self.site.config['OUTPUT_FOLDER'],
    }
    posts = self.site.timeline[:]
    dst_path = os.path.join(kw["output_folder"], "assets", "js",
                            "tipuesearch_content.json")

    def save_data():
        # Collect one record per (language, post) pair; posts that are not
        # publicly published are excluded (Issue #387).
        pages = []
        for lang in kw["translations"]:
            for post in posts:
                if post.is_draft or post.is_private or post.publish_later:
                    continue
                # '^' is stripped because tipuesearch treats it specially
                body = post.text(lang, strip_html=True).replace('^', '')
                pages.append({
                    "title": post.title(lang),
                    "text": body,
                    "tags": ",".join(post.tags),
                    "loc": post.permalink(lang),
                })
        output = json.dumps({"pages": pages}, indent=2)
        makedirs(os.path.dirname(dst_path))
        with codecs.open(dst_path, "wb+", "utf8") as fd:
            fd.write(output)

    # Note: The task should run everytime a new file is added or a file is
    # changed. We cheat, and depend on the sitemap task, to run everytime
    # a new file is added.
    yield {
        "basename": str(self.name),
        "name": dst_path,
        "targets": [dst_path],
        "actions": [(save_data, [])],
        'uptodate': [config_changed(kw), result_dep('sitemap')],
    }

    # Copy all the assets to the right places
    asset_folder = os.path.join(os.path.dirname(__file__), "files")
    for asset_task in copy_tree(asset_folder, kw["output_folder"]):
        asset_task["basename"] = str(self.name)
        yield asset_task
def testStatus_result_dep_bug_gh44(self, dependency1, depfile):
    # regression for gh-44: the full task dict must be passed along when
    # result_dep checks up-to-date status
    task_list = [Task("t1", [""], doc="t1 doc string"),
                 Task("t2", [""], uptodate=[result_dep('t1')])]
    depfile.save_success(task_list[0])  # only t1 has a saved result
    depfile.close()
    output = StringIO()
    cmd_list = CmdFactory(List, outstream=output, dep_file=depfile.name,
                          backend='dbm', task_list=task_list)
    cmd_list._execute(status=True)
    got = []
    for line in output.getvalue().split('\n'):
        if line:
            got.append(line.strip())
    # both tasks are reported as needing to run
    assert 'R t1' in got
    assert 'R t2' in got
def task_send_email():
    """Placeholder task: announce a new release by e-mail.

    Runs again whenever the result of the `version` task changes.
    """
    return {
        'actions': ['echo "TODO: send an email"'],
        # doit requires 'uptodate' to be a list/tuple of checkers;
        # a bare result_dep instance fails task validation.
        'uptodate': [result_dep('version')],
    }
def task_dist():
    """create distribution save files on `dist` folder.

    This folder should contain the branch `releases`.
    Tags for bower releases should be made from the `releases` branch.
    """
    # grab the version number from bower.json
    yield {
        'name': 'version',
        'actions': [get_bower_version],
        'file_dep': ['bower.json'],
    }

    # concatenated source build; hoe.app.js is unreleased and excluded
    js_sources = ['src/hoe.js', 'src/hoe.model.js']
    yield {
        'name': 'dev',
        'actions': [
            'echo "// hoe.js version: %(version)s" > %(targets)s',
            'cat src/hoe.js >> %(targets)s',
            'cat src/hoe.model.js >> %(targets)s',
        ],
        'file_dep': js_sources,
        'getargs': {'version': ('dist:version', 'version')},
        'uptodate': [result_dep('dist:version')],
        'targets': ['dist/hoe.js'],
        'clean': True,
    }

    # minified build via uglifyjs
    minifier = 'node_modules/uglify-js/bin/uglifyjs'
    yield {
        'name': 'min',
        'actions': [
            'echo "// hoe.js version: %(version)s" > %(targets)s',
            (minifier + ' %(dependencies)s '
             + '--mangle --compress --comments >> %(targets)s'),
        ],
        'file_dep': ['dist/hoe.js'],
        'getargs': {'version': ('dist:version', 'version')},
        'targets': ['dist/hoe.min.js'],
        'clean': True,
    }

    # plain copies of the metadata files included in the bower package
    for fname in ['README.md', 'LICENSE', 'CHANGES']:
        yield {
            'name': 'cp-{}'.format(fname),
            'actions': ['cp %(dependencies)s %(targets)s'],
            'file_dep': [fname],
            'targets': ['dist/{}'.format(fname)],
        }
def parse_dodocker_yaml(mode):
    """Parse dodocker.yaml and yield one doit task dict per image/mode.

    mode selects which actions are generated: 'git' (clone/update source),
    'build' (docker build + tagging) or 'upload' (push to the registry).
    Exits the process with an error summary when the YAML is missing or
    contains invalid entries.
    """
    parse_errors = []
    try:
        with open('dodocker.yaml', 'r') as f:
            yaml_data = yaml.safe_load(f)
    except IOError:
        sys.exit('No dodocker.yaml found')
    for task_description in yaml_data:
        # hoisted: `image` is needed by the parameterization error messages
        # below (it was previously referenced before assignment)
        image = task_description['image']
        paramize = task_description.get('parameterization')
        if paramize:
            if 'shell_action' in task_description:
                parse_errors.append('image {}: parameterization is not available with shell_actions'.format(image))
                continue
            if 'tags' in task_description:
                parse_errors.append('image {}: tags parameter is not available outside of parameterization'.format(image))
                continue
        if not paramize:
            # no parameterization: run the loop body exactly once
            paramize = [{}]
        for paramize_item in paramize:  # was `for task_item in taskitems` (undefined name)
            name = '%s_%s' % (mode, task_description['image'])
            path = str(task_description.get('path', ''))
            if not path:
                parse_errors.append('image {}: no path given'.format(image))
            dockerfile = task_description.get('dockerfile', 'Dockerfile')
            new_task = {'basename': name, 'verbosity': 0}

            # git checkout information: "url [branch|tags|commit]/<ref>"
            git_url = git_checkout = git_checkout_type = None
            git_options = task_description.get('git_url', "").split()
            if git_options:
                git_url = git_options[0]
                if len(git_options) == 2:
                    try:
                        (git_checkout_type, git_checkout) = git_options[1].split('/')
                    except ValueError:
                        pass
                    if not git_checkout_type in ('branch', 'tags', 'commit'):
                        parse_errors.append('image {}: wrong tree format {} for url {}'.format(image, git_options[1], git_url))
                else:
                    # default to the tip of master
                    git_checkout_type = 'branch'
                    git_checkout = 'master'

            # task dependencies
            new_task['uptodate'] = []
            new_task['task_dep'] = []
            if 'depends' in task_description and mode in ('build', 'upload'):
                depends_subtask_name = task_description['depends']
                new_task['task_dep'].append('{}_{}'.format(mode, depends_subtask_name))

            if mode == 'git':
                if 'git_url' in task_description:
                    new_task['actions'] = [update_git(git_url, git_checkout_type, git_checkout)]
                else:
                    continue
            elif mode == 'build':
                if 'shell_action' in task_description:
                    task_type = 'shell'
                else:
                    task_type = 'dockerfile'
                if git_url:
                    # build from the checked-out repository
                    new_task['task_dep'].append('git_{}'.format(image))
                    path = "{}/{}".format(git_repos_path(git_url, git_checkout_type, git_checkout), path)
                if task_type == 'shell':
                    if not path:
                        path = '.'
                    new_task['actions'] = [shell_build(task_description['shell_action'], image, path=path,
                                                      force=dodocker_config.get('no_cache', False))]
                elif task_type == 'dockerfile':
                    pull = task_description.get('pull', False)
                    rm = task_description.get('rm', True)
                    new_task['actions'] = [docker_build(path, tag=image, dockerfile=dockerfile, pull=pull, rm=rm)]
                # tagging (leftover `import pdb; pdb.set_trace()` removed)
                if not 'tags' in task_description:
                    if paramize_item.get('tags'):
                        tags = paramize_item['tags']
                    else:
                        tags = []
                else:
                    tags = task_description['tags']
                tag = None
                image_no_tag = image
                if ':' in image:
                    image_no_tag, tag = image.split(':')
                new_task['actions'].append(docker_tag(
                    image, '%s/%s' % (dodocker_config['registry_path'], image_no_tag), tag))
                repo = tag = None
                for t in tags:
                    if ':' in t:
                        repo, tag = t.strip().split(':')
                        if not repo:
                            repo = image_no_tag
                    else:
                        repo = t
                        tag = None
                    new_task['actions'].append(docker_tag(
                        image, '%s/%s' % (dodocker_config['registry_path'], repo), tag=tag))
                    new_task['actions'].append(docker_tag(image, repo, tag=tag))
                # IMPORTANT: image_id has to be the last action. The output
                # of the last action is used for result_dep.
                new_task['actions'].append(image_id(image))
                # intra build dependencies
                if task_description.get('file_dep'):
                    new_task['file_dep'] = [os.path.join(path, i) for i in task_description.get('file_dep')]
                elif path:
                    new_task['file_dep'] = get_file_dep(path)
                if 'depends' in task_description:
                    new_task['uptodate'].append(result_dep('%s_%s' % (mode, depends_subtask_name)))
                if dodocker_config.get('no_cache') and image in dodocker_config['no_cache_targets']:
                    # the image is not up to date, when the cache is disabled by the user
                    new_task['uptodate'].append(lambda x=None: False)
                else:
                    # an image has to be available
                    new_task['uptodate'].append(check_available(image))
                # every task has to run once to build the result_dep chain for every image
                new_task['uptodate'].append(run_once)
            elif mode == 'upload':
                tag = None
                if ':' in image:
                    image, tag = image.split(':')
                new_task['actions'] = [docker_push('%s/%s' % (dodocker_config['registry_path'], image), tag)]
            yield new_task
    if parse_errors:
        sys.exit("\n".join(parse_errors))
def task_package_worker_gen():
    """Yield the chain of packaging sub-tasks driven by the build config.

    For every dependency listed in the build config: prepare (clone) its
    repository and — unless running in workspace mode — export it into the
    local conan cache and upload it to the conan repository. Afterwards the
    meta repository is prepared and either built as a local workspace or
    exported, built and deployed as a release.
    """
    if not os.path.exists(BUILD_CONFIG_NAME):
        return
    # safe_load + context manager: the previous yaml.load(open(...)) both
    # allowed arbitrary-object construction and leaked the file handle
    with open(BUILD_CONFIG_NAME) as config_file:
        build_config = yaml.safe_load(config_file)
    deps = []
    for dep_info in build_config['dependencies']:
        name = dep_info["name"]
        if name in SKIP_PACKAGES:
            continue
        # first clone the dependency
        prepare_task_name = "package_worker_prepare_%s" % name
        yield {
            'name': prepare_task_name,
            'file_dep': [BUILD_CONFIG_NAME],
            'actions': [(prepare_package_repository,
                         [name, dep_info["gitrepo"], dep_info["gitbranch"]])],
            'params': [
                {'name': 'build_folder', 'short': 'f', 'default': 'build'},
                {'name': 'wipe', 'short': 'w', 'type': bool, 'default': False},
            ],
            'getargs': {
                'config': ('load_config', "config"),
            },
            'uptodate': [False],
            'verbosity': 2,
        }
        # for local workspace build do not export or upload the code to
        # conan as we are currently working with local directories
        if not global_config["workspace"]:
            # then export it into the local conan cache
            export_task_name = "package_worker_export_%s" % name
            yield {
                'name': export_task_name,
                'file_dep': [BUILD_CONFIG_NAME],
                'actions': [(export_package,
                             [dep_info['conanuser'],
                              dep_info.get('conanchannel', "stable")])],
                'getargs': {
                    'name': ("package_worker_gen:%s" % prepare_task_name,
                             "name"),
                    'package_repo_folder':
                        ("package_worker_gen:%s" % prepare_task_name,
                         "package_repo_folder"),
                    'package_commit_rev':
                        ("package_worker_gen:%s" % prepare_task_name,
                         "commit_rev"),
                },
                'uptodate': [
                    result_dep("package_worker_gen:%s" % prepare_task_name),
                ],
                'verbosity': 2,
            }
            # then upload it to the conan repository
            upload_task_name = "package_worker_upload_%s" % name
            yield {
                'name': upload_task_name,
                'file_dep': [BUILD_CONFIG_NAME],
                'actions': [(upload_package, )],
                'getargs': {
                    'name': ("package_worker_gen:%s" % export_task_name,
                             "name"),
                    'version': ("package_worker_gen:%s" % export_task_name,
                                "version"),
                    'user': ("package_worker_gen:%s" % export_task_name,
                             "user"),
                    'channel': ("package_worker_gen:%s" % export_task_name,
                                "channel"),
                    'package_commit_rev':
                        ("package_worker_gen:%s" % prepare_task_name,
                         "commit_rev"),
                    'config': ('load_config', "config"),
                },
                'uptodate': [
                    result_dep("package_worker_gen:%s" % export_task_name),
                ],
                'verbosity': 2,
            }
        deps.append(name)

    if global_config["workspace"]:
        # prepare the meta repository
        yield {
            'name': 'prepare_meta_repository',
            'actions': [(prepare_meta_repository, )],
            'params': [
                {'name': 'wipe', 'short': 'w', 'type': bool, 'default': False},
            ],
            'getargs': {
                'meta_repo_folder': ('load_config', "meta_repo_folder"),
                'config': ('load_config', "config"),
            },
            'uptodate': [False],
            'verbosity': 2,
        }
        yield {
            'name': 'package_worker_workspace_build',
            'actions': [(build_workspace, [deps])],
            'params': [
                {'name': 'build_folder', 'short': 'f', 'default': 'build'},
            ],
            'getargs': {
                'config': ('load_config', "config"),
            },
            #'uptodate': [result_dep("package_worker_gen:export_meta_package")],
            'uptodate': [False],
            'verbosity': 2,
        }
    else:
        # prepare the meta repository
        yield {
            'name': 'prepare_meta_repository',
            'actions': [(prepare_meta_repository, )],
            'params': [
                {'name': 'wipe', 'short': 'w', 'type': bool, 'default': False},
            ],
            'getargs': {
                'meta_repo_folder': ('load_config', "meta_repo_folder"),
                'config': ('load_config', "config"),
            },
            'uptodate': [
                result_dep("package_worker_gen:package_worker_upload_%s" % n)
                for n in deps
            ],
            'verbosity': 2,
        }
        # export the meta repository
        yield {
            'name': 'export_meta_package',
            'actions': [(export_meta_package, )],
            'getargs': {
                'meta_repo_folder':
                    ('package_worker_gen:prepare_meta_repository',
                     "meta_repo_folder"),
                'meta_commit_rev':
                    ('package_worker_gen:prepare_meta_repository',
                     "commit_rev"),
                'config': ('load_config', "config"),
            },
            'uptodate': [result_dep('package_worker_gen:prepare_meta_repository')],
            'verbosity': 2,
        }
        # now build the release
        yield {
            'name': 'package_worker_build',
            'actions': [(build_release, [deps])],
            'params': [
                {'name': 'build_folder', 'short': 'f', 'default': 'build'},
            ],
            'getargs': {
                'config': ('load_config', "config"),
            },
            'uptodate': [result_dep("package_worker_gen:export_meta_package")],
            'verbosity': 2,
        }
        # and deploy all resulting artefacts to the repository
        yield {
            'name': 'package_worker_deploy',
            'actions': [(deploy_release, )],
            'getargs': {
                'packages': ('package_worker_gen:package_worker_build',
                             "packages"),
                'config': ('load_config', "config"),
            },
            'uptodate': [
                result_dep('package_worker_gen:package_worker_build'),
            ],
            'verbosity': 2,
        }
def task_send_email():
    """Placeholder task: announce a new release by e-mail.

    Runs again whenever the result of the `version` task changes.
    """
    return {
        'actions': ['echo "TODO: send an email"'],
        # doit requires 'uptodate' to be a list/tuple of checkers;
        # a bare result_dep instance fails task validation.
        'uptodate': [result_dep('version')],
    }