def build_xsl_reports(
        locate_root_dir,
        tag,
        expected_results_file,
        failures_markup_file,
        comment_file,
        results_dir,
        result_file_prefix,
        dont_collect_logs=0,
        reports=report_types,
        warnings=[],
        user=None,
        upload=False):

    run_date = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())

    root_paths.append(locate_root_dir)
    root_paths.append(results_dir)

    bin_boost_dir = os.path.join(locate_root_dir, 'bin', 'boost')

    output_dir = os.path.join(results_dir, result_file_prefix)
    utils.makedirs(output_dir)

    if expected_results_file != '':
        expected_results_file = os.path.abspath(expected_results_file)
    else:
        expected_results_file = os.path.abspath(
            map_path('empty_expected_results.xml'))

    extended_test_results = os.path.join(
        output_dir, 'extended_test_results.xml')

    execute_tasks(
        tag,
        user,
        run_date,
        comment_file,
        results_dir,
        output_dir,
        reports,
        warnings,
        extended_test_results,
        dont_collect_logs,
        expected_results_file,
        failures_markup_file)

    if upload:
        upload_dir = 'regression-logs/'
        utils.log('Uploading results into "%s" [connecting as %s]...'
                  % (upload_dir, user))
        archive_name = '%s.tar.gz' % result_file_prefix
        utils.tar(os.path.join(results_dir, result_file_prefix), archive_name)
        utils.sourceforge.upload(
            os.path.join(results_dir, archive_name), upload_dir, user)
        utils.sourceforge.untar(
            os.path.join(upload_dir, archive_name), user, background=True)

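# Hedged usage sketch for build_xsl_reports above. All paths, the tag and
# the user below are illustrative assumptions, not values from the original
# source; report_types and root_paths are assumed to be module-level globals,
# as the function's defaults and body imply.
def _example_build_reports():
    build_xsl_reports(
        locate_root_dir='/home/tester/boost',     # hypothetical path
        tag='develop',                            # hypothetical tag
        expected_results_file='',                 # falls back to empty_expected_results.xml
        failures_markup_file='explicit-failures-markup.xml',
        comment_file='comment.html',
        results_dir='/home/tester/results',       # hypothetical path
        result_file_prefix='tester-runner',       # hypothetical prefix
        user='tester',                            # hypothetical SourceForge user
        upload=False,                             # skip the SourceForge upload step
    )
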
def build_xsl_reports(
        locate_root_dir,
        tag,
        expected_results_file,
        failures_markup_file,
        comment_file,
        results_dir,
        result_file_prefix,
        dont_collect_logs=0,
        reports=report_types,
        v2=0,
        user=None,
        upload=False):

    run_date = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())

    test_results_file = os.path.join(results_dir, 'test_results.xml')
    bin_boost_dir = os.path.join(locate_root_dir, 'bin', 'boost')

    if v2:
        import merger
        merger.merge_logs(tag, user, results_dir, test_results_file,
                          dont_collect_logs)
    else:
        utils.log(' dont_collect_logs: %s' % dont_collect_logs)
        if not dont_collect_logs:
            with open(test_results_file, 'w+') as f:
                f.write('<tests>\n')
                runner.collect_test_logs([bin_boost_dir], f)
                f.write('</tests>\n')

    make_result_pages(
        test_results_file,
        expected_results_file,
        failures_markup_file,
        tag,
        run_date,
        comment_file,
        results_dir,
        result_file_prefix,
        reports,
        v2)

    if v2 and upload:
        upload_dir = 'regression-logs/'
        utils.log('Uploading v2 results into "%s" [connecting as %s]...'
                  % (upload_dir, user))
        archive_name = '%s.tar.gz' % result_file_prefix
        utils.tar(os.path.join(results_dir, result_file_prefix), archive_name)
        utils.sourceforge.upload(
            os.path.join(results_dir, archive_name), upload_dir, user)
        utils.sourceforge.untar(
            os.path.join(upload_dir, archive_name), user, background=True)

def build(self, path=None, tag=None, quiet=False, fileobj=None,
          nocache=False, rm=False):
    remote = context = headers = None
    if path is None and fileobj is None:
        raise Exception("Either path or fileobj needs to be provided.")

    if fileobj is not None:
        # Wrap a single Dockerfile-like file object into an in-memory
        # tar build context.
        context = utils.mkbuildcontext(fileobj)
    elif (path.startswith('http://') or path.startswith('https://') or
          path.startswith('git://') or path.startswith('github.com/')):
        # Remote URL: let the daemon fetch the build context itself.
        remote = path
    else:
        # Tar up a local directory as the build context.
        context = utils.tar(path)

    u = self._url('/build')
    params = {
        't': tag,
        'remote': remote,
        'q': quiet,
        'nocache': nocache,
        'rm': rm
    }
    if context is not None:
        headers = {'Content-Type': 'application/tar'}

    res = self._result(self.post(
        u, context, params=params, headers=headers, stream=True))
    if context is not None:
        context.close()

    # Extract the image id from the daemon's streamed build output.
    srch = r'Successfully built ([0-9a-f]+)'
    match = re.search(srch, res)
    if not match:
        return None, res
    return match.group(1), res

def build(self, path=None, tag=None, quiet=False, fileobj=None,
          nocache=False):
    remote = context = headers = None
    if path is None and fileobj is None:
        raise Exception("Either path or fileobj needs to be provided.")

    if fileobj is not None:
        context = utils.mkbuildcontext(fileobj)
    elif (path.startswith('http://') or path.startswith('https://') or
          path.startswith('git://') or path.startswith('github.com/')):
        remote = path
    else:
        context = utils.tar(path)

    u = self._url('/build')
    params = {
        't': tag,
        'remote': remote,
        'q': quiet,
        'nocache': nocache
    }
    if context is not None:
        headers = {'Content-Type': 'application/tar'}

    res = self._result(self.post(
        u, context, params=params, headers=headers, stream=True))
    if context is not None:
        context.close()

    srch = r'Successfully built ([0-9a-f]+)'
    match = re.search(srch, res)
    if not match:
        return None, res
    return match.group(1), res

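# Hedged usage sketch for the build() methods above: assumes a docker-py
# style client object exposing build(); how the client is constructed
# (base_url and so on) is left out as an assumption. The path and tag are
# illustrative.
def _example_docker_build(client):
    # Build from a local directory containing a Dockerfile.
    image_id, output = client.build(path='.', tag='myrepo/myimage')
    if image_id is None:
        raise RuntimeError('Build failed; daemon output:\n%s' % output)
    return image_id
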
def create(config=None, config_file=None, force=False, dryrun=False,
           no_validate=False, verbose=True):
    """Creates an agent package (tar.gz)

    This will try to identify the distribution of the host you're running on.
    If it can't identify it for some reason, you'll have to supply a
    `distribution` (e.g. Ubuntu) config object in the config.yaml.
    The same goes for the `release` (e.g. Trusty).

    A virtualenv will be created under cloudify/env.

    The order of the modules' installation is as follows:

    cloudify-rest-service
    cloudify-plugins-common
    cloudify-script-plugin
    cloudify-diamond-plugin
    cloudify-agent
    any additional modules specified under `additional_modules` in the yaml.
    any additional plugins specified under `additional_plugins` in the yaml.

    Once all modules are installed, excluded modules will be uninstalled;
    installation validation will occur; a tar.gz file will be created.
    The `output_tar` config object can be specified to determine the path
    to the output file. If omitted, a default path will be given with the
    format `DISTRIBUTION-RELEASE-agent.tar.gz`.
    """
    set_global_verbosity_level(verbose)

    # this will be updated with installed plugins and modules and used
    # to validate the installation
    final_set = {'modules': [], 'plugins': []}

    if not config:
        config = _import_config(config_file) if config_file else \
            _import_config()
        config = {} if not config else config

    name_params = {}
    try:
        (distro, release) = get_os_props()
        name_params['distro'] = config.get('distribution', distro)
        name_params['release'] = config.get('release', release)
        name_params['version'] = config.get(
            'version', os.environ.get('VERSION', None))
        name_params['milestone'] = config.get(
            'milestone', os.environ.get('PRERELEASE', None))
        name_params['build'] = config.get(
            'build', os.environ.get('BUILD', None))
    except Exception as ex:
        lgr.error(
            'Distribution not found in configuration '
            'and could not be retrieved automatically. '
            'Please specify the distribution in the yaml. '
            '({0})'.format(ex.message))
        sys.exit(codes.errors['could_not_identify_distribution'])

    python = config.get('python_path', '/usr/bin/python')
    venv = DEFAULT_VENV_PATH
    venv_already_exists = utils.is_virtualenv(venv)
    destination_tar = config.get('output_tar', _name_archive(**name_params))

    lgr.debug('Distribution is: {0}'.format(name_params['distro']))
    lgr.debug('Distribution release is: {0}'.format(name_params['release']))
    lgr.debug('Python path is: {0}'.format(python))
    lgr.debug('Destination tarfile is: {0}'.format(destination_tar))

    if not dryrun:
        _make_venv(venv, python, force)

    _handle_output_file(destination_tar, force)

    modules = _set_defaults()
    modules = _merge_modules(modules, config)

    if dryrun:
        set_global_verbosity_level(True)
    lgr.debug('Modules and plugins to install: {0}'.format(json.dumps(
        modules, sort_keys=True, indent=4, separators=(',', ': '))))
    if dryrun:
        lgr.info('Dryrun complete')
        sys.exit(codes.notifications['dryrun_complete'])

    final_set = _install(modules, venv, final_set)
    _uninstall_excluded(modules, venv)
    if not no_validate:
        _validate(final_set, venv)
    utils.tar(venv, destination_tar)

    lgr.info('The following modules and plugins were installed '
             'in the agent:\n{0}'.format(utils.get_installed(venv)))

    # if keep_virtualenv is explicitly set to false, the virtualenv
    # will be deleted.
    # if keep_virtualenv is not in the config, the virtualenv is deleted
    # only when it did not already exist before this run.
    if ('keep_virtualenv' in config and not config['keep_virtualenv']) \
            or ('keep_virtualenv' not in config and not venv_already_exists):
        lgr.info('Removing origin virtualenv...')
        shutil.rmtree(venv)

    lgr.info('Process complete!')

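# Hedged usage sketch for create() above: the config keys shown are the ones
# the function reads; the concrete values (distribution, output path) are
# illustrative assumptions, not defaults from the original source.
def _example_create_agent_package():
    create(
        config={
            'distribution': 'Ubuntu',    # skip auto-detection via get_os_props()
            'release': 'trusty',
            'python_path': '/usr/bin/python',
            'output_tar': '/tmp/Ubuntu-trusty-agent.tar.gz',  # hypothetical
            'keep_virtualenv': True,     # leave cloudify/env in place afterwards
        },
        force=True,
        verbose=True,
    )
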
def build_xsl_reports(
        locate_root_dir,
        tag,
        expected_results_file,
        failures_markup_file,
        comment_file,
        results_dir,
        result_file_prefix,
        dont_collect_logs=0,
        reports=report_types,
        v2=0,
        user=None,
        upload=False):

    run_date = time.strftime('%a, %d %b %Y %H:%M:%S +0000', time.gmtime())

    test_results_file = os.path.join(results_dir, 'test_results.xml')
    bin_boost_dir = os.path.join(locate_root_dir, 'bin', 'boost')

    if v2:
        import merger
        merger.merge_logs(tag, user, results_dir, test_results_file,
                          dont_collect_logs)
    else:
        utils.log(' dont_collect_logs: %s' % dont_collect_logs)
        if not dont_collect_logs:
            with open(test_results_file, 'w+') as f:
                f.write('<tests>\n')
                runner.collect_test_logs([bin_boost_dir], f)
                f.write('</tests>\n')

    make_result_pages(
        test_results_file,
        expected_results_file,
        failures_markup_file,
        tag,
        run_date,
        comment_file,
        results_dir,
        result_file_prefix,
        reports,
        v2)

    if v2 and upload:
        upload_dir = 'regression-logs/'
        utils.log('Uploading v2 results into "%s" [connecting as %s]...'
                  % (upload_dir, user))
        archive_name = '%s.tar.gz' % result_file_prefix
        utils.tar(os.path.join(results_dir, result_file_prefix), archive_name)
        utils.sourceforge.upload(
            os.path.join(results_dir, archive_name), upload_dir, user)
        utils.sourceforge.untar(
            os.path.join(upload_dir, archive_name), user, background=True)

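# For reference, the two v2 variants above differ only in the run_date stamp
# they generate (example outputs are illustrative):
#
#   time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
#       -> '2005-06-13T14:30:00Z'              (ISO 8601, UTC)
#   time.strftime('%a, %d %b %Y %H:%M:%S +0000', time.gmtime())
#       -> 'Mon, 13 Jun 2005 14:30:00 +0000'   (RFC 2822 style, UTC)
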
def create(config=None, config_file=None, force=False, verbose=True):
    """Creates an agent package (tar.gz)

    This will try to identify the distribution of the host you're running on.
    If it can't identify it for some reason, you'll have to supply a
    `distribution` config object in the config.yaml.

    A virtualenv will be created under `/DISTRIBUTION-agent/env` unless
    configured in the yaml under the `venv` property.

    The order of the modules' installation is as follows:

    cloudify-rest-service
    cloudify-plugins-common
    cloudify-script-plugin
    cloudify-diamond-plugin
    agent and plugin installers from cloudify-manager
    any additional modules specified under `additional_modules` in the yaml.

    Once all modules are installed, a tar.gz file will be created. The
    `output_tar` config object can be specified to determine the path to the
    output file. If omitted, a default path will be given with the format
    `/DISTRIBUTION-agent.tar.gz`.
    """
    _set_global_verbosity_level(verbose)

    if not config:
        config = _import_config(config_file) if config_file else \
            _import_config()
        config = {} if not config else config
    try:
        distro = config.get('distribution', platform.dist()[0])
        release = config.get('release', platform.dist()[2])
    except Exception as ex:
        lgr.error('distribution not found in configuration '
                  'and could not be retrieved automatically. '
                  'please specify the distribution in the yaml. '
                  '({0})'.format(ex.message))
        sys.exit(1)

    python = config.get('python_path', '/usr/bin/python')
    venv = config.get('venv', DEFAULT_VENV_PATH.format(distro, release))
    keep_venv = config.get('keep_venv', False)
    destination_tar = config.get(
        'output_tar', DEFAULT_OUTPUT_TAR_PATH.format(distro, release))

    lgr.debug('distribution is: {0}'.format(distro))
    lgr.debug('distribution release is: {0}'.format(release))
    lgr.debug('python path is: {0}'.format(python))
    lgr.debug('venv is: {0}'.format(venv))
    lgr.debug('destination tarfile is: {0}'.format(destination_tar))

    # virtualenv
    if os.path.isdir(venv):
        if force:
            lgr.info('removing previous venv...')
            shutil.rmtree(venv)
        else:
            lgr.error('virtualenv already exists at {0}. '
                      'You can use the -f flag or delete the '
                      'previous env.'.format(venv))
            sys.exit(2)
    lgr.info('creating virtual environment: {0}'.format(venv))
    utils.make_virtualenv(venv, python)

    # output file
    if os.path.isfile(destination_tar) and force:
        lgr.info('removing previous agent package...')
        os.remove(destination_tar)
    if os.path.exists(destination_tar):
        lgr.error('destination tar already exists: {0}'.format(
            destination_tar))
        sys.exit(9)

    # create modules dictionary
    lgr.debug('retrieving modules to install...')
    modules = {}
    modules['base'] = BASE_MODULES
    modules['management'] = MANAGEMENT_MODULES
    modules['additional'] = []
    modules = _merge_modules(modules, config)
    lgr.debug('modules to install: {0}'.format(json.dumps(
        modules, sort_keys=True, indent=4, separators=(',', ': '))))

    # install external
    lgr.info('installing external modules...')
    for ext_module in EXTERNAL_MODULES:
        utils.install_module(ext_module, venv)

    # install base
    lgr.info('installing base modules...')
    base = modules['base']
    if base.get('rest_client'):
        utils.install_module(base['rest_client'], venv)
    if base.get('plugins_common'):
        utils.install_module(base['plugins_common'], venv)
    if base.get('script_plugin'):
        utils.install_module(base['script_plugin'], venv)
    if base.get('diamond_plugin'):
        utils.install_module(base['diamond_plugin'], venv)

    # install management
    lgr.debug('retrieving management modules code...')
    version = config.get('management_modules_version', 'master')
    manager_tmp_dir = _get_manager(MANAGER_REPO_URL.format(version), venv)
    lgr.info('installing management modules...')
    for mgmt_module in modules['management'].values():
        if os.path.isdir(os.path.join(manager_tmp_dir, mgmt_module)):
            utils.install_module(os.path.join(
                manager_tmp_dir, mgmt_module), venv)
        elif mgmt_module:
            utils.install_module(mgmt_module, venv)

    # install additional
    lgr.info('installing additional plugins...')
    for module in modules['additional']:
        utils.install_module(module, venv)

    # create agent tar
    lgr.info('creating tar file: {0}'.format(destination_tar))
    utils.tar(venv, destination_tar)

    if not keep_venv:
        lgr.info('removing origin venv')
        shutil.rmtree(venv)

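# Hedged sketch: the utils.tar helper used by the agent-packager snippets
# above (utils.tar(venv, destination_tar)) is not shown in this section. A
# minimal implementation consistent with those call sites, using only the
# standard library, might look like this; the name _example_utils_tar and the
# exact archive layout are assumptions.
import os
import tarfile

def _example_utils_tar(source, destination):
    # Write a gzip-compressed tarball containing `source` at the archive root.
    with tarfile.open(destination, 'w:gz') as archive:
        archive.add(source, arcname=os.path.basename(source))
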
      (mnist.train.num_examples, total_batch, batch_size))

start_time = datetime.datetime.now()

for epoch in range(total_epoch):
    total_cost = 0

    for i in range(total_batch):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        batch_xs = batch_xs.reshape(-1, 28, 28, 1)

        _, cost_val = sess.run([optimizer, loss],
                               feed_dict={X: batch_xs, Y: batch_ys})
        total_cost += cost_val

    print("total cost : %s" % total_cost)

print("--- Training time : {0} seconds /w {1} GPUs ---".format(
    datetime.datetime.now() - start_time, gpu_num))

print("Export Serving Model!")
export_base_dir = create_export_dir(args.export_dir, args.model_name)
export_dir = concat_timestamp(export_base_dir)
tf.saved_model.simple_save(sess, export_dir,
                           inputs={"image": X}, outputs={'classes': Y})
tar(export_base_dir, export_dir, args.model_name)

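# Hedged follow-up for the export above: a smoke test that reloads the
# SavedModel written by simple_save, using the TF 1.x loader API. export_dir
# is the timestamped directory produced by concat_timestamp (a project helper
# not shown here); the function name is illustrative.
def _example_reload_saved_model(export_dir):
    import tensorflow as tf
    with tf.Session(graph=tf.Graph()) as load_sess:
        # Loads the graph and variables tagged for serving back into a
        # fresh session.
        tf.saved_model.loader.load(
            load_sess, [tf.saved_model.tag_constants.SERVING], export_dir)
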