def commit_starter_project(self):
    """Commit the starter project's pending changes, tagged with the new version."""
    pushdir(self.details.starter_project_dir)
    commit_message = F"'Update to Approvals {self.details.new_version}'"
    run(["git", "commit", "-m", commit_message])
    popdir()
def install_packages():
    """Get required Ubuntu/Debian packages.

    It is OK if they are already installed.

    Raises Exception if any apt-get install returns a non-zero exit code.
    """
    for package in ['apache2', 'subversion', 'trac', 'libapache2-svn',
                    'libapache2-mod-python', 'libapache2-mod-python-doc']:
        if package == 'trac':
            # Hack to get TRAC 0.11.4 on Ubuntu <9.04
            # See http://serverfault.com/questions/11851/attachments-not-showing-up-in-trac
            # Remove the distro trac and install the pinned version instead.
            s = 'apt-get remove trac'
            run(s)
            s = 'easy_install http://svn.edgewall.org/repos/trac/tags/trac-0.11.4'
            run(s)
            continue

        # Install the package, capturing output to a per-package log file.
        s = 'apt-get -y install %s > %s_install.log' % (package, package)
        err = run(s)  # run() returns the shell exit status
        if err != 0:
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s_install.log for details' % package
            raise Exception(msg)
def install_ubuntu_packages():
    """Get required Ubuntu packages for riab_server.

    It is OK if they are already installed.

    Raises Exception (with pointers to the log files) if installation fails.
    """
    header('Installing Ubuntu packages')

    s = 'apt-get clean'
    run(s, verbose=True)

    for package in ['python-setuptools']:
        s = 'apt-get -y install %s' % package
        log_base = '%s_install' % package
        try:
            run(s,
                stdout=log_base + '.out',
                stderr=log_base + '.err',
                verbose=True)
        except Exception:
            # Narrowed from a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit while re-raising a generic error.
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s.out and %s.err for details' % (log_base,
                                                                   log_base)
            raise Exception(msg)
def delete_style(self, style_name, verbose=False):
    """Delete a named style from the local GeoServer via its REST API."""
    # TODO: add test
    cmd = ('curl -u %s:%s -d "purge=true" -X DELETE '
           'localhost:8080/geoserver/rest/styles/%s'
           % (self.geoserver_username, self.geoserver_userpass, style_name))
    run(cmd)
def update_starter_project(self):
    """Refresh the starter project to use the newly released single header."""
    lib_dir = F"{self.details.starter_project_dir}/lib"
    old_header_path = F"{lib_dir}/{self.details.old_single_header}"
    new_header_path = F"{lib_dir}/{self.details.new_single_header}"

    # Start from a pristine working tree in the starter project.
    pushdir(self.details.starter_project_dir)
    # 'git clean -f' deletes untracked files, but leaves ignored files and
    # untracked files inside new, untracked directories alone.
    run(["git", "clean", "-f"])
    run(["git", "reset", "--hard"])
    popdir()

    shutil.copyfile(self.details.release_new_single_header, new_header_path)

    # Drop the previous release's header, if it is still around.
    if os.path.exists(old_header_path):
        os.remove(old_header_path)

    # Point the "redirect" header at the new version.
    replace_text_in_file(
        F"{lib_dir}/ApprovalTests.hpp",
        self.details.old_version,
        self.details.new_version)

    # Update the header file name referenced by the Visual Studio project.
    replace_text_in_file(
        F"{self.details.starter_project_dir}/visual-studio-2017/StarterProject.vcxproj",
        self.details.old_single_header,
        self.details.new_single_header)
def run(self, filename, filter_set, asset_path, output_path, interactive=False, verbose=False):
    """Invoke the Havok Filter Manager on *filename* with the given filter set."""
    arguments = ["-p", asset_path + "\\",
                 "-o", output_path + "\\",
                 "-s", filter_set]
    if interactive:
        arguments += ["-i"]
    arguments += [filename]

    command = [self.havok_filter_manager] + arguments
    if verbose:
        utilities.print_line()
        print("Starting Havok Filter Manager...")
        utilities.print_line(True)
        print(' '.join(command))

    # It's important that the current directory is set to the output path
    # or it won't output to the expected directory
    utilities.run(command, False, output_path)
    return
def run_nc2prof(windfield_directory, verbose=True): """Run nc2prof - extract wind profiles from NCEP data Requires - input file - NCEP wind files TMP.nc HGT.nc UGRD.nc VGRD.nc """ # FIXME: Perhaps include into AIM class (somehow) Fall3d_dir = get_fall3d_home() utilities_dir = os.path.join(Fall3d_dir, 'Utilities') executable = os.path.join(utilities_dir, 'nc2prof', 'nc2prof.exe') if verbose: header('Running nc2prof in %s' % windfield_directory) cmd = 'cd %s; %s ' % (windfield_directory, executable) logfile = 'run_nc2prof.log' run(cmd, verbose=verbose, stdout=logfile, stderr='/dev/null')
def set_granum(self, verbose=True):
    """Create grainsize profile

    Requires
        - input file
    """
    grainfilename = self.scenario_name + '.grn'
    if grainfilename in os.listdir('.'):
        # A pre-computed grain file exists in the current directory:
        # copy it into place instead of running SetGrn.
        print 'Grainfile found - will not run SetGrn'
        s = 'cp %s %s' % (grainfilename, self.grainfile)
        run(s)
        return

    executable = os.path.join(self.utilities_dir, 'SetGrn', 'SetGrn.PUB.exe')
    logfile = self.logbasepath + '.SetGrn.log'

    if verbose:
        header('Setting grain size (SetGrn)')

    # Command line: <exe> <log> <input file> <grain file>
    cmd = '%s %s %s %s' % (executable, logfile, self.inputfile, self.grainfile)
    self.runscript(cmd, 'SetGrn', logfile, lines=4, verbose=verbose)
def set_database(self, verbose=True):
    """Create meteorological database

    Requires
        - input file
        - topography
        - wind profile
    """
    dbsfilename = self.scenario_name + '.dbs.nc'
    if dbsfilename in os.listdir('.'):
        # A pre-built database exists in the current directory:
        # copy it into place instead of running SetDbs.
        print 'DBS file found - will not run SetDbs'
        s = 'cp %s %s' % (dbsfilename, self.databasefile)
        run(s)
        return

    executable = os.path.join(self.utilities_dir, 'SetDbs', 'SetDbs.PUB.exe')
    logfile = self.logbasepath + '.SetDbs.log'

    if verbose:
        header('Building meteorological database (SetDbs)')

    # Command line: <exe> <log> <input> <wind profile> <database>
    #               <topography> <meteorological model>
    cmd = '%s '*7 % (executable, logfile, self.inputfile,
                     self.wind_profile, self.databasefile,
                     self.topography, self.meteorological_model)
    self.runscript(cmd, 'SetDbs', logfile, lines=5, verbose=verbose)
def set_database(self, verbose=True):
    """Create meteorological database

    Requires
        - input file
        - topography
        - wind profile
    """
    dbsfilename = self.scenario_name + '.dbs.nc'
    if dbsfilename in os.listdir('.'):
        # A pre-built database exists in the current directory:
        # copy it into place instead of running SetDbs.
        print 'DBS file found - will not run SetDbs'
        s = 'cp %s %s' % (dbsfilename, self.databasefile)
        run(s)
        return

    executable = os.path.join(self.utilities_dir, 'SetDbs', 'SetDbs.PUB.exe')
    logfile = self.logbasepath + '.SetDbs.log'

    if verbose:
        header('Building meteorological database (SetDbs)')

    # Command line: <exe> <log> <input> <wind profile> <database>
    #               <topography> <meteorological model>
    cmd = '%s ' * 7 % (executable, logfile, self.inputfile,
                       self.wind_profile, self.databasefile,
                       self.topography, self.meteorological_model)
    self.runscript(cmd, 'SetDbs', logfile, lines=5, verbose=verbose)
def install_ubuntu_packages():
    """Get required Ubuntu packages for geoserver.

    It is OK if they are already installed.

    Raises Exception (with pointers to the log files) if installation fails.
    """
    header('Installing Ubuntu packages')

    s = 'apt-get clean'
    run(s, verbose=True)

    #s = 'apt-get update'
    #run(s, verbose=True)

    for package in ['apache2', 'libxml2', 'libxml2-dev', 'libxslt1-dev',
                    'postgresql', 'postgis']:
        s = 'apt-get -y install %s' % package
        log_base = '%s_install' % package
        try:
            run(s,
                stdout=log_base + '.out',
                stderr=log_base + '.err',
                verbose=True)
        except Exception:
            # Narrowed from a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit while re-raising a generic error.
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s.out and %s.err for details' % (log_base,
                                                                   log_base)
            raise Exception(msg)
def install_ubuntu_packages():
    """Get required Ubuntu build packages.

    It is OK if they are already installed.

    Raises Exception (with pointers to the log files) if installation fails.
    """
    header('Installing Ubuntu packages')

    s = 'apt-get clean'
    run(s, verbose=True)

    #s = 'apt-get update'
    #run(s, verbose=True)

    for package in ['build-essential', 'libxaw7-dev']:
        # Possibly also 'netcdfg-dev'
        s = 'apt-get -y install %s' % package
        log_base = '%s_install' % package
        try:
            run(s,
                stdout=log_base + '.out',
                stderr=log_base + '.err',
                verbose=True)
        except Exception:
            # Narrowed from a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit while re-raising a generic error.
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s.out and %s.err for details' % (log_base,
                                                                   log_base)
            raise Exception(msg)
def publish_tweet(self):
    """Open a pre-filled Twitter intent announcing the release."""
    # Draft the tweet
    check_step(
        "that you have created a screenshot of the release notes, for the Tweet"
    )
    # URL-encoded tweet body; opened in the browser for manual review/posting.
    tweet_url = F"'https://twitter.com/intent/tweet?text=%23ApprovalTests.cpp+{self.details.new_version}+released%2C+now+with+___%21%0D%0Ahttps%3A%2F%2Fgithub.com%2Fapprovals%2FApprovalTests.cpp%2Freleases%2Ftag%2F{self.details.new_version}+%0D%0Aor+try+the+starter+project%3A+https%3A%2F%2Fgithub.com%2Fapprovals%2FApprovalTests.cpp.StarterProject%0D%0AThanks+%40LlewellynFalco+%40ClareMacraeUK+%21'"
    run(["open", tweet_url])
    check_step("that the tweet is published")
def publish_starter_project(self):
    """Commit, push and visually confirm the starter project update."""
    self.commit_starter_project()
    self.push_starter_project()
    # Open the commit history so the push can be verified by eye.
    commits_url = "https://github.com/approvals/ApprovalTests.cpp.StarterProject/commits/master"
    run(["open", commits_url])
    check_step("that the starter project is published")
def restore_output(self, verbose=False):
    """Move files back for post processing."""
    # Entries ending in 'h' are the per-hour sub directories (e.g. '03h');
    # move their contents back up into the output directory.
    for subdir in os.listdir(self.output_dir):
        if not subdir.endswith('h'):
            continue
        run('mv %s/%s/* %s' % (self.output_dir, subdir, self.output_dir))
def organise_output(self, verbose=False): """Organise output files in directories by time Output files named e.g. merapi.grd.18may2010.03:00.depload.grd are renamed to merapi.03h.depload.asc and will all go to a sub directory named 03h """ # FIXME: I think it is better to place them in their final locations from the start. # Move log files away #logdir = os.path.join(self.output_dir, 'logfiles') #makedir(logdir) #for file in os.listdir(self.output_dir): # _, ext = os.path.splitext(file) # if ext in ['.log', '.stdout', '.stderr']: # filename = os.path.join(self.output_dir, file) # s = 'mv %s %s' % (filename, logdir) # run(s, verbose=False) # FIXME: This really needs to use a proper standard for time stamps dirname = None last_hour = -1 last_dir = None for file in os.listdir(self.output_dir): if file.startswith(self.scenario_name): fields = file.split('.') if fields[1][-1] == 'h': dirname = os.path.join(self.output_dir, fields[1]) filename = os.path.join(self.output_dir, file) makedir(dirname) s = 'mv %s %s' % (filename, dirname) run(s, verbose=verbose) # Record last hour hour = int(fields[1][:-1]) if hour > last_hour: last_hour = hour last_dir = dirname # Create shortcut to last dir if last_dir: s = 'ln -s %s %s/final_output' % (last_dir, self.output_dir) try: run(s, verbose=verbose) except: pass
def upload_release_to_github(self):
    """Stage a GitHub release: notes on the clipboard, pages opened for review."""
    # Draft the upload to github
    notes = read_file(self.details.new_release_notes_path)
    pyperclip.copy(notes)
    print('The release notes are on the clipboard')

    new_release_url = F"'https://github.com/approvals/ApprovalTests.cpp/releases/new?tag={self.details.new_version}&title=Single%20Hpp%20File%20-%20{self.details.new_version}'"
    run(["open", new_release_url])
    run(["open", self.details.release_dir])

    check_step("that the release is published")
def verify_signature(john_home):
    """Verify the PGP signature of the downloaded John the Ripper tarball."""
    # Verify signature
    os.chdir(john_home)
    print 'Current working dir', os.getcwd()

    # Import the Openwall public keys, then check the tarball signature.
    # Relies on module-level 'package_name'.
    s = 'pgp -ka openwall-signatures.asc'
    run(s, verbose=True)
    s = 'pgp %s.tar.gz.sign %s.tar.gz' % (package_name, package_name)
    run(s, verbose=True)
def create_useraccount_interactively(username):
    """Add or update *username* in the htpasswd file (htpasswd prompts for the password)."""
    # -c creates the file on first use; -m selects MD5 hashing.
    cmd = 'htpasswd -m' if os.path.isfile(password_filename) else 'htpasswd -cm'
    run(cmd + ' %s %s' % (password_filename, username))
    # Record that the password file was touched.
    filenames_updated[password_filename] = 1
def predict(model_file_path, lable_feature_map_file_path, feature_extractors, input_file_path, prediction_file_path, output_file_path):
    """Run liblinear prediction over *input_file_path* and parse the results.

    Builds a liblinear-formatted corpus, invokes the external predictor
    with probability estimates (-b 1), then converts the raw predictions
    into *output_file_path*.
    """
    # build corpus for prediction
    liblinear_path = input_file_path + ".liblinear"
    corpus = Corpus(input_file_path, lable_feature_map_file_path,
                    feature_extractors)
    corpus.build(liblinear_path, is_for_training=False)

    # e.g.: predict -q -b 1 test.liblinear model.liblinear predictions.liblinear
    run(PREDICT_CMD + " -q -b 1 %s %s %s"
        % (liblinear_path, model_file_path, prediction_file_path))

    label_map = LabelFeatureMap.load_label_map(lable_feature_map_file_path)
    PredictionParser(label_map).parse_predictions(prediction_file_path,
                                                  output_file_path)
def evaluate(model_file_path, lable_feature_map_file_path, feature_extractors, input_file_path, prediction_output_file_path, eval_output_file_path):
    """Run liblinear prediction and capture its evaluation report.

    Predictions go to *prediction_output_file_path*; the predictor's
    stdout (accuracy etc.) is redirected into *eval_output_file_path*.
    """
    # build corpus for evaluation
    liblinear_path = input_file_path + ".liblinear"
    corpus = Corpus(input_file_path, lable_feature_map_file_path,
                    feature_extractors)
    corpus.build(liblinear_path, is_for_training=False)

    run(PREDICT_CMD + " -b 1 %s %s %s > %s"
        % (liblinear_path, model_file_path,
           prediction_output_file_path, eval_output_file_path))
def install_openlayers():
    """Install OpenLayers locally

    This will allow web frontend to run without Internet access
    """
    target_dir = '/var/www/openlayers'
    # Check out the trunk, then hand ownership to the web server user.
    for cmd in ('svn checkout http://svn.openlayers.org/trunk/openlayers/ %s' % target_dir,
                'chown -R www-data:www-data %s' % target_dir):
        run(cmd, verbose=True)
def run_startup(): """Run geoserver startup script """ #cmd = '/etc/init.d/geoserver start' #run(cmd, verbose=True) geo_home = '/usr/local/%s' % geoserver cmd = 'export JAVA_HOME=%s; export GEOSERVER_HOME=%s; $GEOSERVER_HOME/bin/startup.sh &' % (java_home, geo_home) run(cmd, verbose=True) print 'Done'
def download():
    """ Download ncview

    Fetches the tarball (module-level 'url'/'tarball') unless a local
    copy already exists.
    """
    path = os.path.join(url, tarball)
    if not os.path.isfile(tarball):
        # FIXME: Should also check integrity of tgz file.
        cmd = 'wget ' + path
        run(cmd, verbose=True)
    else:
        print 'Using tarball: %s' % tarball
def Xgenerate_contours(self, interval=1, verbose=True): """Contour NetCDF grids directly """ # FIXME (Ole): This does not work - probably due to the GDAL NetCDF driver ignoring coordinate system if verbose: header('Contouring NetCDF thickness grids') for filename in os.listdir(self.output_dir): if filename.endswith('.res.nc'): pathname = os.path.join(self.output_dir, filename) if verbose: print ' ', pathname basename, ext = os.path.splitext(pathname) tiffile = basename + '.tif' shpfile = basename + '.shp' kmlfile = basename + '.kml' prjfile = basename + '.prj' # Generate GeoTIFF raster netcdf_subdata = 'NETCDF:"%s":THICKNESS' % pathname s = 'gdal_translate -of GTiff -b 1 %s %s' % (netcdf_subdata, tiffile) # FIXME: Band is hardwired run_with_errorcheck(s, tiffile, verbose=verbose) # Generate contours as shapefiles s = '/bin/rm -rf %s' % shpfile # Clear the way run(s, verbose=False) s = 'gdal_contour -i %f %s %s' % (interval, tiffile, shpfile) run_with_errorcheck(s, shpfile, verbose=verbose) # Generate KML if self.WKT_projection: # Create associated projection file fid = open(prjfile, 'w') fid.write(self.WKT_projection) fid.close() s = 'ogr2ogr -f KML -t_srs EPSG:4623 -s_srs %s %s %s' % (prjfile, kmlfile, shpfile) else: s = 'ogr2ogr -f KML -t_srs EPSG:4623 %s %s' % (kmlfile, shpfile) run_with_errorcheck(s, kmlfile, verbose=verbose)
def store_inputdata(self, verbose=False):
    """Create exact copy of input data into output area

    The intention is to ensure that all output has an audit trail.
    """
    audit_dir = os.path.join(self.output_dir, 'input_data')
    makedir(audit_dir)

    # Store input files
    if os.path.exists(self.params['wind_profile']):
        s = 'cp %s %s' % (self.params['wind_profile'], audit_dir)
        try:
            run(s, verbose=verbose)
        except:
            # Best effort - a missing/uncopyable wind profile must not
            # abort the scenario run.
            pass

    #s = 'cp %s %s' % (self.topography_grid, audit_dir)
    #run(s, verbose=verbose)

    # Copy only if scenario is a file.
    scenario_file = self.params['scenario_name'] + '.py'
    if os.path.isfile(scenario_file):
        s = 'cp %s %s' % (scenario_file, audit_dir)
        run(s, verbose=verbose)
    else:
        if verbose:
            print(
                'Scenario file "%s" does not exist. '
                'Assuming scenario was specified as a dictionary'
                % scenario_file)

    # Store actual parameters (as Python file)
    actual_params_file = os.path.join(audit_dir, 'actual_parameters.py')

    #if os.path.isfile(actual_params_file):
    #    run('chmod +w %s' % actual_params_file, verbose=verbose)  # In case it was there already

    fid = open(actual_params_file, 'w')
    fid.write('"""All actual parameters used in scenario %s\n\n'\
              % self.basepath)
    fid.write('This file is automatically generated by AIM\n')
    fid.write('and in serves a log of all input parameters used in\n')
    fid.write('Fall3d/AIM whether supplied or derived.\n')
    fid.write('"""\n\n\n')
    # Write each parameter as a Python assignment; quote string values.
    # NOTE: 'basestring' makes this Python-2 only.
    for param in self.params:
        value = self.params[param]
        if isinstance(value, basestring):
            fid.write('%s = \'%s\'\n' % (param, value))
        else:
            fid.write('%s = %s\n' % (param, value))
    fid.close()
def install_python_packages():
    """Python packages that are not part of Ubuntu.

    Installs OWSLib from SVN if it cannot already be imported.
    """
    # OWSLIB frozen at r1672 (5 August 2010)
    try:
        import owslib
    except ImportError:
        # Narrowed from a bare 'except:' - only a missing module should
        # trigger installation, not e.g. KeyboardInterrupt.
        cmd = 'cd /tmp; svn co -r 1672 http://svn.gispython.org/svn/gispy/OWSLib/trunk OWSLib'
        run(cmd)
        cmd = 'cd /tmp/OWSLib; sudo python setup.py install'
        run(cmd)
def store_inputdata(self, verbose=False):
    """Create exact copy of input data into output area

    The intention is to ensure that all output has an audit trail.
    """
    audit_dir = os.path.join(self.output_dir, 'input_data')
    makedir(audit_dir)

    # Store input files
    if os.path.exists(self.params['wind_profile']):
        s = 'cp %s %s' % (self.params['wind_profile'], audit_dir)
        try:
            run(s, verbose=verbose)
        except:
            # Best effort - a missing/uncopyable wind profile must not
            # abort the scenario run.
            pass

    #s = 'cp %s %s' % (self.topography_grid, audit_dir)
    #run(s, verbose=verbose)

    # Copy only if scenario is a file.
    scenario_file = self.params['scenario_name'] + '.py'
    if os.path.isfile(scenario_file):
        s = 'cp %s %s' % (scenario_file, audit_dir)
        run(s, verbose=verbose)
    else:
        if verbose:
            print ('Scenario file "%s" does not exist. '
                   'Assuming scenario was specified as a dictionary'
                   % scenario_file)

    # Store actual parameters (as Python file)
    actual_params_file = os.path.join(audit_dir, 'actual_parameters.py')

    #if os.path.isfile(actual_params_file):
    #    run('chmod +w %s' % actual_params_file, verbose=verbose)  # In case it was there already

    fid = open(actual_params_file, 'w')
    fid.write('"""All actual parameters used in scenario %s\n\n'\
              % self.basepath)
    fid.write('This file is automatically generated by AIM\n')
    fid.write('and in serves a log of all input parameters used in\n')
    fid.write('Fall3d/AIM whether supplied or derived.\n')
    fid.write('"""\n\n\n')
    # Write each parameter as a Python assignment; quote string values.
    # NOTE: 'basestring' makes this Python-2 only.
    for param in self.params:
        value = self.params[param]
        if isinstance(value, basestring):
            fid.write('%s = \'%s\'\n' % (param, value))
        else:
            fid.write('%s = %s\n' % (param, value))
    fid.close()
def backup(project):
    """Dump one project's SVN repository and TRAC environment into backup_dir.

    Failures are reported as warnings (with the captured error output)
    rather than raised.
    """
    dumppath = join(backup_dir, project)
    makedir(dumppath, cd=False)

    # Dump SVN repository
    projectpath = join(svn_home, project)
    dumpfile = join(dumppath, svndumpname)
    s = 'svnadmin dump %s > %s 2> %s' % (projectpath, dumpfile, errlog)
    err = run(s)  # run() returns the shell exit status
    if err != 0:
        print 'WARNING: SVN dump did not succeed for project %s. Error message was' % project
        run('cat %s' % errlog, verbose=False)

    # Dump TRAC system
    projectpath = join(trac_home, project)
    dumpdir = join(dumppath, tracdumpname)
    run('/bin/rm -rf %s' % dumpdir, verbose=False)  # Clean up in case there was one already
    s = 'trac-admin %s hotcopy %s > %s 2> %s' % (projectpath, dumpdir, logfile, errlog)
    err = run(s)
    if err != 0:
        print 'WARNING: TRAC hotcopy did not succeed for project %s. Error message was' % project
        run('cat %s' % errlog, verbose=False)

    os.remove(errlog)
    os.remove(logfile)
def get_plugins():
    """Get plugins such as REST
    """
    # Download the plugin archive unless it is already present.
    if not os.path.isfile(geoserver_rest_plugin):
        # FIXME: Should also check integrity of zip file.
        run('wget ' + geoserver_rest_plugin_url, verbose=True)

    # Unpack into geoserver installation
    unzip_cmd = ('unzip %s -d /usr/local/%s/webapps/geoserver/WEB-INF/lib'
                 % (geoserver_rest_plugin, geoserver))
    run(unzip_cmd, verbose=True)
def install_postgis_from_source(postgis):
    """Download, build and install PostGIS from a source tarball."""
    makedir(os.path.expanduser('~/Downloads'))

    # Fetch the tarball only if it is not already present.
    if not os.path.exists('%s.tar.gz' % postgis):
        run('wget http://postgis.refractions.net/download/%s.tar.gz' % postgis)

    run('tar xvfz %s.tar.gz' % postgis)
    os.chdir(postgis)

    # Standard autotools build/install sequence.
    for step in ('./configure', 'make', 'make install'):
        run(step)
def train(model_file_path, lable_feature_map_file_path, feature_extractors, input_file_path):
    """Train a liblinear model and compact it with its label/feature map.

    Trains into temporary files, then compacts them into the final
    model and map files and removes the temporaries.
    """
    tmp_model_path = model_file_path + ".tmp"
    tmp_map_path = lable_feature_map_file_path + ".tmp"
    liblinear_path = input_file_path + ".liblinear"

    # Build the liblinear-formatted training corpus.
    corpus = Corpus(input_file_path, tmp_map_path, feature_extractors)
    corpus.build(liblinear_path, is_for_training=True)

    # Train, e.g.:
    #   train -s 6 -e 0.01 -c 3.0 -p 0.1 -B -1 <train.liblinear> <model.liblinear>
    run(TRAIN_CMD + " -s 6 -e 0.01 -c 3.0 -p 0.1 -B -1 %s %s"
        % (liblinear_path, tmp_model_path))

    # Compact model and map into their final files, then drop temporaries.
    compact(tmp_map_path, tmp_model_path,
            lable_feature_map_file_path, model_file_path)
    os.remove(tmp_model_path)
    os.remove(tmp_map_path)
def check_conan_repo(self):
    """Sanity-check the conan repo state and that Conan itself is up to date."""
    repo = Repo(self.details.conan_repo_dir)
    assert not repo.bare
    # TODO Add descriptions in case of failure
    assert repo.active_branch.name == 'master'
    assert len(repo.index.diff(None)) == 0    # no modified files
    assert len(repo.index.diff("HEAD")) == 0  # no staged files

    run(["open", "https://github.com/conan-io/conan/releases"])
    run(["conan", "--version"])
    # TODO pip3 install --upgrade conan
    check_step("you are running the latest Conan release")
def create_single_header_file(self):
    """Generate the amalgamated single header and prepend the version banner."""
    os.chdir("../ApprovalTests")
    print(os.getcwd())
    run(["java", "-version"])
    run(["java", "-jar", "../build/SingleHpp.v.0.0.2.jar",
         self.details.release_new_single_header])

    # Prepend version/source banner to the generated header.
    header_body = read_file(self.details.release_new_single_header)
    banner_and_body = \
        F"""// Approval Tests version {self.details.new_version}
// More information at: https://github.com/approvals/ApprovalTests.cpp

{header_body}"""
    write_file(self.details.release_new_single_header, banner_and_body)
def Xgenerate_contours(self, interval=1, verbose=True): """Contour NetCDF grids directly """ # FIXME (Ole): This does not work - probably due to the GDAL NetCDF driver ignoring coordinate system if verbose: header('Contouring NetCDF thickness grids') for filename in os.listdir(self.output_dir): if filename.endswith('.res.nc'): pathname = os.path.join(self.output_dir, filename) if verbose: print ' ', pathname basename, ext = os.path.splitext(pathname) tiffile = basename + '.tif' shpfile = basename + '.shp' kmlfile = basename + '.kml' prjfile = basename + '.prj' # Generate GeoTIFF raster netcdf_subdata = 'NETCDF:"%s":THICKNESS' % pathname s = 'gdal_translate -of GTiff -b 1 %s %s' % ( netcdf_subdata, tiffile) # FIXME: Band is hardwired run_with_errorcheck(s, tiffile, verbose=verbose) # Generate contours as shapefiles s = '/bin/rm -rf %s' % shpfile # Clear the way run(s, verbose=False) s = 'gdal_contour -i %f %s %s' % (interval, tiffile, shpfile) run_with_errorcheck(s, shpfile, verbose=verbose) # Generate KML if self.WKT_projection: # Create associated projection file fid = open(prjfile, 'w') fid.write(self.WKT_projection) fid.close() s = 'ogr2ogr -f KML -t_srs EPSG:4623 -s_srs %s %s %s' % ( prjfile, kmlfile, shpfile) else: s = 'ogr2ogr -f KML -t_srs EPSG:4623 %s %s' % (kmlfile, shpfile) run_with_errorcheck(s, kmlfile, verbose=verbose)
def test_conan_and_create_pr(self):
    """Build the new Conan package locally, then prompt for the PR steps."""
    # TODO Fix this directory name
    pushdir(self.details.conan_data_dir)
    # The new Conan recipe can only be tested once the new release
    # has been published on github.
    version_number = version.get_version_without_v(self.details.new_version)
    run(['conan', 'create', '.', F'{version_number}@'])
    # TODO Commit the changes - with message f'Add approvaltests.cpp {version_number}'
    # TODO Push the changes - NB on the feature branch for the release
    popdir()
    check_step("that you have created a Pull Request for conan-center-index?")
def runscript(self, cmd, name, logfile, lines=5, verbose=False):
    """Run Fall3d script and report

    Captures stdout/stderr to per-step files; raises Exception (listing
    all log files) if the script exits with a non-zero status.
    """
    if verbose:
        print 'Logfile: %s' % logfile
        #print 'Shortcut: %s' % os.path.join(self.symlink, os.path.split(logfile)[-1])

    stdout = self.logbasepath + '.%s.stdout' % name
    stderr = self.logbasepath + '.%s.stderr' % name

    err = run(cmd, stdout=stdout, stderr=stderr, verbose=False)

    if verbose:
        # Show the tail of the tool's own log as a quick status check.
        print 'Logfile ended as follows:'
        tail(logfile, lines)

    if err:
        msg = 'Script %s ended abnormally. Log files are:\n' % cmd
        msg += ' %s\n' % logfile
        msg += ' %s\n' % stdout
        msg += ' %s\n' % stderr
        raise Exception(msg)
def install_ubuntu_packages():
    """Get required PostGIS packages.

    It is OK if they are already installed.

    Raises Exception if any apt-get install returns a non-zero exit code.
    """
    for package in ['postgresql',
                    'postgresql-server-dev-8.4',
                    'proj',
                    'libgeos-dev',
                    'postgis',  # For things like shp2pgsql
                    #'pgadmin3',
                    #'libpq-dev',
                    #'postgresql-contrib',
                    ]:
        s = 'apt-get -y install %s > %s_install.log' % (package, package)
        # The exit status was previously ignored, so a failed install went
        # unnoticed; check it as the other installer helpers do.
        err = run(s)
        if err != 0:
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s_install.log for details' % package
            raise Exception(msg)
def get_plugins():
    """Get plugins such as REST

    Downloads the plugin archive into workdir (unless cached) and unzips
    it into the geoserver installation.
    """
    path = geoserver_rest_plugin_url
    archive = workdir + '/' + geoserver_rest_plugin
    print 'Archive', archive
    # Download only when the archive is not already cached in workdir.
    if not os.path.isfile(archive):
        # FIXME: Should also check integrity of zip file.
        cmd = 'cd %s; wget %s' % (workdir, path)
        run(cmd, verbose=True)

    # Unpack into geoserver installation
    s = 'unzip %s -d /usr/local/%s/webapps/geoserver/WEB-INF/lib' % (archive, geoserver)
    run(s, verbose=True)
def check_pre_conditions_for_publish(self):
    """Walk through the manual pre-release checklist, opening each page to verify."""
    if self.details.push_to_production:
        repo = Repo(self.details.main_project_dir)
        assert not repo.bare
        assert repo.active_branch.name == 'master'
        # From https://stackoverflow.com/questions/31959425/how-to-get-staged-files-using-gitpython
        modified_count = len(repo.index.diff(None))
        assert modified_count == 0, "there are un-committed changes to ApprovalTests.cpp"
        staged_count = len(repo.index.diff("HEAD"))
        assert staged_count == 0, "there are un-committed changes to ApprovalTests.cpp"
        # From https://stackoverflow.com/questions/15849640/how-to-get-count-of-unpublished-commit-with-gitpython
        unpushed_count = len(list(repo.iter_commits('master@{u}..master')))
        assert unpushed_count == 0, "there are un-pushed changes in ApprovalTests.cpp"

    run(["open", "https://github.com/approvals/ApprovalTests.cpp/commits/master"])
    check_step("the builds are passing")

    run(["open", "https://github.com/approvals/ApprovalTests.cpp/blob/master/build/relnotes_x.y.z.md"])
    run(["open", F"https://github.com/approvals/ApprovalTests.cpp/compare/{self.details.old_version}...master"])
    check_step("the release notes are ready")

    run(["open", "https://github.com/approvals/ApprovalTests.cpp/issues"])
    check_step("any issues resolved in this release are closed")

    run(["open", "https://github.com/approvals/ApprovalTests.cpp/milestones"])
    check_step("the milestone (if any) is up to date, including actual version number of release")
def run(self, filename):
    """Run the standalone Havok tool on *filename*."""
    utilities.print_line()
    print("Starting Havok Filter Manager...")
    command = [self.havok_tool_standalone, filename]
    utilities.print_line(True)
    print(' '.join(command))
    # It's important that the current directory is set to the output path
    # or it won't output to the expected directory
    working_dir = os.path.dirname(filename)
    utilities.run(command, False, working_dir)
    return
def parseeazy(catalog, n):
    """Convert an EAZY photo-z ASCII catalog into an LDAC table.

    Reads column names from the catalog's '# ' header line, writes an
    asctoldac column configuration for them, converts the catalog to
    <catalog>.temp.tab and finally adds an EAZY_NUMBER key with value
    *n*, producing <catalog>.tab.
    """
    # Fixed: 'import os' appeared twice and 'import re' was inside the
    # header-scanning loop; all imports hoisted here.
    import os
    import re
    from utilities import run

    f = open(catalog, 'r').readlines()
    # NOTE(review): 'sntmp' is opened but never written to or closed;
    # kept only for its side effect of (re)creating the file.
    sntmp = open('sntmp', 'w')

    # Collect column names from the first '# ' header line; stop scanning
    # at the first data (non-comment) line.
    keys = []
    for line in f:
        if line[0:2] == '# ':
            res2 = re.split(r'\s+', line[:-1])  # raw string: fixes '\s' escape
            print(res2)
            for k in res2[1:]:
                keys.append('EAZY_' + k)
            break
        if line[0] != '#':
            break
    print(keys)

    # Write the asctoldac column configuration.
    tempconf = '/tmp/' + os.environ['USER'] + 'photoz.conf'
    conflist = open(tempconf, 'w')
    for key in keys:
        if key == 'EAZY_id':
            # The id column becomes the standard LDAC sequence number.
            conflist.write(
                'COL_NAME = SeqNr\nCOL_TTYPE = LONG\nCOL_HTYPE = INT\nCOL_COMM = ""\nCOL_UNIT = ""\nCOL_DEPTH = 1\n#\n'
            )
        else:
            conflist.write(
                'COL_NAME = ' + key +
                '\nCOL_TTYPE = DOUBLE\nCOL_HTYPE = FLOAT\nCOL_COMM = ""\nCOL_UNIT = ""\nCOL_DEPTH = 1\n#\n'
            )
    conflist.close()

    tempcat = '/tmp/' + os.environ['USER'] + 'zs.cat'
    run(
        'asctoldac -i ' + catalog + ' -o ' + catalog + '.temp.tab' + ' -c ' +
        tempconf + ' -t STDTAB', [tempcat])
    command = 'ldacaddkey -i ' + catalog + '.temp.tab -o ' + catalog + '.tab -t STDTAB -k EAZY_NUMBER ' + str(
        n) + ' FLOAT "" '
    print(command)
    os.system(command)
    print(catalog + '.tab')
def download_john_source(john_home): makedir(john_home) os.chdir(john_home) print 'Current working dir', os.getcwd() # Clean out s = '/bin/rm -rf %s/*' % john_home run(s, verbose=True) # Get source and verification files = ['http://www.openwall.com/john/g/%s.tar.gz' % package_name, 'http://www.openwall.com/john/g/%s.tar.gz.sign' % package_name, 'http://www.openwall.com/signatures/openwall-signatures.asc'] for file in files: path = os.path.join(john_home, file) s = 'wget %s' % file run(s, verbose=True)
def run_hazardmap(model_output_directory, verbose=True):
    """Run HazardMapping.exe

    Requires
        - input file
        - Directory with FALL3D model outputs
    """
    # FIXME: Perhaps include into AIM class (somehow)

    # Locate the executable inside the Fall3d Utilities tree.
    utilities_dir = os.path.join(get_fall3d_home(), 'Utilities')
    executable = os.path.join(utilities_dir, 'HazardMaps', 'HazardMapping.exe')

    if verbose:
        header('Running hazard mapping in %s' % model_output_directory)

    # Execute from within the model output directory; discard stderr.
    run('cd %s; %s ' % (model_output_directory, executable),
        verbose=verbose,
        stdout='run_hazardmapping.log',
        stderr='/dev/null')
def test_conan_and_create_pr(self):
    """Build the new Conan package locally and guide the PR creation steps."""
    pushdir(os.path.join(self.details.conan_approvaltests_dir, 'all'))
    # The new Conan recipe can only be tested once the new release
    # has been published on github.
    version_number = version.get_version_without_v(self.details.new_version)
    run(['conan', 'create', '.', F'{version_number}@'])
    check_step(
        F"Commit the changes - with message 'Add approvaltests.cpp {version_number}'"
    )
    check_step('Push the changes - NB on the feature branch for the release')
    popdir()
    print(
        F"Create a pull request, including this in the description: **approvaltests.cpp/{version_number}**"
    )
    check_step("that you have created a Pull Request for conan-center-index?")
def parsebpz(catalog, n):
    '''this adds BPZ_NUMBER on the end, but it's always =0 currently
    (see /nfs/slac/g/ki/ki18/anja/SUBARU/MACS1226+21/PHOTOMETRY_W-C-RC_aper/all_bpzAPER1CWWSB_capak.list1_0.bpz.tab.txt)

    Converts a BPZ ASCII catalog into an LDAC table: reads column names
    from the '# ' header lines, writes an asctoldac configuration,
    converts to <catalog>.temp.tab and adds a BPZ_NUMBER key, producing
    <catalog>.tab.
    '''
    import os, re
    from utilities import run
    f = open(catalog, 'r').readlines()
    # NOTE(review): 'sntmp' is opened but never written to or closed;
    # kept only for its side effect of (re)creating the file.
    sntmp = open(os.environ['USER'] + 'sntmp', 'w')
    # Collect the third field of each '# ' header line as a column name;
    # stop at the first data (non-comment) line.
    keys = []
    for line in f:
        if line[0:2] == '# ':
            # NOTE(review): '\s+' works only because '\s' is not a
            # recognised escape; a raw string r'\s+' would be safer.
            res2 = re.split('\s+', line[:-1])
            print res2
            keys.append('BPZ_' + res2[2])
        if line[0] != '#':
            break
    # Write the asctoldac column configuration.
    tempconf = '/tmp/' + os.environ['USER'] + 'photoz.conf'
    conflist = open(tempconf, 'w')
    for key in keys:
        if key == 'BPZ_ID':
            # The ID column becomes the standard LDAC sequence number.
            conflist.write(
                'COL_NAME = SeqNr\nCOL_TTYPE = LONG\nCOL_HTYPE = INT\nCOL_COMM = ""\nCOL_UNIT = ""\nCOL_DEPTH = 1\n#\n'
            )
        else:
            conflist.write(
                'COL_NAME = ' + key +
                '\nCOL_TTYPE = DOUBLE\nCOL_HTYPE = FLOAT\nCOL_COMM = ""\nCOL_UNIT = ""\nCOL_DEPTH = 1\n#\n'
            )
    conflist.close()
    tempcat = '/tmp/' + os.environ['USER'] + 'zs.cat'
    run(
        'asctoldac -i ' + catalog + ' -o ' + catalog + '.temp.tab' + ' -c ' +
        tempconf + ' -t STDTAB', [tempcat])
    command = 'ldacaddkey -i ' + catalog + '.temp.tab -o ' + catalog + '.tab -t STDTAB -k BPZ_NUMBER ' + str(
        n) + ' FLOAT "" '
    print ' command=', command
    os.system(command)
    print catalog + '.tab'
    print 'here'
def cutout(infile, color='red'):
    """Interactively filter *infile* by magnitude/diff limits and write a
    ds9 region file of the surviving objects.

    NOTE(review): the *color* parameter is immediately overwritten by the
    interactive prompt below, so its default value is never used.
    NOTE(review): 'mag' and 'filter' are globals assumed to name catalog
    columns - confirm against the caller.
    """
    import os, utilities
    ppid = str(os.getppid())  # parent PID keeps the temp file names unique
    print ppid + 'a'
    #pylab.show()
    outfile = raw_input('name of output file?')
    color = raw_input('color of regions?')
    # Prompt for the four selection limits.
    limits = ['lower_mag', 'upper_mag', 'lower_diff', 'upper_diff']
    lim_dict = {}
    for lim in limits:
        print lim + '?'
        b = raw_input()
        lim_dict[lim] = b
    # Select objects inside the magnitude and magdiff windows.
    utilities.run(
        'ldacfilter -i ' + infile + ' -t PSSC\
    -c "(((SEx_' + mag + '_' + filter + '>' + str(lim_dict['lower_mag']) +
        ') AND (SEx_' + mag + '_' + filter + '<' + str(lim_dict['upper_mag']) +
        ')) AND (magdiff>' + str(lim_dict['lower_diff']) +
        ')) AND (magdiff<' + str(lim_dict['upper_diff']) +
        ');"\
    -o cutout1.' + ppid, ['cutout1.' + ppid])
    # Extract Ra/Dec of the selected objects as plain ASCII.
    utilities.run(
        'ldactoasc -b -q -i cutout1.' + ppid + ' -t PSSC\
    -k Ra Dec > /tmp/' + outfile, ['/tmp/' + outfile])
    # Turn the coordinate list into a ds9 region file.
    utilities.run('mkreg.pl -c -rad 8 -xcol 0 -ycol 1 -wcs -colour ' + color +
                  ' /tmp/' + outfile)
def compile_ncview():
    """Configure and make
    """
    # Run the standard configure/build/install sequence inside the
    # ncview source directory (module-level 'ncview').
    for step in ('./configure', 'make', 'make install'):
        run('cd %s; %s' % (ncview, step))
def extract(): """Extract files from tarball """ # Cleanup #s = '/bin/rm -rf %s' % fall3d_distro #run(s, verbose=False) print 'Unpacking tarball' print # Unpack FALL3D using tar: # # x: Extract # v: Be verbose # f: Filename coming up # z: Uncompress as well # err = run('tar xvfz %s > /dev/null' % tarball, verbose=False) if err != 0: msg = 'Could not unpack %s' % tarball raise Exception(msg)