def _add_files_and_push(vcs, dest, clone_url=None, **kwargs): """ Generate some files, add it to DEST repo and push back vcs is git or hg and defines what VCS we want to make those files for """ # commit some stuff into this repo cwd = path = jn(dest) added_file = jn(path, '%ssetup.py' % tempfile._RandomNameSequence().next()) Command(cwd).execute('touch %s' % added_file) Command(cwd).execute('%s add %s' % (vcs, added_file)) for i in xrange(kwargs.get('files_no', 3)): cmd = """echo 'added_line%s' >> %s""" % (i, added_file) Command(cwd).execute(cmd) author_str = 'Marcin Kuźminski <*****@*****.**>' if vcs == 'hg': cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( i, author_str, added_file) elif vcs == 'git': cmd = """EMAIL="*****@*****.**" git commit -m 'commited new %s' """\ """--author '%s' %s """ % (i, author_str, added_file) Command(cwd).execute(cmd) # PUSH it back stdout = stderr = None if vcs == 'hg': stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url) elif vcs == 'git': stdout, stderr = Command(cwd).execute('git push --verbose', clone_url + " master") return stdout, stderr
def create_test_repositories(test_path, config):
    """Extract the test HG/GIT/SVN repositories into *test_path*.

    The source archives come from the ``rc_testdata`` package; any stale
    search-index and cache directories referenced by *config* are wiped
    beforehand.
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # start from a clean slate: drop old index and cache directories
    for stale_dir in (idx_path, data_path):
        if stale_dir and os.path.exists(stale_dir):
            log.debug('remove %s', stale_dir)
            shutil.rmtree(stale_dir)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        tar.extractall(jn(test_path, SVN_REPO))
def test_push_new_file(commits=15, with_clone=True):
    """Add a new file, commit it *commits* times and push over http."""
    # optionally start from a freshly cloned repository
    if with_clone:
        test_clone_with_credentials(no_errors=True)

    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)
    # non-ascii file name also exercises encoding handling
    added_file = jn(path, '%ssetupążźć.py' % _RandomNameSequence().next())

    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('hg add %s' % added_file)

    author = 'Marcin Kuźminski <*****@*****.**>'
    for commit_no in xrange(commits):
        Command(cwd).execute(
            """echo 'added_line%s' >> %s""" % (commit_no, added_file))
        Command(cwd).execute(
            """hg ci -m 'commited new %s' -u '%s' %s """ % (
                commit_no, author, added_file))

    push_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % {
        'user': USER,
        'pass': PASS,
        'host': HOST,
        'cloned_repo': HG_REPO,
        'dest': jn(TESTS_TMP_PATH, HG_REPO),
    }
    Command(cwd).execute('hg push --verbose --debug %s' % push_url)
def test_push_new_file(commits=15, with_clone=True):
    """Add a new (non-ascii named) file, commit it *commits* times and
    push the result back over http.

    :param commits: number of commits to create
    :param with_clone: clone the repository first when True
    """
    if with_clone:
        test_clone_with_credentials(no_errors=True)

    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)
    # non-ascii file name also exercises encoding handling
    added_file = jn(path, '%ssetupążźć.py' % _RandomNameSequence().next())

    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('hg add %s' % added_file)

    for i in xrange(commits):
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)
        cmd = """hg ci -m 'commited new %s' -u '%s' %s """ % (
            i, 'Marcin Kuźminski <*****@*****.**>', added_file)
        Command(cwd).execute(cmd)

    push_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
        {'user': USER,
         'pass': PASS,
         'host': HOST,
         'cloned_repo': HG_REPO,
         'dest': jn(TESTS_TMP_PATH, HG_REPO)}
    Command(cwd).execute('hg push --verbose --debug %s' % push_url)
def test_push_wrong_path(): cwd = path = jn(TESTS_TMP_PATH, HG_REPO) added_file = jn(path, 'somefile.py') try: shutil.rmtree(path, ignore_errors=True) os.makedirs(path) print '\tmade dirs %s' % jn(path) except OSError: raise Command(cwd).execute("""echo '' > %s""" % added_file) Command(cwd).execute("""hg init %s""" % path) Command(cwd).execute("""hg add %s""" % added_file) for i in xrange(2): cmd = """echo 'added_line%s' >> %s""" % (i, added_file) Command(cwd).execute(cmd) cmd = """hg ci -m 'commited new %s' %s """ % (i, added_file) Command(cwd).execute(cmd) clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \ {'user':USER, 'pass':PASS, 'host':HOST, 'cloned_repo':HG_REPO + '_error', 'dest':jn(TESTS_TMP_PATH, HG_REPO)} stdout, stderr = Command(cwd).execute('hg push %s' % clone_url) if not """abort: HTTP Error 403: Forbidden""" in stderr: raise Exception('Failure')
def post_process(paras, cwtmatr, b, fs, times, freqs): zz = sqrt(cwtmatr**2) # Smooth out the output with gaussian kernel if paras['filter_gaussian'] != [1, 1]: zz = ndimage.filters.gaussian_filter(zz, paras['filter_gaussian'], mode='constant') # Downsampling output: zz = zz[::paras['downsampling_array'][0],::paras['downsampling_array'][1]] times = times[::paras['downsampling_array'][0]] freqs = freqs[::paras['downsampling_array'][1]] b = b[::paras['downsampling_array'][0]] # Save numpy array if not paras['output_pic'] is None: path = jn(paras['results_dir'], paras['output_pic']) print ('Saving .pic file as', path) with open(path, 'wb') as f: pickle.dump((b, zz, times, freqs), f) # Save stl file path = jn(paras['results_dir'], paras['output_stl']) print ('Saving .stl file as', path) numpy2stl(zz, path, **paras['stl_options']) # Rescale x/y axis the_mesh = mesh.Mesh.from_file(path) xsize = the_mesh.x.max() - the_mesh.x.min() ysize = the_mesh.y.max() - the_mesh.y.min() the_mesh.y = the_mesh.y*(xsize/ysize)/paras['ratio_xy'] the_mesh.save(path) return b, zz, times, freqs
def command(self):
    """Install the bundled ``rcextensions`` template into the instance's
    ``here`` directory, asking before overwriting an existing file."""
    logging.config.fileConfig(self.path_to_ini_file)
    from pylons import config

    def _make_file(ext_file, tmpl):
        # write the template, creating the parent directory when needed
        bdir = os.path.split(ext_file)[0]
        if not os.path.isdir(bdir):
            os.makedirs(bdir)
        with open(ext_file, 'wb') as f:
            f.write(tmpl)
        log.info('Writen new extensions file to %s' % ext_file)

    here = config['here']
    tmpl = pkg_resources.resource_string(
        'rhodecode', jn('config', 'rcextensions', '__init__.py'))
    ext_file = jn(here, 'rcextensions', '__init__.py')
    if os.path.exists(ext_file):
        msg = ('Extension file already exists, do you want '
               'to overwrite it ? [y/n]')
        if ask_ok(msg):
            _make_file(ext_file, tmpl)
        else:
            log.info('nothing done...')
    else:
        _make_file(ext_file, tmpl)
def command(self):
    """Write the bundled rcextensions template into ``<here>/rcextensions``,
    prompting before clobbering an existing file."""
    logging.config.fileConfig(self.path_to_ini_file)
    from pylons import config

    def _make_file(target, content):
        # ensure the parent directory exists, then dump the template
        parent = os.path.split(target)[0]
        if not os.path.isdir(parent):
            os.makedirs(parent)
        with open(target, 'wb') as fh:
            fh.write(content)
        log.info('Writen new extensions file to %s' % target)

    here = config['here']
    tmpl = pkg_resources.resource_string(
        'rhodecode', jn('config', 'rcextensions', '__init__.py')
    )
    ext_file = jn(here, 'rcextensions', '__init__.py')

    if not os.path.exists(ext_file):
        _make_file(ext_file, tmpl)
        return

    prompt = ('Extension file already exists, do you want '
              'to overwrite it ? [y/n]')
    if ask_ok(prompt):
        _make_file(ext_file, tmpl)
    else:
        log.info('nothing done...')
def distances_distribution_plot(self):
    """For every method: pickle the raw distances, then plot both the
    ordered distances and their distribution side by side.

    Removed: a dead ``distances = []`` assignment (immediately
    overwritten) and a commented-out per-frame loop.
    """
    for method in self.methods:
        distances = self.get_distance(0, method)
        # persist the raw distances for later reuse
        pkl.dump(distances,
                 open(jn(self.output, 'distances_' + method + '.p'), 'wb'))

        f = plt.figure(figsize=[8.2, 4.8])

        ax1 = plt.subplot(121)
        ax1.scatter(range(distances.shape[0]), np.sort(distances),
                    marker='+', color='k')
        ax1.set_title('Ordered distances ' + method)
        # distance-based methods get a fixed tick grid
        if method[0] == "d":
            plt.yticks(np.arange(0, 5, 0.25))
        plt.ticklabel_format(style='sci', axis='x', scilimits=(3, 3))
        plt.grid(linestyle=':')
        plt.ylabel('Distances (nm)')
        plt.xlabel('Order')

        ax2 = plt.subplot(122)
        ax2.set_title('Distance distribution ' + method)
        plt.xlabel('Distances (nm)')
        plt.ylabel('Occurences')
        sns.set_color_codes()
        sns.distplot(distances, ax=ax2, color='k')

        plt.tight_layout()
        plt.savefig(jn(self.output, 'distances_' + method + '.png'))
        plt.close()
def copy_files(self, files: List[str], remote_path: "_SPATH",
               local_path: "_SPATH", *, direction: "_DIRECTION",
               follow_symlinks: bool = True, quiet: bool = False):
    """Copy each file in *files* between local and remote locations.

    ``direction="get"`` pulls remote -> local, ``"put"`` pushes
    local -> remote; any other value raises ``ValueError``.
    """
    with context_timeit(quiet):
        for name in files:
            if direction == "get":
                src = jn(self.c._path2str(remote_path), name)
                dst = jn(self.c._path2str(local_path), name)
            elif direction == "put":
                dst = jn(self.c._path2str(remote_path), name)
                src = jn(self.c._path2str(local_path), name)
            else:
                raise ValueError(f"{direction} is not valid direction. "
                                 f"Choose 'put' or 'get'")

            self.copyfile(src, dst, direction=direction,
                          follow_symlinks=follow_symlinks,
                          callback=None, quiet=quiet)
def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir

    :param repos_test_path: directory to hold the extracted test repos
    :param config: application configuration mapping
    """
    from kallithea.lib.db_manage import DbManage
    from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    # lazy %-args logging (was eager '%' formatting) -- consistent with
    # the sibling create_test_env variant in this file
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s', repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    #LOAD VCS test stuff
    from kallithea.tests.vcs import setup_package
    setup_package()
def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir

    :param repos_test_path: directory to hold the extracted test repos
    :param config: application configuration mapping
    """
    from kallithea.lib.db_manage import DbManage
    from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s', repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    #LOAD VCS test stuff
    from kallithea.tests.vcs import setup_package
    setup_package()
def install_git_hook(self, repo, force_create=False):
    """
    Creates a rhodecode hook inside a git repository

    :param repo: Instance of VCS repo
    :param force_create: Create even if same name hook exists
    """
    # bare repos keep hooks at <path>/hooks, working copies at .git/hooks
    loc = jn(repo.path, 'hooks')
    if not repo.bare:
        loc = jn(repo.path, '.git', 'hooks')
    if not os.path.isdir(loc):
        os.makedirs(loc)

    tmpl_post = pkg_resources.resource_string(
        'rhodecode', jn('config', 'post_receive_tmpl.py')
    )
    tmpl_pre = pkg_resources.resource_string(
        'rhodecode', jn('config', 'pre_receive_tmpl.py')
    )

    for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
        _hook_file = jn(loc, '%s-receive' % h_type)
        _rhodecode_hook = False
        log.debug('Installing git hook in repo %s' % repo)
        if os.path.exists(_hook_file):
            # let's take a look at this hook, maybe it's rhodecode ?
            log.debug('hook exists, checking if it is from rhodecode')
            with open(_hook_file, 'rb') as f:
                data = f.read()
                # hooks written by rhodecode embed an RC_HOOK_VER marker
                matches = re.compile(r'(?:%s)\s*=\s*(.*)'
                                     % 'RC_HOOK_VER').search(data)
                if matches:
                    try:
                        ver = matches.groups()[0]
                        log.debug('got %s it is rhodecode' % (ver))
                        _rhodecode_hook = True
                    except Exception:
                        log.error(traceback.format_exc())
        else:
            # there is no hook in this dir, so we want to create one
            _rhodecode_hook = True

        if _rhodecode_hook or force_create:
            log.debug('writing %s hook file !' % (h_type,))
            try:
                with open(_hook_file, 'wb') as f:
                    # stamp the template with the running rhodecode version
                    tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                    f.write(tmpl)
                # make the hook executable
                os.chmod(_hook_file, 0755)
            except IOError, e:
                log.error('error writing %s: %s' % (_hook_file, e))
        else:
            log.debug('skipping writing hook file')
def test_push_modify_file(f_name='setup.py'):
    """Append five lines to *f_name*, committing each change, then push
    to the local test repository path."""
    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)
    modified_file = jn(TESTS_TMP_PATH, HG_REPO, f_name)
    for change_no in xrange(5):
        Command(cwd).execute(
            """echo 'added_line%s' >> %s""" % (change_no, modified_file))
        Command(cwd).execute(
            """hg ci -m 'changed file %s' %s """ % (change_no, modified_file))
    Command(cwd).execute('hg push %s' % jn(TESTS_TMP_PATH, HG_REPO))
def plot_ID(self):
    """Fit and plot the intrinsic-dimension estimator for each method."""
    for method in self.methods:
        if method == "TICA":
            distances = self.tica()
        elif method == "RMSD":
            distances = self.rmsd()

        if self.split_chains:
            # one fit per chain
            for idx, chain in enumerate(self.chains_label):
                out = jn(self.output,
                         self.string + method + '_chain' + chain + '_fit.png')
                IDEstimator(distances[idx],
                            discard_tail=self.discard_tail).fit(out)
        else:
            out = jn(self.output, self.string + method + '_fit.png')
            IDEstimator(distances, discard_tail=self.discard_tail).fit(out)
def _get_package_data(): """Iterates over the `init` dir for directories and returns all files within them. Only files within `binaries` and `templates` will be added. """ from os.path import join as jn from os import listdir as ls x = 'init' b = jn('serv', x) dr = ['binaries', 'templates'] return [jn(x, d, f) for d in ls(b) if d in dr for f in ls(jn(b, d))]
def install_git_hook(self, repo, force_create=False):
    """
    Creates a kallithea hook inside a git repository

    :param repo: Instance of VCS repo
    :param force_create: Create even if same name hook exists
    """
    # bare repos keep hooks at <path>/hooks, working copies at .git/hooks
    loc = jn(repo.path, 'hooks')
    if not repo.bare:
        loc = jn(repo.path, '.git', 'hooks')
    if not os.path.isdir(loc):
        os.makedirs(loc)

    tmpl_post = pkg_resources.resource_string(
        'kallithea', jn('config', 'post_receive_tmpl.py'))
    tmpl_pre = pkg_resources.resource_string(
        'kallithea', jn('config', 'pre_receive_tmpl.py'))

    for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
        _hook_file = jn(loc, '%s-receive' % h_type)
        has_hook = False
        log.debug('Installing git hook in repo %s' % repo)
        if os.path.exists(_hook_file):
            # let's take a look at this hook, maybe it's kallithea ?
            log.debug('hook exists, checking if it is from kallithea')
            with open(_hook_file, 'rb') as f:
                data = f.read()
                # hooks written by kallithea embed a KALLITHEA_HOOK_VER marker
                matches = re.compile(r'(?:%s)\s*=\s*(.*)'
                                     % 'KALLITHEA_HOOK_VER').search(data)
                if matches:
                    try:
                        ver = matches.groups()[0]
                        log.debug('got %s it is kallithea' % (ver))
                        has_hook = True
                    except Exception:
                        log.error(traceback.format_exc())
        else:
            # there is no hook in this dir, so we want to create one
            has_hook = True

        if has_hook or force_create:
            log.debug('writing %s hook file !' % (h_type, ))
            try:
                with open(_hook_file, 'wb') as f:
                    # stamp the template with the running kallithea version
                    tmpl = tmpl.replace('_TMPL_', kallithea.__version__)
                    f.write(tmpl)
                # make the hook executable
                os.chmod(_hook_file, 0755)
            except IOError, e:
                log.error('error writing %s: %s' % (_hook_file, e))
        else:
            log.debug('skipping writing hook file')
def create_std_sim_plot(self):
    """Compute per-method mean/std of the distances, pickle both arrays
    and delegate plotting to ``self._plot``.

    Bug fixed: the non-"NN" branch previously assigned ``self.mean`` /
    ``self.std`` while the pickling below read the *locals* ``mean`` /
    ``std`` -- a NameError whenever that branch ran first. Both branches
    now fill the same locals.
    """
    for method in self.methods:
        distances = self.get_distances(method)
        if method[-2:] == "NN":
            mean = np.mean(distances, axis=-1)
            std = np.std(distances, axis=-1)
        else:
            # was: self.mean = ... / self.std = ..., leaving the locals
            # dumped below unbound
            mean = [np.mean(i) for i in distances]
            std = [np.std(i) for i in distances]
        pkl.dump(mean, open(jn(self.output, 'mean_' + method + '.p'), 'wb'))
        pkl.dump(std, open(jn(self.output, 'std_' + method + '.p'), 'wb'))
        self._plot(method)
def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    install test repository into tmp dir

    :param repos_test_path: directory to hold the extracted test repos
    :param config: application configuration mapping
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
        HG_FORK, GIT_FORK, TESTS_TMP_PATH
    import tarfile
    import shutil
    from os.path import abspath

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    # dynamically set new root paths based on the generated content
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT HG REPOSITORY
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()
def do_ABM_tactical(input_file, output_file, config_file, verbose=2,
                    shock_tmp=jn(main_dir, 'abm_tactical/config/shock_tmp.dat'),
                    bound_latlon=jn(main_dir, 'abm_tactical/config/bound_latlon.dat'),
                    temp_nvp=jn(main_dir, 'abm_tactical/config/temp_nvp.dat'),
                    capacity_file=main_dir+'/abm_tactical/config/sector_capacities.dat'):
    """
    Main function of control of the tactical ABM. The function uses
    tactical_simulation, which has beem compiled precedently using the wrapper.

    Parameters
    ----------
    input_file : string
        full path to M1 file (planned trajectories)
    output_file : string
        full path to save M3 file (trajectories after control)
    config_file : string
        full path to config file.
    verbose : integer, optional
        verbosity
    shock_tmp : string, optional
        full path to file containing the possible coordinates of shocks.
    bound_latlon : string, optional
        full path to file containing the coordinates of the boundary of
        the controlled airspace.
    temp_nvp : string
        full path to file containing temporary navigation points used in
        the simulations.
    capacity_file : string, optional
        full path to file containing the sector capacities.
    """
    # register the four auxiliary file paths in the config file first
    for fil, name in [(shock_tmp, 'shock_tmp'), (bound_latlon, 'bound_file'),
                      (temp_nvp, 'temp_nvp'), (capacity_file, 'capacity_file')]:
        choose_paras(name, fil, fil=config_file)

    try:
        # argv-style vector: element 0 mimics the program name
        inpt = ["", input_file, output_file, config_file]
        if verbose>1:
            print "M1 source:", inpt[1]
            print "Destination output:", inpt[2]
            print "Config file:", inpt[3]
            print
            print "Running ABM Tactical model..."
        with silence(verbose==0): # Does not work.
            tactical_simulation(inpt)
    except:
        # NOTE(review): deliberately swallows all simulator errors -- TODO
        pass #TODO

    if verbose==2:
        print
        print "Done."
def run(inp, temp, eig):
    '''
    A method to calculate the eigenvalue for one row

    Reads from an input pickle file that has pickled a list of floats
    and will continue to run until there are no more rows to process
    from the pickled file

    inp (str): the input file
    temp (str): the temp file to save the modified rows to
    eig (str): the file where the eigenvalue should be saved

    Returns 0 when the input pickle is exhausted, 1 otherwise.
    '''
    # Load the pickle file as a generator
    rows = loadall(jn(PROC, inp))

    # Open the temp file for writing
    with open(jn(PROC, temp), 'wb') as fout:
        try:
            # First row acts as the pivot used to eliminate the others
            top = next(rows)          # was: rows.__next__()
        except StopIteration:
            # The pickle is empty. We're done
            print("Stopped")
            return 0

        # Transform every remaining row against the pivot in parallel
        # (removed: large commented-out sequential implementation)
        res = Parallel(n_jobs=-1)(delayed(transform)(r, top) for r in rows)
        pickle.dump(res[1:], fout)
        print("Finished one run! Time: {0}".format(time.time() - start))

    # Append the pivot's leading value (the eigenvalue) to the output file;
    # the redundant explicit close() inside the with-block is gone
    with open(jn(PROC, eig), 'a') as eout:
        eout.write("\t{0}".format(top[0]))
    return 1
def _plot(self, residue, dic, plot_str):
    """Plot per-simulation volume traces (apo vs PRFAR) for *residue*
    and save the figure under ``results/residues_volume/<plot_str>/``."""
    plt.style.use('classic')
    mpl.rcParams.update({'axes.formatter.useoffset': False})
    resname = self.resid2name[residue]

    fig = plt.figure()
    fig.patch.set_facecolor('white')
    plt.title(resname)
    xtime = np.arange(0, 100, 0.1)

    for sim in range(self.n_plots):
        plt.subplot(self.n_plots // 2, 2, sim + 1)
        apo_trace = dic[sim][residue][:1000]
        holo_trace = dic[sim][residue][1000:]
        if sim == 0:
            # only the first subplot contributes legend entries
            plt.plot(xtime, apo_trace, c='b',
                     label='Volume ' + plot_str + ' (apo)')
            plt.plot(xtime, holo_trace, c='r',
                     label='Volume ' + plot_str + ' (PRFAR)')
        else:
            plt.plot(xtime, apo_trace, c='b')
            plt.plot(xtime, holo_trace, c='r')
        plt.ylabel('Volume (nm$^3$)')
        plt.xlabel('Time (ns)')

    fig.legend(loc=8)
    fig.tight_layout()
    fig.subplots_adjust(bottom=0.22)
    plt.savefig(
        jn('results', 'residues_volume', plot_str, resname + '.png'))
    plt.close()
def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD,
                                seq=None, backend='hg'):
    """Clone (or pull) *repo* over http with credentials and optionally
    assert the operation succeeded.

    :param no_errors: skip the output assertions when True
    :param repo: repository name on the server
    :param method: 'clone' or 'pull'
    :param seq: suffix for the destination dir; random when None
    :param backend: 'hg' or 'git'
    """
    cwd = path = jn(TESTS_TMP_PATH, repo)

    if seq is None:
        seq = _RandomNameSequence().next()

    try:
        shutil.rmtree(path, ignore_errors=True)
        os.makedirs(path)
        #print 'made dirs %s' % jn(path)
    except OSError:
        raise

    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
        {'user': USER,
         'pass': PASS,
         'host': HOST,
         'cloned_repo': repo, }

    dest = path + seq
    if method == 'pull':
        stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
    else:
        stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)

    # removed leftover debug statement: print stdout,'sdasdsadsa'

    if not no_errors:
        if backend == 'hg':
            assert """adding file changes""" in stdout, 'no messages about cloning'
            assert """abort""" not in stderr, 'got error from clone'
        elif backend == 'git':
            assert """Cloning into""" in stdout, 'no messages about cloning'
def plot_apo_holo(self, table, char, method):
    """Plot apo vs PRFAR (holo) traces side by side for every simulation.

    :param table: concatenated per-frame values; holo trajectories come
        first, then apo trajectories (``n_trajs // 2`` of each)
    :param char: quantity name used for the y-axis label and the file name
    :param method: method tag appended to the output file name
    """
    f = plt.figure()
    # NOTE(review): the split point uses n_frames[0] for every trajectory
    # while the per-plot slicing below uses n_frames[i] -- assumes all
    # trajectories share the same frame count; confirm against callers
    table_apo = table[self.n_trajs // 2 * self.n_frames[0]:]
    table_holo = table[:self.n_trajs // 2 * self.n_frames[0]]
    previous = 0
    for i in range(self.n_trajs // 2):
        if self.n_trajs // 4 >= 1:
            plt.subplot(2, self.n_trajs // 4, i + 1)
        plt.title("Simulation " + str(i + 1))
        if i == 0:
            # first subplot carries the legend labels
            plt.plot(table_apo[previous:previous + self.n_frames[i]], c='b', label="apo")
            plt.plot(table_holo[previous:previous + self.n_frames[i]], c='r', label="PRFAR")
        else:
            plt.plot(table_apo[previous:previous + self.n_frames[i]], c='b')
            plt.plot(table_holo[previous:previous + self.n_frames[i]], c='r')
        plt.ylabel(char + ' (nm)')
        plt.xlabel('Time (ns)')
        # relabel frame indices (0..1000) as time in ns (0..100)
        plt.xticks(np.arange(0, 1001, 250), np.arange(0, 101, 25))
        previous += self.n_frames[i]
    f.legend(loc=8, ncol=2)
    f.tight_layout()
    f.subplots_adjust(bottom=0.12)
    plt.savefig(jn(self.output, char + '_' + method + '.png'))
    plt.close()
def write_down_sectors_from_network(G, rep=None):
    """Dump each node's polygon boundary to ``<rep>/<node>_bound_latlon.dat``
    as tab-separated coordinate pairs, one point per line.

    :param G: network whose ``polygons`` attribute maps nodes to polygon
        objects exposing ``exterior.coords``
    :param rep: destination directory, created if missing
    """
    # was: os.system('mkdir -p ' + rep) -- avoid the shell round-trip
    if not os.path.isdir(rep):
        os.makedirs(rep)
    for n in G.nodes():
        boundary = list(G.polygons[n].exterior.coords)
        with open(jn(rep, str(n) + '_bound_latlon.dat'), 'w') as f:
            for x, y in boundary:
                f.write(str(x) + '\t' + str(y) + '\n')
def test_clone_anonymous():
    """Clone over http without credentials; anonymous access is enabled
    for the duration of the test and restored afterwards."""
    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)

    # fresh destination directory
    try:
        shutil.rmtree(path, ignore_errors=True)
        os.makedirs(path)
        #print 'made dirs %s' % jn(path)
    except OSError:
        raise

    print '\tchecking if anonymous access is enabled'
    anonymous_access = get_anonymous_access()
    if not anonymous_access:
        print '\tnot enabled, enabling it '
        set_anonymous_access(enable=True)

    # no user:pass in the URL -- this is the anonymous path
    clone_url = 'http://%(host)s/%(cloned_repo)s %(dest)s' % \
        {'user': USER,
         'pass': PASS,
         'host': HOST,
         'cloned_repo': HG_REPO,
         'dest': path}

    stdout, stderr = Command(cwd).execute('hg clone', clone_url)

    assert """adding file changes""" in stdout, 'no messages about cloning'
    assert """abort""" not in stderr, 'got error from clone'

    #disable if it was enabled
    if not anonymous_access:
        print '\tdisabling anonymous access'
        set_anonymous_access(enable=False)
def command(self):
    """Build/update the Whoosh full-text index for the configured
    repositories, serialized across processes by a lock file."""
    logging.config.fileConfig(self.path_to_ini_file)
    from pylons import config
    add_cache(config)
    engine = engine_from_config(config, 'sqlalchemy.db1.')
    init_model(engine)

    index_location = config['index_dir']
    # fall back to the model's repository root when no override is given
    repo_location = self.options.repo_location \
        if self.options.repo_location else RepoModel().repos_path
    repo_list = map(strip, self.options.repo_list.split(',')) \
        if self.options.repo_list else None

    load_rcextensions(config['here'])

    #======================================================================
    # WHOOSH DAEMON
    #======================================================================
    from rhodecode.lib.pidlock import LockHeld, DaemonLock
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    try:
        # only one indexing run may be active at a time
        l = DaemonLock(file_=jn(dn(dn(index_location)), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location,
                             repo_list=repo_list,)\
            .run(full_index=self.options.full_index)
        l.release()
    except LockHeld:
        # another indexer holds the lock -- bail out
        sys.exit(1)
def test_clone_wrong_credentials(): cwd = path = jn(TESTS_TMP_PATH, HG_REPO) try: shutil.rmtree(path, ignore_errors=True) os.makedirs(path) #print 'made dirs %s' % jn(path) except OSError: raise print '\tchecking if anonymous access is enabled' anonymous_access = get_anonymous_access() if anonymous_access: print '\tenabled, disabling it ' set_anonymous_access(enable=False) clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s %(dest)s' % \ {'user':USER + 'error', 'pass':PASS, 'host':HOST, 'cloned_repo':HG_REPO, 'dest':path} stdout, stderr = Command(cwd).execute('hg clone', clone_url) if not """abort: authorization failed""" in stderr: raise Exception('Failure')
def rmtree(self, path: "_SPATH", ignore_errors: bool = False,
           quiet: bool = True):
    """Recursively delete *path* on the remote host via sftp.

    :param path: directory to remove
    :param ignore_errors: when True a missing directory is only logged
    :param quiet: suppress progress printing
    :raises FileNotFoundError: when *path* is missing and
        ``ignore_errors`` is False
    """
    sn = self.c.server_name
    path = self.c._path2str(path)
    with context_timeit(quiet):
        lprint(quiet)(f"{G}Recursively removing dir:{R} {sn}@{path}")
        try:
            # delete files first, then their (now empty) directories
            for root, _, files in self.c.os.walk(path, followlinks=True):
                for f in files:
                    f = jn(root, f)
                    lprint(quiet)(f"{G}removing file:{R} {sn}@{f}")
                    if self.c.os.isfile(f):
                        self.c.sftp.remove(f)
                if self.c.os.isdir(root):
                    self.c.sftp.rmdir(root)
            if self.c.os.isdir(path):
                self.c.sftp.rmdir(path)
        except FileNotFoundError:
            if ignore_errors:
                log.warning("Directory does not exist")
            else:
                # re-raise with the original traceback intact
                # (was: raise FileNotFoundError(e), which rewraps and
                # discards the original chain)
                raise
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param repo_location: path of the repositories to index
    :param config: test config
    :param full_index: rebuild the whole index from scratch when True
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        # lock guards against concurrent index builds
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        # another build is running -- silently skip
        pass
def test_changeset_walk(proj, limit=None):
    """Walk the changesets of *proj*, fetch each changeset page over
    HTTP and print per-request plus aggregate timings.

    :param proj: project name or repo object (resolved via _get_repo)
    :param limit: stop after this many changesets (None = walk all)
    """
    repo, proj = _get_repo(proj)

    print 'processing', jn(PROJECT_PATH, proj)
    total_time = 0

    cnt = 0
    for i in repo:
        cnt += 1
        raw_cs = '/'.join((proj, 'changeset', i.raw_id))
        if limit and limit == cnt:
            break

        full_uri = (BASE_URI % raw_cs)
        print '%s visiting %s\%s' % (cnt, full_uri, i)
        s = time.time()
        f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
        print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)

    print 'total_time', total_time
    print 'average on req', total_time / float(cnt)
def command(self):
    """Build/update the Whoosh full-text index, serialized by a lock file."""
    logging.config.fileConfig(self.path_to_ini_file)
    from pylons import config
    add_cache(config)
    engine = engine_from_config(config, 'sqlalchemy.db1.')
    init_model(engine)

    index_location = config['index_dir']
    if self.options.repo_location:
        repo_location = self.options.repo_location
    else:
        repo_location = RepoModel().repos_path
    if self.options.repo_list:
        repo_list = map(strip, self.options.repo_list.split(','))
    else:
        repo_list = None
    if self.options.repo_update_list:
        repo_update_list = map(strip,
                               self.options.repo_update_list.split(','))
    else:
        repo_update_list = None

    load_rcextensions(config['here'])

    #======================================================================
    # WHOOSH DAEMON
    #======================================================================
    from rhodecode.lib.pidlock import LockHeld, DaemonLock
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    try:
        # only one indexing run may be active at a time
        l = DaemonLock(file_=jn(dn(dn(index_location)), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location,
                             repo_list=repo_list,
                             repo_update_list=repo_update_list)\
            .run(full_index=self.options.full_index)
        l.release()
    except LockHeld:
        sys.exit(1)
def test_clone_anonymous(): cwd = path = jn(TESTS_TMP_PATH, HG_REPO) try: shutil.rmtree(path, ignore_errors=True) os.makedirs(path) #print 'made dirs %s' % jn(path) except OSError: raise print '\tchecking if anonymous access is enabled' anonymous_access = get_anonymous_access() if not anonymous_access: print '\tnot enabled, enabling it ' set_anonymous_access(enable=True) time.sleep(1) clone_url = 'http://%(host)s/%(cloned_repo)s %(dest)s' % \ {'user':USER, 'pass':PASS, 'host':HOST, 'cloned_repo':HG_REPO, 'dest':path} stdout, stderr = Command(cwd).execute('hg clone', clone_url) assert """adding file changes""" in stdout, 'no messages about cloning' assert """abort""" not in stderr , 'got error from clone' #disable if it was enabled if not anonymous_access: print '\tdisabling anonymous access' set_anonymous_access(enable=False)
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
    from kallithea.lib.pidlock import DaemonLock, LockHeld

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        # serialize index builds across processes with a lock file
        lock = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        daemon = WhooshIndexingDaemon(index_location=index_location,
                                      repo_location=repo_location)
        daemon.run(full_index=full_index)
        lock.release()
    except LockHeld:
        pass
def wrapper(*args, **kwargs): if 'rep' in kwargs.keys(): rep = kwargs['rep'] kwargs.pop('rep', None) else: rep = '.' if 'name' in kwargs.keys(): name = kwargs['name'] kwargs.pop('name', None) else: name = plot.func_name if 'suffix' in kwargs.keys(): suffix = kwargs['suffix'] kwargs.pop('suffix', None) name += '_' + suffix if 'save' in kwargs.keys(): save = kwargs['save'] kwargs.pop('save', None) else: save = True name += '.png' ret = plot(*args, **kwargs) full_path = jn(rep, name) if save: plt.savefig(full_path) print 'Saved as', full_path return ret
def _add_files_and_push(vcs, DEST, **kwargs): """ Generate some files, add it to DEST repo and push back vcs is git or hg and defines what VCS we want to make those files for :param vcs: :param DEST: """ # commit some stuff into this repo cwd = path = jn(DEST) #added_file = jn(path, '%ssetupążźć.py' % _RandomNameSequence().next()) added_file = jn(path, '%ssetup.py' % _RandomNameSequence().next()) Command(cwd).execute('touch %s' % added_file) Command(cwd).execute('%s add %s' % (vcs, added_file)) for i in xrange(kwargs.get('files_no', 3)): cmd = """echo 'added_line%s' >> %s""" % (i, added_file) Command(cwd).execute(cmd) author_str = 'Marcin Kuźminski <*****@*****.**>' if vcs == 'hg': cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( i, author_str, added_file ) elif vcs == 'git': cmd = """EMAIL="*****@*****.**" git commit -m 'commited new %s' --author '%s' %s """ % ( i, author_str, added_file ) Command(cwd).execute(cmd) # PUSH it back _REPO = None if vcs == 'hg': _REPO = HG_REPO elif vcs == 'git': _REPO = GIT_REPO kwargs['dest'] = '' clone_url = _construct_url(_REPO, **kwargs) if 'clone_url' in kwargs: clone_url = kwargs['clone_url'] stdout = stderr = None if vcs == 'hg': stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url) elif vcs == 'git': stdout, stderr = Command(cwd).execute('git push --verbose', clone_url + " master") return stdout, stderr
def test_push_wrong_credentials():
    """Pushing with a wrong user name -- commits locally, then pushes."""
    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)
    url_params = {
        'user': USER + 'xxx',  # deliberately wrong user name
        'pass': PASS,
        'host': HOST,
        'cloned_repo': HG_REPO,
        'dest': jn(TESTS_TMP_PATH, HG_REPO),  # not used by the format string
    }
    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % url_params

    modified_file = jn(TESTS_TMP_PATH, HG_REPO, 'setup.py')
    for i in xrange(5):
        Command(cwd).execute("""echo 'added_line%s' >> %s""" % (i, modified_file))
        Command(cwd).execute("""hg ci -m 'commited %s' %s """ % (i, modified_file))

    Command(cwd).execute('hg push %s' % clone_url)
def create_atomic_multi(traj_list, topo_list, name_list, output_folder, **kwargs):
    """Build one atomic network per (trajectory, topology, name) triple.

    Results are written as ``<name>.anpy`` files inside *output_folder*
    (created if missing). Extra keyword arguments are forwarded to
    ``AtomicNetMaker``.
    """
    mkdir(output_folder, exist_ok=True)
    targets = ('{0}.anpy'.format(nm) for nm in name_list)
    for traj, topo, target in zip(traj_list, topo_list, targets):
        AtomicNetMaker(traj, topo, output=jn(output_folder, target), **kwargs)
def _get_repo(proj):
    """
    Normalize *proj* into a ``(repo, project_name)`` pair.

    :param proj: either a project name (string) or an already opened
        repository object
    :returns: tuple ``(repository object, project name)``
    """
    if isinstance(proj, basestring):
        # given a name -- open the repository under PROJECT_PATH
        repo = vcs.get_repo(jn(PROJECT_PATH, proj))
    else:
        # given a repository object -- derive the name from it
        # (removed a no-op `proj = proj` from the string branch)
        repo = proj
        proj = repo.name
    return repo, proj
def _parse_files(self):
    """
    Scan ``backup_dir`` and pair up ``.torrent`` / ``.fastresume`` files.

    Every unique file stem (hash) in the directory is expected to have
    both companion files; complete pairs are cached in ``self._files``
    and their merged, key-sorted data in ``self._data``. Incomplete
    pairs are reported on stdout and skipped.
    """
    # unique stems (hashes); renamed from `file`/`dir_` -- `file` shadowed
    # the builtin and `dir_` actually held stems, not directories
    stems = set(spe(entry)[0] for entry in listdir(backup_dir))
    for stem in stems:
        try:
            qResume = qTorFile(jn(backup_dir, stem + '.fastresume'))
        except FileNotFoundError:
            qResume = None
        try:
            qTorrent = qTorFile(jn(backup_dir, stem + '.torrent'))
        except FileNotFoundError:
            qTorrent = None
        if qResume and qTorrent:
            if stem not in self._files:  # parse each stem only once
                self._files[stem] = (qTorrent, qResume)
                # Merge both data dicts; the keys of each file are unique,
                # so nothing is lost. Store them key-sorted.
                qData = {**qTorrent.data, **qResume.data}
                self._data[stem] = {key: qData[key] for key in sorted(qData)}
        else:
            # NOTE(review): if BOTH files are missing only '.torrent' is
            # reported -- preserved from the original behavior
            print(f"{stem} is missing a {'.fastresume' if qTorrent else '.torrent'} file.")
def smart_loader(self, path):
    """
    Load (or compute) an adjacency matrix and its network for *path*.

    Dispatches on the kind of *path*:
    - a directory: computes the averaged network via ``self.create_timed``
      and saves the matrix (``.npy``) and network (``.net``) under
      ``self.outdir``
    - a ``.npy`` file: loads the precomputed matrix and its companion
      ``.net`` pickle from next to it
    - anything else: prints a warning and returns ``None``

    :param path: directory path (may end with '/') or a ``.npy`` file path
    :returns: ``(matrix, network)`` tuple, or ``None`` for unknown input
    """
    if len(basename(path)) == 0:
        # path ends with a separator: use the folder's own name
        # (basename of the path with the trailing '/' stripped)
        output = jn(self.outdir, basename(path[:-1]) + '.npy')
    else:
        # everything else: keep only the part before the FIRST dot
        # (note: 'a.b.c' -> 'a', unlike os.path.splitext)
        output = jn(self.outdir, basename(path).split('.')[0] + '.npy')
    if isdir(path):
        # NOTE(review): `output` is always a non-empty string here, so this
        # assert can never fire; the print() would only run as the
        # assertion message if it did.
        assert output, print('no output path specified !')
        # loads a folder and do the average aanet, saves
        mat, net = self.create_timed(path)
        np.save(output, mat)
        nx.write_gpickle(net, output.replace('.npy', '.net'))
    elif path[-4:] == '.npy':
        # directly loads a precomputed network; the companion network
        # pickle is expected at the same path with a '.net' suffix
        mat = np.load(path)
        net = nx.read_gpickle(path.replace('.npy', '.net'))
    else:
        print('Unknown extension for file %s' % path)
        return None
    return mat, net
def build_path_average(paras, vers=main_version, in_title=['tau', 'par', 'ACtot', 'nA'], Gname=None, rep=result_dir):
    """
    Build the path for averaged-simulation results.

    :param paras: parameter dict; reads ``paras['G'].name`` (unless
        *Gname* is given) and ``paras['n_iter']``
    :param vers: simulation version string used in the directory name
    :param in_title: kept for interface compatibility (not used here)
    :param Gname: graph-name override; defaults to ``paras['G'].name``
    :param rep: base results directory
    :returns: full path of the ``.pic`` results file
    """
    # `is None` instead of `== None` (identity test for the singleton)
    if Gname is None:
        Gname = paras['G'].name

    rep = jn(rep, 'Sim_v' + vers + '_' + Gname)
    return build_path_single(paras, vers=vers, rep=rep) + '_iter' + str(paras['n_iter']) + '.pic'
def do_plots(paras, b, zz, times, freqs):
    """
    Render the spectrogram *zz* with the signal *b* underneath, save the
    figure as a .png and optionally display it.

    :param paras: plotting options (figure size, cmap, scale, paths, ...)
    :param b: 1-D signal plotted in the lower panel
    :param zz: 2-D spectrogram values for the upper panel
    :param times: x-axis values shared by both panels
    :param freqs: y-axis values of the spectrogram
    """
    fig = figure(figsize=paras['figure_size'])
    grid = gridspec.GridSpec(2, 1, width_ratios=[1], height_ratios=[3, 1])
    spec_ax = subplot(grid[0])
    sig_ax = subplot(grid[1], sharex=spec_ax)

    spec_ax.pcolormesh(times, freqs, zz, cmap=paras['cmap'])
    if paras['log_scale']:
        spec_ax.set_yscale('log')
    sig_ax.plot(times, b)

    # the output path was computed twice in the original -- hoisted
    out_path = jn(paras['results_dir'], paras['output_png'])
    print('Saving .png file as:', out_path)
    savefig(out_path, dpi=paras['dpi'])
    if paras['show_plot']:
        show()
def test_changeset_walk(limit=None): print 'processing', jn(PROJECT_PATH, PROJECT) total_time = 0 repo = vcs.get_repo(jn(PROJECT_PATH, PROJECT)) cnt = 0 for i in repo: cnt += 1 raw_cs = '/'.join((PROJECT, 'changeset', i.raw_id)) if limit and limit == cnt: break full_uri = (BASE_URI % raw_cs) s = time.time() f = o.open(full_uri) size = len(f.read()) e = time.time() - s total_time += e print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e) print 'total_time', total_time print 'average on req', total_time / float(cnt)
def __wrapper(func, *fargs, **fkwargs):
    """
    Run *func* under a file based lock derived from the call signature;
    if the lock is already held, skip the call and report it.
    """
    lockkey = __get_lockkey(func, *fargs, **fkwargs)
    lockkey_path = config['here']

    log.info('running task with lockkey %s', lockkey)
    try:
        lock = DaemonLock(file_=jn(lockkey_path, lockkey))
        result = func(*fargs, **fkwargs)
        lock.release()
        return result
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
def __wrapper(func, *fargs, **fkwargs):
    """
    Execute *func* guarded by a file based lock; when the lock is held
    by another worker the call is skipped and a message returned instead.
    """
    lockkey = __get_lockkey(func, *fargs, **fkwargs)
    lockkey_path = config["here"]

    # lazy %-style logging args instead of eager "%" formatting: the
    # message is only rendered if the record is actually emitted
    log.info("running task with lockkey %s", lockkey)
    try:
        l = DaemonLock(file_=jn(lockkey_path, lockkey))
        ret = func(*fargs, **fkwargs)
        l.release()
        return ret
    except LockHeld:
        log.info("LockHeld")
        return "Task with key %s already running" % lockkey
def __wrapper(func, *fargs, **fkwargs):
    """
    Call *func* while holding a lock file placed three directory levels
    above this module; concurrent calls with the same key are rejected.
    """
    lockkey = __get_lockkey(func, *fargs, **fkwargs)
    # lock directory: three levels up from this file
    lockkey_path = dn(dn(dn(os.path.abspath(__file__))))

    log.info('running task with lockkey %s', lockkey)
    try:
        lock = DaemonLock(jn(lockkey_path, lockkey))
        result = func(*fargs, **fkwargs)
        lock.release()
        return result
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
def get_full_content(self, docid):
    """
    Fetch the stored document for *docid* and enrich it depending on the
    active search type ('content', 'path' or 'message').

    :param docid: tuple of (document number, chunk number)
    :returns: stored-fields dict extended with highlight / path entries
    """
    res = self.searcher.stored_fields(docid[0])
    # lazy %-args instead of eager formatting (res may be large)
    log.debug('result: %s', res)

    if self.search_type == 'content':
        # strip the repository prefix so f_path is repo-relative
        full_repo_path = jn(self.repo_location, res['repository'])
        f_path = res['path'].split(full_repo_path)[-1]
        f_path = f_path.lstrip(os.sep)
        content_short = self.get_short_content(res, docid[1])
        res.update({'content_short': content_short,
                    'content_short_hl': self.highlight(content_short),
                    'f_path': f_path
                    })
    elif self.search_type == 'path':
        full_repo_path = jn(self.repo_location, res['repository'])
        f_path = res['path'].split(full_repo_path)[-1]
        f_path = f_path.lstrip(os.sep)
        res.update({'f_path': f_path})
    elif self.search_type == 'message':
        res.update({'message_hl': self.highlight(res['message'])})

    log.debug('result: %s', res)

    return res
def test_files_walk(limit=100): print 'processing', jn(PROJECT_PATH, PROJECT) total_time = 0 repo = vcs.get_repo(jn(PROJECT_PATH, PROJECT)) from rhodecode.lib.oset import OrderedSet paths_ = OrderedSet(['']) try: tip = repo.get_changeset('tip') for topnode, dirs, files in tip.walk('/'): for dir in dirs: paths_.add(dir.path) for f in dir: paths_.add(f.path) for f in files: paths_.add(f.path) except vcs.exception.RepositoryError, e: pass
def get_paths(self, repo):
    """
    Recursively walk the repository tip and return the set of paths of
    all files in it, each joined onto ``repo.path``.

    :param repo: repository object exposing ``get_changeset`` and ``path``
    :returns: set of joined path strings (empty set on repository errors)
    """
    index_paths_ = set()
    try:
        tip = repo.get_changeset('tip')
        for topnode, dirs, files in tip.walk('/'):
            for f in files:
                index_paths_.add(jn(repo.path, f.path))
    except RepositoryError:
        log.debug(traceback.format_exc())
    # BUG FIX: the computed set was previously never returned (the newer
    # variant of this method returns it -- callers expect a set)
    return index_paths_
def get_paths(self, repo):
    """
    Walk the indexed changeset of *repo* recursively and return the set
    of all file paths in it, joined onto the repository root.
    """
    paths = set()
    try:
        changeset = self._get_index_changeset(repo)
        root = safe_str(repo.path)
        for _topnode, _dirs, file_nodes in changeset.walk('/'):
            for node in file_nodes:
                paths.add(jn(root, safe_str(node.path)))
    except RepositoryError:
        # a broken/empty repository yields whatever was collected so far
        log.debug(traceback.format_exc())
    return paths
def test_files_walk(proj, limit=100): repo, proj = _get_repo(proj) print 'processing', jn(PROJECT_PATH, proj) total_time = 0 paths_ = OrderedSet(['']) try: tip = repo.get_changeset('tip') for topnode, dirs, files in tip.walk('/'): for dir in dirs: paths_.add(dir.path) for f in dir: paths_.add(f.path) for f in files: paths_.add(f.path) except RepositoryError, e: pass
def test_clone_wrong_credentials():
    """
    Cloning over http with a wrong user name must be rejected by the
    server with an authorization error.
    """
    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)

    # start from an empty destination directory
    # (removed a pointless `try: ... except OSError: raise` wrapper --
    # a bare re-raise is identical to having no handler at all)
    shutil.rmtree(path, ignore_errors=True)
    os.makedirs(path)

    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s %(dest)s' % \
                {'user': USER + 'error',  # deliberately wrong user name
                 'pass': PASS,
                 'host': HOST,
                 'cloned_repo': HG_REPO,
                 'dest': path}

    stdout, stderr = Command(cwd).execute('hg clone', clone_url)

    assert """abort: authorization failed""" in stderr, 'no error from wrong credentials'