def testfork(self):
    """Fork both test repositories so the originals stay untouched.

    :returns: dict mapping backend key ('git'/'hg') to the new fork name
    """
    namer = _RandomNameSequence()
    forks = {}
    # create the git fork first, then the hg fork (order preserved)
    for backend, repo in (('git', base.GIT_REPO), ('hg', base.HG_REPO)):
        fork_name = '%s_fork%s' % (repo, next(namer))
        fixture.create_fork(repo, fork_name)
        forks[backend] = fork_name
    return forks
def test_push_invalidates_cache_git(self, webserver):
    """Pushing to a fresh git fork must leave no CacheInvalidation rows for it."""
    # ensure an active cache key exists for the base repository
    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == GIT_REPO).scalar()
    if not key:
        key = CacheInvalidation(GIT_REPO, GIT_REPO)
        Session().add(key)
    key.cache_active = True
    Session().commit()

    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(GIT_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute(
        'git clone', clone_url, DEST)

    # commit some stuff into a fork so the base repo stays untouched;
    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (GIT_REPO, next(_RandomNameSequence()))
    fixture.create_fork(GIT_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'git', DEST, files_no=1,
                                         clone_url=clone_url)
    _check_proper_git_push(stdout, stderr)

    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == fork_name).all()
    assert key == []
def _add_files(vcs, dest_dir, files_no=3):
    """
    Generate some files, add it to dest_dir repo and push back
    vcs is git or hg and defines what VCS we want to make those files for

    :param vcs:
    :param dest_dir:
    """
    # unique file name so repeated runs never collide
    added_file = '%ssetup.py' % next(_RandomNameSequence())
    # create the (empty) file, then stage it
    open(os.path.join(dest_dir, added_file), 'a').close()
    Command(dest_dir).execute(vcs, 'add', added_file)
    email = '*****@*****.**'
    # non-ASCII author on POSIX to exercise encoding; plain on Windows
    if os.name == 'nt':
        author_str = 'User <%s>' % email
    else:
        author_str = 'User ǝɯɐᴎ <%s>' % email
    # one commit per appended line
    for i in range(files_no):
        cmd = """echo "added_line%s" >> %s""" % (i, added_file)
        Command(dest_dir).execute(cmd)
        if vcs == 'hg':
            cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
                i, author_str, added_file)
        elif vcs == 'git':
            cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
                i, author_str, added_file)
        # git commit needs EMAIL on some machines
        Command(dest_dir).execute(cmd, EMAIL=email)
class Arena(object):
    # Class-wide generator of unique suffixes for mmap tag names.
    _rand = tempfile._RandomNameSequence()

    def __init__(self, size):
        self.size = size
        # Windows: mmap() with an existing tagname silently reopens the old
        # mapping; GetLastError() == 0 means we got a brand-new one.  Retry
        # with fresh random names a bounded number of times.
        for i in range(100):
            name = 'pym-%d-%s' % (os.getpid(), next(self._rand))
            buf = mmap.mmap(-1, size, tagname=name)
            if win32.GetLastError() == 0:
                break
            # we have reopened a preexisting map
            buf.close()
        else:
            # 100 collisions in a row -- give up
            exc = IOError('Cannot find name for new mmap')
            exc.errno = errno.EEXIST
            raise exc
        self.name = name
        self.buffer = buf
        self._state = (self.size, self.name)

    def __getstate__(self):
        # Arenas may only be pickled while spawning a child process.
        context.assert_spawning(self)
        return self._state

    def __setstate__(self, state):
        self.size, self.name = self._state = state
        # Reattach to the parent's named mapping in the child process.
        self.buffer = mmap.mmap(-1, self.size, tagname=self.name)
def test_push_unlocks_repository_git(self, webserver):
    """A successful git push must release the repository lock."""
    # enable locking on a dedicated fork; next() replaces Py2-only .next()
    fork_name = '%s_fork%s' % (GIT_REPO, next(_RandomNameSequence()))
    fixture.create_fork(GIT_REPO, fork_name)
    r = Repository.get_by_repo_name(fork_name)
    r.enable_locking = True
    Session().commit()
    # clone to a temp dir
    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = Command(TESTS_TMP_PATH).execute(
        'git clone', clone_url, DEST)
    # the clone itself must have locked the repo for the cloning user
    r = Repository.get_by_repo_name(fork_name)
    assert r.locked[0] == User.get_by_username(
        TEST_USER_ADMIN_LOGIN).user_id
    # push is ok and repo is now unlocked
    stdout, stderr = _add_files_and_push(webserver, 'git', DEST,
                                         clone_url=clone_url)
    _check_proper_git_push(stdout, stderr)
    assert ('remote: Released lock on repo `%s`' % fork_name) in stderr
    # we need to cleanup the Session here!
    Session.remove()
    r = Repository.get_by_repo_name(fork_name)
    assert r.locked == [None, None]
def test_push_unlocks_repository_git(self, webserver):
    """A successful git push must release the repository lock."""
    # enable locking on a dedicated fork; next() replaces Py2-only .next()
    fork_name = '%s_fork%s' % (GIT_REPO, next(_RandomNameSequence()))
    fixture.create_fork(GIT_REPO, fork_name)
    r = Repository.get_by_repo_name(fork_name)
    r.enable_locking = True
    Session().commit()
    # clone to a temp dir
    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = Command(TESTS_TMP_PATH).execute('git clone', clone_url,
                                                     DEST)
    # the clone itself must have locked the repo for the cloning user
    r = Repository.get_by_repo_name(fork_name)
    assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
    # push is ok and repo is now unlocked
    stdout, stderr = _add_files_and_push(webserver, 'git', DEST,
                                         clone_url=clone_url)
    _check_proper_git_push(stdout, stderr)
    assert ('remote: Released lock on repo `%s`' % fork_name) in stderr
    # we need to cleanup the Session here!
    Session.remove()
    r = Repository.get_by_repo_name(fork_name)
    assert r.locked == [None, None]
def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD,
                                seq=None, backend='hg'):
    """Clone (or pull) ``repo`` over HTTP with credentials embedded in the URL.

    :param no_errors: skip the output assertions when True
    :param repo: repository name to operate on
    :param method: 'clone' or 'pull'
    :param seq: unique suffix for the destination dir (random when None)
    :param backend: 'hg' or 'git'
    """
    cwd = path = jn(TESTS_TMP_PATH, repo)
    if seq is None:
        # next() replaces the Python-2-only .next() method
        seq = next(_RandomNameSequence())
    try:
        shutil.rmtree(path, ignore_errors=True)
        os.makedirs(path)
    except OSError:
        raise
    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
        {'user': USER,
         'pass': PASS,
         'host': HOST,
         'cloned_repo': repo, }
    dest = path + seq
    if method == 'pull':
        stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest,
                                              clone_url)
    else:
        stdout, stderr = Command(cwd).execute(backend, method, clone_url,
                                              dest)
    # (leftover Py2 debug print removed)
    if not no_errors:
        if backend == 'hg':
            assert """adding file changes""" in stdout, 'no messages about cloning'
            assert """abort""" not in stderr, 'got error from clone'
        elif backend == 'git':
            assert """Cloning into""" in stdout, 'no messages about cloning'
def random_track(number_of_features=15000000, size=1000, jump=1000,
                 orig_start=0, chrs=20):
    """Yield a random BED track: one header line, then random feature lines.

    :param number_of_features: how many feature lines to generate
    :param size: maximum feature length
    :param jump: maximum gap between consecutive feature starts
    :param orig_start: start coordinate for every chromosome
    :param chrs: number of chromosomes to spread features over
    """
    import random, tempfile
    yield 'track type=bed name="Features" description="Intervals" source="Random generator"\n'
    name_gen = tempfile._RandomNameSequence()
    chr = 0
    # features per chromosome; guard against number_of_features < chrs
    per_chr = max(1, number_of_features // chrs)
    # randint needs integer bounds for the thick-coordinate jitter
    quarter = int(size * 0.25)
    for i in range(number_of_features):
        if i % per_chr == 0:
            chr += 1
        start = orig_start
        start = start + (random.randint(0, jump))
        end = start + (random.randint(1, size))
        thick_start = start + (random.randint(-quarter, quarter))
        thick_end = end + (random.randint(-quarter, quarter))
        # next() replaces the Python-2-only .next() method
        name = next(name_gen) + next(name_gen)
        strand = random.random() < 0.5 and '+' or '-'
        score = random.random()
        # every field must be str for the join (score was a float: bug)
        line = ['chr' + str(chr), str(start), str(end), name, str(score),
                strand, str(thick_start), str(thick_end)]
        yield ('\t'.join(line) + '\n')
def textInputDialog(title):
    """Open a modal text input dialog and return the entered string.

    :param title: label shown for the input field
    :returns: the entered string, or None when the user cancelled
    """
    # Create a random name for the transient user value;
    # next() replaces the Python-2-only .next() method
    import tempfile
    randname = next(tempfile._RandomNameSequence())
    # Create a lambda shortcut for executing commands to save some typing
    cmdSvc = lx.service.Command()
    execCmd = lambda text: cmdSvc.ExecuteArgString(-1, lx.symbol.iCTAG_NULL, text)
    # Create a transient user value to use as text input dialog
    execCmd('user.defNew %s string' % randname)
    execCmd('user.def %s transient true' % randname)
    execCmd('user.def %s username "%s"' % (randname, title))
    # Show the modal input dialog
    try:
        execCmd('user.value %s' % randname)
    except RuntimeError:
        # User cancelled
        return None
    # Fetch and return the user value's string content
    uservalue = lx.service.ScriptSys().UserValueLookup(randname)
    result = uservalue.GetString()
    return result
def test_push_invalidates_cache_hg(self, webserver):
    """Pushing to a fresh hg fork must leave no CacheInvalidation rows for it."""
    # ensure an active cache key exists for the base repository
    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == HG_REPO).scalar()
    if not key:
        key = CacheInvalidation(HG_REPO, HG_REPO)
        Session().add(key)
    key.cache_active = True
    Session().commit()

    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(HG_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute(
        'hg clone', clone_url, DEST)

    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (HG_REPO, next(_RandomNameSequence()))
    fixture.create_fork(HG_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'hg', DEST, files_no=1,
                                         clone_url=clone_url)

    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == fork_name).all()
    assert key == []
class Arena(object):
    # Class-wide generator of unique suffixes for mmap tag names.
    _rand = tempfile._RandomNameSequence()

    def __init__(self, size):
        self.size = size
        # Windows: mmap() with an existing tagname silently reopens the old
        # mapping; GetLastError() == 0 means we created a brand-new one.
        for i in range(100):
            name = 'pym-%d-%s' % (os.getpid(), next(self._rand))
            buf = mmap.mmap(-1, size, tagname=name)
            if _winapi.GetLastError() == 0:
                break
            # We have reopened a preexisting mmap.
            buf.close()
        else:
            # 100 name collisions in a row -- give up
            raise FileExistsError('Cannot find name for new mmap')
        self.name = name
        self.buffer = buf
        self._state = (self.size, self.name)

    def __getstate__(self):
        # Arenas may only be pickled while spawning a child process.
        popen.assert_spawning(self)
        return self._state

    def __setstate__(self, state):
        self.size, self.name = self._state = state
        self.buffer = mmap.mmap(-1, self.size, tagname=self.name)
        # Reattaching must find the parent's existing mapping.
        assert _winapi.GetLastError() == _winapi.ERROR_ALREADY_EXISTS
def test_push_new_file(commits=15, with_clone=True):
    """Commit ``commits`` changes to a fresh file and push them over HTTP.

    :param commits: number of commits to create
    :param with_clone: re-clone the repository first when True
    """
    if with_clone:
        test_clone_with_credentials(no_errors=True)
    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)
    # unique (non-ASCII) file name; next() replaces Py2-only .next()
    added_file = jn(path, '%ssetupążźć.py' % next(_RandomNameSequence()))
    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('hg add %s' % added_file)
    # range() replaces the Python-2-only xrange()
    for i in range(commits):
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)
        cmd = """hg ci -m 'commited new %s' -u '%s' %s """ % (
            i, 'Marcin Kuźminski <*****@*****.**>', added_file)
        Command(cwd).execute(cmd)
    push_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
        {'user': USER,
         'pass': PASS,
         'host': HOST,
         'cloned_repo': HG_REPO,
         'dest': jn(TESTS_TMP_PATH, HG_REPO)}
    Command(cwd).execute('hg push --verbose --debug %s' % push_url)
class Arena(object):
    """
    A shared memory area backed by anonymous memory (Windows).
    """
    # Class-wide generator of unique suffixes for mmap tag names.
    _rand = tempfile._RandomNameSequence()

    def __init__(self, size):
        self.size = size
        # Windows: mmap() with an existing tagname silently reopens the old
        # mapping; GetLastError() == 0 means we created a brand-new one.
        for i in range(100):
            name = 'pym-%d-%s' % (os.getpid(), next(self._rand))
            buf = mmap.mmap(-1, size, tagname=name)
            if _winapi.GetLastError() == 0:
                break
            # We have reopened a preexisting mmap.
            buf.close()
        else:
            # 100 name collisions in a row -- give up
            raise FileExistsError('Cannot find name for new mmap')
        self.name = name
        self.buffer = buf
        self._state = (self.size, self.name)

    def __getstate__(self):
        # Arenas may only be pickled while spawning a child process.
        assert_spawning(self)
        return self._state

    def __setstate__(self, state):
        self.size, self.name = self._state = state
        # Reopen existing mmap
        self.buffer = mmap.mmap(-1, self.size, tagname=self.name)
def temp_file(self):
    """Yield a unique temp-file path and remove the file afterwards.

    The file itself is not created here; the caller is expected to create
    it at the yielded path.  Cleanup tolerates the file never having been
    created (the old code raised OSError in that case).
    """
    # next() replaces the Python-2-only .next() method
    temp_file = os.path.join(tempfile.gettempdir(),
                             next(tempfile._RandomNameSequence()))
    try:
        yield temp_file
    finally:
        try:
            os.remove(temp_file)
        except OSError:
            # the caller never created the file -- nothing to clean up
            pass
def _add_files_and_push(vcs, dest, clone_url=None, **kwargs):
    """
    Generate some files, add it to DEST repo and push back
    vcs is git or hg and defines what VCS we want to make those files for

    :param vcs: 'git' or 'hg'
    :param dest: working copy to commit in and push from
    :param clone_url: URL to push to
    :returns: (stdout, stderr) of the push command
    """
    # commit some stuff into this repo; next() replaces Py2-only .next()
    cwd = path = jn(dest)
    added_file = jn(path, '%ssetup.py' % next(tempfile._RandomNameSequence()))
    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('%s add %s' % (vcs, added_file))
    # range() replaces the Python-2-only xrange()
    for i in range(kwargs.get('files_no', 3)):
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)
        author_str = 'Marcin Kuźminski <*****@*****.**>'
        if vcs == 'hg':
            cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % (
                i, author_str, added_file)
        elif vcs == 'git':
            # git commit needs EMAIL on some machines
            cmd = """EMAIL="*****@*****.**" git commit -m 'commited new %s' """\
                  """--author '%s' %s """ % (i, author_str, added_file)
        Command(cwd).execute(cmd)
    # PUSH it back
    stdout = stderr = None
    if vcs == 'hg':
        stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url)
    elif vcs == 'git':
        stdout, stderr = Command(cwd).execute('git push --verbose',
                                              clone_url + " master")
    return stdout, stderr
def test_push_new_file(commits=15, with_clone=True):
    """Commit ``commits`` changes to a fresh file and push them over HTTP.

    :param commits: number of commits to create
    :param with_clone: re-clone the repository first when True
    """
    if with_clone:
        test_clone_with_credentials(no_errors=True)
    cwd = path = jn(TESTS_TMP_PATH, HG_REPO)
    # unique (non-ASCII) file name; next() replaces Py2-only .next()
    added_file = jn(path, '%ssetupążźć.py' % next(_RandomNameSequence()))
    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('hg add %s' % added_file)
    # range() replaces the Python-2-only xrange()
    for i in range(commits):
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)
        cmd = """hg ci -m 'commited new %s' -u '%s' %s """ % (
            i, 'Marcin Kuźminski <*****@*****.**>', added_file)
        Command(cwd).execute(cmd)
    push_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
        {'user': USER,
         'pass': PASS,
         'host': HOST,
         'cloned_repo': HG_REPO,
         'dest': jn(TESTS_TMP_PATH, HG_REPO)}
    Command(cwd).execute('hg push --verbose --debug %s' % push_url)
def memmap_data(data, read_only: bool = True):
    """Dump *data* to a fresh memory-mapped file and reload it lazily.

    :param data: object to persist
    :param read_only: open the map in 'r+' mode when True, 'w+' otherwise
    :returns: tuple of (memory-mapped array, path of the backing file)
    """
    folder_name = "flirt_memmap_%d" % os.getpid()
    temp_dir, _ = __get_temp_dir(folder_name)
    mmap_path = os.path.join(
        temp_dir, 'memmap_%s.mmap' % next(tempfile._RandomNameSequence()))
    # a stale file with the same random name is extremely unlikely but cheap
    # to guard against
    if os.path.exists(mmap_path):
        os.unlink(mmap_path)
    dump(data, mmap_path)
    mode = 'r+' if read_only else 'w+'
    return load(mmap_path, mmap_mode=mode), mmap_path
def test_push_new_file_git(self, webserver):
    """Push new commits to a fresh git fork."""
    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(GIT_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute('git clone', clone_url,
                                                     DEST)
    # commit some stuff into a fork so the base repo stays untouched;
    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (GIT_REPO, next(_RandomNameSequence()))
    fixture.create_fork(GIT_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'git', DEST,
                                         clone_url=clone_url)
    # print() function replaces the Python-2-only print statement
    print([(x.repo_full_path, x.repo_path)
           for x in Repository.query()])  # TODO: what is this for
def CreateCustomVRT(vrtxml, vrtcols, vrtrows):
    """Wrap *vrtxml* in a VRTDataset and write it to an in-memory .vrt file.

    :param vrtxml: inner VRT XML fragment
    :param vrtcols: raster X size
    :param vrtrows: raster Y size
    :returns: the /vsimem/ path of the written VRT, or None on failure
    """
    try:
        vrt = []
        vrt.append('<VRTDataset rasterXSize="%s" rasterYSize="%s">' % (vrtcols, vrtrows))
        vrt.append("%s" % vrtxml)
        vrt.append("</VRTDataset>")
        # unique in-memory filename; next() replaces Py2-only .next()
        vrtfn = "/vsimem/%s.vrt" % next(tempfile._RandomNameSequence())
        write_vsimem(vrtfn, "\n".join(vrt))
        return vrtfn
    except Exception:
        # best effort: narrow from a bare except; signal failure with None
        return None
def vimopen(txt=''):
    """Edit *txt* in vim via a temporary markdown file; return the result.

    :param txt: initial buffer contents
    :returns: the file contents after the editor exits
    """
    rns = tmp._RandomNameSequence()
    # next() is the idiomatic spelling of __next__()
    name = os.path.join(tmp.gettempdir(), next(rns) + '.md')
    # with-blocks guarantee the handles are closed even on error
    with open(name, 'w') as f:
        f.write(txt)
    os.system('vim ' + name)
    try:
        with open(name, 'r') as f:
            data = f.read()
    finally:
        # remove the temp file even if reading fails (old code leaked it)
        os.unlink(name)
    return data
def generate_api_key(str_, salt=None):
    """
    Generates API KEY from given string

    :param str_: seed string for the key
    :param salt: optional salt; a random one is generated when None
    :returns: 40-character hex sha1 digest
    """
    if salt is None:
        # next() replaces the Python-2-only .next() method
        salt = next(_RandomNameSequence())
    # sha1 operates on bytes under Python 3
    return hashlib.sha1((str_ + salt).encode('utf-8')).hexdigest()
def CreateCustomVRT(vrtxml, vrtcols, vrtrows):
    """Wrap *vrtxml* in a VRTDataset and write it to an in-memory .vrt file.

    :param vrtxml: inner VRT XML fragment
    :param vrtcols: raster X size
    :param vrtrows: raster Y size
    :returns: the /vsimem/ path of the written VRT, or None on failure
    """
    try:
        vrt = []
        vrt.append('<VRTDataset rasterXSize="%s" rasterYSize="%s">' % (vrtcols, vrtrows))
        vrt.append('%s' % vrtxml)
        vrt.append('</VRTDataset>')
        # unique in-memory filename; next() replaces Py2-only .next()
        vrtfn = '/vsimem/%s.vrt' % next(tempfile._RandomNameSequence())
        write_vsi(vrtfn, '\n'.join(vrt))
        return vrtfn
    except Exception:
        # best effort: narrow from a bare except; signal failure with None
        return None
def _add_files_and_push(webserver, vcs, DEST, ignoreReturnCode=False,
                        files_no=3, clone_url=None,
                        username=TEST_USER_ADMIN_LOGIN,
                        password=TEST_USER_ADMIN_PASS):
    """
    Generate some files, add it to DEST repo and push back
    vcs is git or hg and defines what VCS we want to make those files for

    :param vcs: 'git' or 'hg'
    :param DEST: working copy to commit in and push from
    :returns: (stdout, stderr) of the push command
    """
    # commit some stuff into this repo; next() replaces Py2-only .next()
    cwd = os.path.join(DEST)
    added_file = '%ssetup.py' % next(_RandomNameSequence())
    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('%s add %s' % (vcs, added_file))

    email = '*****@*****.**'
    # non-ASCII author on POSIX to exercise encoding; plain on Windows
    if os.name == 'nt':
        author_str = 'User <%s>' % email
    else:
        author_str = 'User ǝɯɐᴎ <%s>' % email
    # range() replaces the Python-2-only xrange()
    for i in range(files_no):
        cmd = """echo "added_line%s" >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)
        if vcs == 'hg':
            cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
                i, author_str, added_file
            )
        elif vcs == 'git':
            cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
                i, author_str, added_file
            )
        # git commit needs EMAIL on some machines
        Command(cwd).execute(cmd, EMAIL=email)

    # PUSH it back
    _REPO = None
    if vcs == 'hg':
        _REPO = HG_REPO
    elif vcs == 'git':
        _REPO = GIT_REPO

    if clone_url is None:
        clone_url = webserver.repo_url(_REPO, username=username,
                                       password=password)

    stdout = stderr = None
    if vcs == 'hg':
        stdout, stderr = Command(cwd).execute(
            'hg push --verbose', clone_url,
            ignoreReturnCode=ignoreReturnCode)
    elif vcs == 'git':
        stdout, stderr = Command(cwd).execute(
            'git push --verbose', clone_url, "master",
            ignoreReturnCode=ignoreReturnCode)
    return stdout, stderr
def test_push_new_file_hg(self, webserver):
    """Push new commits to a fresh hg fork and verify the push output."""
    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(HG_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute('hg clone', clone_url,
                                                     DEST)
    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (HG_REPO, next(_RandomNameSequence()))
    fixture.create_fork(HG_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'hg', DEST,
                                         clone_url=clone_url)
    assert 'pushing to' in stdout
    assert 'Repository size' in stdout
    assert 'Last revision is now' in stdout
def __init__(self, obj, name, **opts):
    # Given a dataset, snapshot it first (retrying on name collisions);
    # given an existing snapshot, clone it directly.
    if isinstance(obj, ZFSDataset):
        # _RandomNameSequence is an endless iterator of candidate names
        for snapname in _RandomNameSequence():
            try:
                self.snap = TemporarySnapshot(obj, snapname)
            except ZFSException as exc:
                if exc.code == ZFSErrorCode.EXISTS:
                    # name already taken -> try the next random name
                    continue
                # NOTE(review): other ZFS errors are swallowed and the loop
                # retries with a new name -- confirm this is intended
            else:
                # snapshot created successfully
                break
    else:
        self.snap = obj
    self.fs = create_clone(self.snap, name, **opts)
def _add_files_and_push(vcs, DEST, **kwargs):
    """
    Generate some files, add it to DEST repo and push back
    vcs is git or hg and defines what VCS we want to make those files for

    :param vcs: 'git' or 'hg'
    :param DEST: working copy to commit in and push from
    :returns: (stdout, stderr) of the push command
    """
    # commit some stuff into this repo; next() replaces Py2-only .next()
    cwd = path = jn(DEST)
    added_file = jn(path, '%ssetup.py' % next(_RandomNameSequence()))
    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('%s add %s' % (vcs, added_file))
    # range() replaces the Python-2-only xrange()
    for i in range(kwargs.get('files_no', 3)):
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)
        author_str = 'Marcin Kuźminski <*****@*****.**>'
        if vcs == 'hg':
            cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % (
                i, author_str, added_file
            )
        elif vcs == 'git':
            # git commit needs EMAIL on some machines
            cmd = """EMAIL="*****@*****.**" git commit -m 'commited new %s' --author '%s' %s """ % (
                i, author_str, added_file
            )
        Command(cwd).execute(cmd)
    # PUSH it back
    _REPO = None
    if vcs == 'hg':
        _REPO = HG_REPO
    elif vcs == 'git':
        _REPO = GIT_REPO
    kwargs['dest'] = ''
    clone_url = _construct_url(_REPO, **kwargs)
    # explicit clone_url overrides the constructed one
    if 'clone_url' in kwargs:
        clone_url = kwargs['clone_url']
    stdout = stderr = None
    if vcs == 'hg':
        stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url)
    elif vcs == 'git':
        stdout, stderr = Command(cwd).execute('git push --verbose',
                                              clone_url + " master")
    return stdout, stderr
def generate_api_key(username, salt=None):
    """
    Generates unique API key for given username, if salt is not given
    it'll be generated from some random string

    :param username: username as string
    :param salt: salt to hash generate KEY
    :rtype: str
    :returns: sha1 hash from username+salt
    """
    from tempfile import _RandomNameSequence
    import hashlib
    if salt is None:
        # next() replaces the Python-2-only .next() method
        salt = next(_RandomNameSequence())
    # sha1 operates on bytes under Python 3
    return hashlib.sha1((username + salt).encode('utf-8')).hexdigest()
def test_push_new_file_git(self, webserver):
    """Push new commits to a fresh git fork and verify the push output."""
    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(GIT_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute(
        'git clone', clone_url, DEST)
    # commit some stuff into a fork so the base repo stays untouched;
    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (GIT_REPO, next(_RandomNameSequence()))
    fixture.create_fork(GIT_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'git', DEST,
                                         clone_url=clone_url)
    # print() call replaces the Py2 print statement (print[...] is a
    # TypeError on Python 3)
    print([(x.repo_full_path, x.repo_path)
           for x in Repository.query()])  # TODO: what is this for
    _check_proper_git_push(stdout, stderr)
def test_push_new_file_hg(self, webserver):
    """Push new commits to a fresh hg fork and verify the push output."""
    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(HG_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute(
        'hg clone', clone_url, DEST)
    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (HG_REPO, next(_RandomNameSequence()))
    fixture.create_fork(HG_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'hg', DEST,
                                         clone_url=clone_url)
    assert 'pushing to' in stdout
    assert 'Repository size' in stdout
    assert 'Last revision is now' in stdout
def formulate_command(arguments, no_rm=False, random_tempdir=True,
                      mount_parent=False, cwd=None):
    """
    Generates a docker command, with automatic bind mounting

    :param arguments: command arguments; existing file/dir paths get
        bind-mounted into the container and rewritten accordingly
    :param no_rm: skip docker's --rm flag when True
    :param random_tempdir: mount under a random temp subdirectory
    :param mount_parent: mount the path's parent directory instead
    :param cwd: base dir for resolving relative paths; defaults to the
        current working directory at call time (the old ``cwd=os.getcwd()``
        default was evaluated once at import time -- a classic default-arg bug)
    """
    if cwd is None:
        cwd = os.getcwd()
    # Build the docker command up as two lists
    docker_command = ['docker', 'run']
    container_command = []
    if not no_rm:
        docker_command += ['--rm']
    # We will use this for generating unique mount locations
    name_gen = tempfile._RandomNameSequence()
    # Convert paths into bind mounts
    for arg in arguments:
        # If it's a file or directory, we need to mount it
        if os.path.isdir(arg) or os.path.isfile(arg):
            arg_path = Path(arg)
            # Make all relative paths absolute
            if not arg_path.is_absolute():
                arg_path = Path(cwd) / arg_path
            # Generate a random temporary directory by default
            if random_tempdir:
                temp_dir = Path(tempfile.gettempdir()) / next(name_gen)
            else:
                temp_dir = Path(tempfile.gettempdir())
            if mount_parent:
                mount_dest = temp_dir / arg_path.parent.name
                docker_command += ['-v', f'{arg_path.parent}:{mount_dest}']
                container_command.append(mount_dest / arg_path.name)
            else:
                mount_dest = temp_dir / arg_path.name
                docker_command += ['-v', f'{arg_path}:{mount_dest}']
                container_command.append(mount_dest)
        else:
            container_command.append(arg)
    # Convert everything back to strings
    return [str(arg) for arg in [*docker_command, *container_command]]
def random_track(number_of_features=15000000, size=1000, jump=1000,
                 orig_start=0, chrs=20):
    """Yield a random BED track: one header line, then random feature lines.

    :param number_of_features: how many feature lines to generate
    :param size: maximum feature length
    :param jump: maximum gap between consecutive feature starts
    :param orig_start: start coordinate for every chromosome
    :param chrs: number of chromosomes to spread features over
    """
    import random, tempfile
    yield 'track type=bed name="Features" description="Intervals" source="Random generator"\n'
    name_gen = tempfile._RandomNameSequence()
    chr = 0
    # features per chromosome; guard against number_of_features < chrs
    per_chr = max(1, number_of_features // chrs)
    # randint needs integer bounds for the thick-coordinate jitter
    quarter = int(size * 0.25)
    for i in range(number_of_features):
        if i % per_chr == 0:
            chr += 1
        start = orig_start
        start = start + (random.randint(0, jump))
        end = start + (random.randint(1, size))
        thick_start = start + (random.randint(-quarter, quarter))
        thick_end = end + (random.randint(-quarter, quarter))
        # next() replaces the Python-2-only .next() method
        name = next(name_gen) + next(name_gen)
        strand = random.random() < 0.5 and '+' or '-'
        score = random.random()
        # every field must be str for the join (score was a float: bug)
        line = ['chr' + str(chr), str(start), str(end), name, str(score),
                strand, str(thick_start), str(thick_end)]
        yield ('\t'.join(line) + '\n')
def __init__(self, queue_size=20, verbose=False):
    """Set up the trash queue and pick the safest available delete backend."""
    super().__init__()
    from .loom import Spool
    # generator of unique names for trashed items
    self.randomname = _RandomNameSequence()
    self.verbose = verbose
    self.queue_size = queue_size
    self.trash_queue = []
    # prefer send2trash when installed; otherwise fall back to hard deletion
    try:
        import send2trash
    except ImportError:
        logger.warning("send2trash unavailible, using unsafe delete")
        self._osTrash = os.unlink
    else:
        self._osTrash = send2trash.send2trash
    self._spool = Spool(8, "os trash")
def parse(self):
    """Emit one randomly generated track of features to self.handler."""
    # Initialize #
    chromosomes = [chrsuffix + str(x) for x in range(10)]
    name_generator = tempfile._RandomNameSequence()
    # fixed seed so generated names are reproducible
    name_generator.rng.seed(0)
    # New track #
    self.handler.newTrack("Random track generator")
    self.handler.defineFields(["start", "end", "name", "score", "strand"])
    # sys.maxsize replaces the Python-2-only sys.maxint
    self.handler.defineChrmeta(dict([(ch, dict([("length", sys.maxsize)]))
                                     for ch in chromosomes]))
    # Lots of features #
    for chrom in chromosomes:
        start = 0
        for feat in range(int(feature_factor + 4 * feature_factor * random.random())):
            start = start + (random.randint(0, 100))
            end = start + (random.randint(1, 100))
            # next() replaces the Python-2-only .next() method
            name = next(name_generator)
            score = random.gammavariate(1, 0.1) * 1000
            # +1 or -1 with equal probability (the old map(...)[0] fails on
            # Python 3 where map() is lazy)
            strand = 1 if random.randint(0, 1) == 1 else -1
            self.handler.newFeature(chrom, (start, end, name, score, strand))
def test_push_invalidates_cache_hg(self, webserver):
    """Pushing to a fresh hg fork must leave no CacheInvalidation rows for it."""
    # ensure an active cache key exists for the base repository
    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == HG_REPO).scalar()
    if not key:
        key = CacheInvalidation(HG_REPO, HG_REPO)
        Session().add(key)
    key.cache_active = True
    Session().commit()

    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(HG_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute('hg clone', clone_url,
                                                     DEST)
    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (HG_REPO, next(_RandomNameSequence()))
    fixture.create_fork(HG_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'hg', DEST, files_no=1,
                                         clone_url=clone_url)

    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == fork_name).all()
    assert key == []
def open(
    mode="w+b", suffix="", prefix=tempfile.template, dir="/tmp"
) -> AiofilesContextManager:
    """
    Create and return a temporary file.

    Args:
        mode: The mode argument to io.open (default "w+b").
        suffix: If 'suffix' is not None, the file name will end with
            that suffix, otherwise there will be no suffix.
        prefix: If 'prefix' is not None, the file name will begin with
            that prefix, otherwise a default prefix is used.
        dir: If 'dir' is not None, the file will be created in that
            directory, otherwise a default directory is used.

    Returns:
        The corresponding temporary file.
    """
    # pylint: disable=protected-access
    random_part = next(tempfile._RandomNameSequence())
    target = Path(dir) / f"{prefix}{random_part}{suffix}"
    manager = AiofilesContextManager(_open(target, mode=mode))
    # expose the chosen path like a regular file object would
    manager.__setattr__("name", str(target))
    return manager
def setUp(self):
    """Initialize arithmetic coding unit test.

    Records the current directory listing, picks unique names (not present
    in the current directory) for the temporary encoded/decoded files, and
    creates an instance of an ArithmeticCode object.
    """
    self.dir = os.listdir('.')
    # generate unique tmp file names by cheating;
    # next() replaces the Python-2-only .next() method
    makesuffix = tempfile._RandomNameSequence()
    self.encoded = tempfile.gettempprefix() + next(makesuffix)
    self.decoded = tempfile.gettempprefix() + next(makesuffix)
    # re-roll until the names do not collide with existing files
    while self.encoded in self.dir:
        self.encoded = tempfile.gettempprefix() + next(makesuffix)
    while self.decoded in self.dir:
        self.decoded = tempfile.gettempprefix() + next(makesuffix)
    self.ar = ArithmeticCode()
def parse(self):
    """Emit one randomly generated track of features to self.handler."""
    # Initialize #
    chromosomes = [chrsuffix + str(x) for x in range(10)]
    name_generator = tempfile._RandomNameSequence()
    # fixed seed so generated names are reproducible
    name_generator.rng.seed(0)
    # New track #
    self.handler.newTrack('Random track generator')
    self.handler.defineFields(['start', 'end', 'name', 'score', 'strand'])
    # sys.maxsize replaces the Python-2-only sys.maxint
    self.handler.defineChrmeta(
        dict([(ch, dict([('length', sys.maxsize)])) for ch in chromosomes]))
    # Lots of features #
    for chrom in chromosomes:
        start = 0
        for feat in range(
                int(feature_factor + 4 * feature_factor * random.random())):
            start = start + (random.randint(0, 100))
            end = start + (random.randint(1, 100))
            # next() replaces the Python-2-only .next() method
            name = next(name_generator)
            score = random.gammavariate(1, 0.1) * 1000
            # +1 or -1 with equal probability (the old map(...)[0] fails on
            # Python 3 where map() is lazy)
            strand = 1 if random.randint(0, 1) == 1 else -1
            self.handler.newFeature(chrom, (start, end, name, score, strand))
def test_push_invalidates_cache_git(self, webserver):
    """Pushing to a fresh git fork must leave no CacheInvalidation rows for it."""
    # ensure an active cache key exists for the base repository
    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == GIT_REPO).scalar()
    if not key:
        key = CacheInvalidation(GIT_REPO, GIT_REPO)
        Session().add(key)
    key.cache_active = True
    Session().commit()

    DEST = _get_tmp_dir()
    clone_url = webserver.repo_url(GIT_REPO)
    stdout, stderr = Command(TESTS_TMP_PATH).execute('git clone', clone_url,
                                                     DEST)
    # commit some stuff into a fork so the base repo stays untouched;
    # next() replaces the Python-2-only .next() method
    fork_name = '%s_fork%s' % (GIT_REPO, next(_RandomNameSequence()))
    fixture.create_fork(GIT_REPO, fork_name)
    clone_url = webserver.repo_url(fork_name)
    stdout, stderr = _add_files_and_push(webserver, 'git', DEST, files_no=1,
                                         clone_url=clone_url)
    _check_proper_git_push(stdout, stderr)

    key = CacheInvalidation.query().filter(
        CacheInvalidation.cache_key == fork_name).all()
    assert key == []
def test_clone_with_credentials(no_errors=False, repo=HG_REPO):
    """Clone ``repo`` over HTTP with embedded credentials and check output.

    :param no_errors: skip the output assertions when True
    :param repo: repository name to clone
    """
    cwd = path = jn(TESTS_TMP_PATH, repo)
    try:
        shutil.rmtree(path, ignore_errors=True)
        os.makedirs(path)
    except OSError:
        raise
    # next() replaces the Python-2-only .next() method
    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s %(dest)s' % \
        {'user': USER,
         'pass': PASS,
         'host': HOST,
         'cloned_repo': repo,
         'dest': path + next(_RandomNameSequence())}
    stdout, stderr = Command(cwd).execute('hg clone', clone_url)
    if no_errors is False:
        assert """adding file changes""" in stdout, 'no messages about cloning'
        assert """abort""" not in stderr, 'got error from clone'
def setUp(self):
    # Fresh random-name generator for each test case.
    self.r = tempfile._RandomNameSequence()
"Rhodecode Password", "Enter new password for the Rhodecode 'admin' account.") if not email: if 'd' not in locals(): d = Dialog('TurnKey Linux - First boot configuration') email = d.get_email( "Rhodecode Email", "Enter email address for the Rhodecode 'admin' account.", "*****@*****.**") # salt = "".join(random.choice(string.letters) for line in range(16)) hashpass = bcrypt.hashpw(password, bcrypt.gensalt(10)) salt = _RandomNameSequence().next() admin_apikey = hashlib.sha1('admin' + salt).hexdigest() salt = _RandomNameSequence().next() default_apikey = hashlib.sha1('default' + salt).hexdigest() m = MySQL() m.execute('UPDATE rhodecode.users SET email=\"%s\" WHERE username=\"admin\";' % email) m.execute('UPDATE rhodecode.users SET password=\"%s\" WHERE username=\"admin\";' % hashpass) m.execute('UPDATE rhodecode.users SET api_key=\"%s\" WHERE username=\"admin\";' % admin_apikey) m.execute('UPDATE rhodecode.users SET api_key=\"%s\" WHERE username=\"default\";' % default_apikey) script = os.path.join(os.path.dirname(__file__), 'rhodecode-reinit.sh') os.system(script) if __name__ == "__main__": main()
'TEST_USER_ADMIN_EMAIL', 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS', 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN', 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO', 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO', 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO', 'GIT_REMOTE_REPO', 'SCM_TESTS', 'remove_all_notifications', ] # Invoke websetup with the current config file # SetupCommand('setup-app').run([config_file]) environ = {} #SOME GLOBALS FOR TESTS TESTS_TMP_PATH = jn(tempfile.gettempdir(), 'rc_test_%s' % _RandomNameSequence().next()) TEST_USER_ADMIN_LOGIN = '******' TEST_USER_ADMIN_PASS = '******' TEST_USER_ADMIN_EMAIL = '*****@*****.**' TEST_USER_REGULAR_LOGIN = '******' TEST_USER_REGULAR_PASS = '******' TEST_USER_REGULAR_EMAIL = '*****@*****.**' TEST_USER_REGULAR2_LOGIN = '******' TEST_USER_REGULAR2_PASS = '******' TEST_USER_REGULAR2_EMAIL = '*****@*****.**' HG_REPO = u'vcs_test_hg' GIT_REPO = u'vcs_test_git'
def make_track_name_random(path):
    """Build a display name: ten random words followed by *path*'s basename.

    :param path: slash-separated file path; only its last component is kept
    :returns: the generated display name
    """
    name_gen = tempfile._RandomNameSequence()
    # next() replaces the Python-2-only .next() method
    random_words = ' '.join(next(name_gen) for x in range(10))
    return random_words + ' ' + path.split('/')[-1]
def _create_file(self, name=None):
    """Create a small file containing b'foo' under MEDIA_ROOT.

    :param name: file name; a random one is generated when None
    """
    # next() replaces the Python-2-only .next() method
    name = name or next(tempfile._RandomNameSequence())
    path = os.path.join(settings.MEDIA_ROOT, name)
    with open(path, 'a+b') as fd:
        fd.write(b'foo')
'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO', 'GIT_REMOTE_REPO', 'SCM_TESTS', '_get_repo_create_params' ] # Invoke websetup with the current config file # SetupCommand('setup-app').run([config_file]) ##RUNNING DESIRED TESTS # nosetests -x rhodecode.tests.functional.test_admin_settings:TestSettingsController.test_my_account # nosetests --pdb --pdb-failures # nosetests --with-coverage --cover-package=rhodecode.model.validators rhodecode.tests.test_validators environ = {} #SOME GLOBALS FOR TESTS TESTS_TMP_PATH = jn('/', 'tmp', 'rc_test_%s' % _RandomNameSequence().next()) TEST_USER_ADMIN_LOGIN = '******' TEST_USER_ADMIN_PASS = '******' TEST_USER_ADMIN_EMAIL = '*****@*****.**' TEST_USER_REGULAR_LOGIN = '******' TEST_USER_REGULAR_PASS = '******' TEST_USER_REGULAR_EMAIL = '*****@*****.**' TEST_USER_REGULAR2_LOGIN = '******' TEST_USER_REGULAR2_PASS = '******' TEST_USER_REGULAR2_EMAIL = '*****@*****.**' HG_REPO = 'vcs_test_hg' GIT_REPO = 'vcs_test_git'
# -*- coding: utf-8 -*- """Helper methods for libtmux and downstream libtmux libraries.""" from __future__ import (absolute_import, division, print_function, unicode_literals, with_statement) import contextlib import logging import os import tempfile logger = logging.getLogger(__name__) TEST_SESSION_PREFIX = 'libtmux_' namer = tempfile._RandomNameSequence() current_dir = os.path.abspath(os.path.dirname(__file__)) example_dir = os.path.abspath(os.path.join(current_dir, '..', 'examples')) fixtures_dir = os.path.realpath(os.path.join(current_dir, 'fixtures')) def get_test_session_name(server, prefix=TEST_SESSION_PREFIX): while True: session_name = prefix + next(namer) if not server.has_session(session_name): break return session_name def get_test_window_name(session, prefix=TEST_SESSION_PREFIX): while True:
def generate_api_key(username, salt=None):
    """Return a 40-character hex API key derived from *username* and *salt*.

    :param username: account name folded into the digest.
    :param salt: optional salt string; a random one is generated when omitted.
    :return: hex SHA-1 digest of ``username + salt``.

    NOTE(review): SHA-1 is weak for new security-sensitive designs; it is kept
    here only for backward compatibility with previously issued keys.
    """
    if salt is None:
        # next(...) instead of .next(): the method form was removed in Python 3.
        salt = next(_RandomNameSequence())
    # hashlib requires bytes on Python 3; encoding is a no-op concern on ASCII Py2 str.
    return hashlib.sha1((username + salt).encode('utf-8')).hexdigest()
def getoverview(ds, outfile, width, format, bands, stretch_type, *stretch_args):
    """
    Generate overviews for imagery

    @type  ds:           C{GDALDataset}
    @param ds:           a GDALDataset object
    @type  outfile:      C{str}
    @param outfile:      a filepath to the output overview image. If supplied,
                         format is determined from the file extension
    @type  width:        C{int}
    @param width:        output image width
    @type  format:       C{str}
    @param format:       format to generate overview image, one of
                         ['JPG','PNG','GIF','BMP','TIF']. Not required if
                         outfile is supplied.
    @type  bands:        C{list}
    @param bands:        list of band numbers (base 1) in processing order -
                         e.g [3,2,1]
    @type  stretch_type: C{str}
    @param stretch_type: stretch to apply to overview image, one of
                         [L{NONE<_stretch_NONE>},L{PERCENT<_stretch_PERCENT>},L{MINMAX<_stretch_MINMAX>},L{STDDEV<_stretch_STDDEV>},L{COLOURTABLE<_stretch_COLOURTABLE>},L{COLOURTABLELUT<_stretch_COLOURTABLELUT>},L{RANDOM<_stretch_RANDOM>},L{UNIQUE<_stretch_UNIQUE>}].
    @type  stretch_args: C{list}
    @param stretch_args: args to pass to the stretch algorithms
    @rtype:              C{str}
    @return:             filepath (if outfile is supplied)/binary image data
                         (if outfile is not supplied)
    """
    # mapping table for file extension -> GDAL format code
    imageformats = {
        "JPG": "JPEG",   # JPEG JFIF (.jpg)
        "PNG": "PNG",    # Portable Network Graphics (.png)
        "GIF": "GIF",    # Graphics Interchange Format (.gif)
        "BMP": "BMP",    # Microsoft Windows Device Independent Bitmap (.bmp)
        "TIF": "GTiff",  # Tagged Image File Format/GeoTIFF (.tif)
    }
    # matching world-file extensions for each output format
    worldfileexts = {
        "JPG": ".jgw",   # JPEG JFIF (.jpg)
        "PNG": ".pgw",   # Portable Network Graphics (.png)
        "GIF": ".gfw",   # Graphics Interchange Format (.gif)
        "BMP": ".bpw",   # Microsoft Windows Device Independent Bitmap (.bmp)
        "TIF": ".tfw",   # Tagged Image File Format/GeoTIFF (.tif)
    }
    if outfile:
        outfile = utilities.encode(outfile)
        # overrides "format" arg if supplied
        format = os.path.splitext(outfile)[1].replace(".", "")
    # Get format code, default to 'JPEG' if supplied format doesn't match the
    # predefined ones...
    ovdriver = gdal.GetDriverByName(
        imageformats.get(format.upper(), "JPEG")
    )
    cols = ds.RasterXSize
    rows = ds.RasterYSize
    # Scale the geotransform so the overview keeps the source aspect ratio.
    vrtcols = width
    vrtrows = int(math.ceil(width * float(rows) / cols))
    gt = ds.GetGeoTransform()
    vrtpx = cols / float(vrtcols) * gt[1]
    vrtpy = rows / float(vrtrows) * gt[5]
    vrtgt = (gt[0], vrtpx, gt[2], gt[3], gt[4], vrtpy)
    # Build a stretched VRT and reopen it with the adjusted geotransform.
    vrtfn = stretch(stretch_type, vrtcols, vrtrows, ds, bands, *stretch_args)
    gdal.UseExceptions()
    vrtds = gdal.Open(vrtfn, gdal.GA_ReadOnly)
    vrtds.SetGeoTransform(vrtgt)
    if outfile:
        cpds = ovdriver.CreateCopy(outfile, vrtds)
        # Write the accompanying world file (pixel size, rotation, origin of
        # the centre of the top-left pixel).
        wf_ext = worldfileexts.get(format.upper(), ".jgw")
        open(outfile[:-4] + wf_ext, "w").write(
            "\n".join(
                [
                    str(vrtgt[1]),
                    str(vrtgt[4]),
                    str(vrtgt[2]),
                    str(vrtgt[5]),
                    str(vrtgt[0] + 0.5 * vrtgt[1] + 0.5 * vrtgt[2]),
                    str(vrtgt[3] + 0.5 * vrtgt[4] + 0.5 * vrtgt[5]),
                ]
            )
        )
        if not cpds:
            # Python 2 raise syntax -- this module targets Python 2.
            raise geometry.GDALError, "Unable to generate overview image."
    else:
        # No outfile: render into GDAL's in-memory filesystem and return the
        # raw bytes. .next() is Python-2 only, consistent with this module.
        fn = "/vsimem/%s.%s" % (tempfile._RandomNameSequence().next(), format.lower())
        cpds = ovdriver.CreateCopy(fn, vrtds)
        if not cpds:
            raise geometry.GDALError, "Unable to generate overview image."
        outfile = read_vsimem(fn)
        gdal.Unlink(fn)
    return outfile
def setUp(self) -> None:
    """Provision a fresh random-name generator, then run the base setup."""
    # Each test gets its own name sequence so names never repeat across tests.
    self.r = tempfile._RandomNameSequence()
    super().setUp()
class SemLock(object):
    """Wrapper around a named OS semaphore (CPython multiprocessing internals)."""

    # Shared generator of random suffixes for semaphore names.
    _rand = tempfile._RandomNameSequence()

    def __init__(self, kind, value, maxvalue, *, ctx):
        if ctx is None:
            ctx = context._default_context.get_context()
        name = ctx.get_start_method()
        # On Windows, or with the 'fork' start method, the name can be
        # unlinked immediately because children inherit the handle.
        unlink_now = sys.platform == 'win32' or name == 'fork'
        # Retry on name collisions; give up after 100 attempts.
        for i in range(100):
            try:
                sl = self._semlock = _multiprocessing.SemLock(
                    kind, value, maxvalue, self._make_name(),
                    unlink_now)
            except FileExistsError:
                pass
            else:
                break
        else:
            raise FileExistsError('cannot find name for semaphore')

        util.debug('created semlock with handle %s' % sl.handle)
        self._make_methods()

        if sys.platform != 'win32':
            def _after_fork(obj):
                obj._semlock._after_fork()
            util.register_after_fork(self, _after_fork)

        if self._semlock.name is not None:
            # We only get here if we are on Unix with forking
            # disabled.  When the object is garbage collected or the
            # process shuts down we unlink the semaphore name
            from .semaphore_tracker import register
            register(self._semlock.name)
            util.Finalize(self, SemLock._cleanup, (self._semlock.name, ),
                          exitpriority=0)

    @staticmethod
    def _cleanup(name):
        # Remove the semaphore from the OS and from the tracker's bookkeeping.
        from .semaphore_tracker import unregister
        sem_unlink(name)
        unregister(name)

    def _make_methods(self):
        # Expose the C-level acquire/release directly for speed.
        self.acquire = self._semlock.acquire
        self.release = self._semlock.release

    def __enter__(self):
        return self._semlock.__enter__()

    def __exit__(self, *args):
        return self._semlock.__exit__(*args)

    def __getstate__(self):
        # Only picklable while a child process is being spawned.
        context.assert_spawning(self)
        sl = self._semlock
        if sys.platform == 'win32':
            h = context.get_spawning_popen().duplicate_for_child(sl.handle)
        else:
            h = sl.handle
        return (h, sl.kind, sl.maxvalue, sl.name)

    def __setstate__(self, state):
        self._semlock = _multiprocessing.SemLock._rebuild(*state)
        util.debug('recreated blocker with handle %r' % state[0])
        self._make_methods()

    @staticmethod
    def _make_name():
        # Process-specific prefix plus a random suffix.
        return '%s-%s' % (process.current_process()._config['semprefix'],
                          next(SemLock._rand))
# NOTE(review): this assert is the tail of a function truncated above this chunk.
assert """Cloning into""" in stdout, 'no messages about cloning'


if __name__ == '__main__':
    # Ad-hoc benchmark driver (Python 2 syntax: print statement, old except form).
    try:
        create_test_user(force=False)
        seq = None
        import time

        # argv: [1]=repo, [2]=iterations, [3]=method, [4]=backend (default hg)
        try:
            METHOD = sys.argv[3]
        except Exception:
            # NOTE(review): if sys.argv[3] is missing, METHOD stays unbound and
            # the comparison below raises NameError -- confirm intended usage.
            pass

        try:
            backend = sys.argv[4]
        except Exception:
            backend = 'hg'

        if METHOD == 'pull':
            # Seed a clone first so subsequent pulls have something to fetch.
            seq = _RandomNameSequence().next()
            test_clone_with_credentials(repo=sys.argv[1], method='clone',
                                        seq=seq, backend=backend)
        s = time.time()
        # Repeat the operation argv[2] times and report total wall time.
        for i in range(1, int(sys.argv[2]) + 1):
            print 'take', i
            test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
                                        seq=seq, backend=backend)
        print 'time taken %.3f' % (time.time() - s)
    except Exception, e:
        sys.exit('stop on %s' % e)
############################################################################### log.info("Splitting file") splits_dir = tempfile.mkdtemp(dir=temp_dir) rc = subprocess.call(['split', '-n', str(splits), filepath, os.path.join(splits_dir, 'split.')]) assert rc == 0 split_filenames = sorted(os.listdir(splits_dir)) log.debug("local split dir: %s", splits_dir) log.debug("split filenames: %s", split_filenames) ############################################################################### log.info("Making remote target directory for transfer") # noinspection PyProtectedMember remote_tmp_dir = os.path.join(remote_dir, '.' + tempfile._RandomNameSequence().next()) log.debug("remote tmp transfer path: %s", remote_tmp_dir) subprocess.call(['ssh', '{user}@{host}'.format(user=ssh_user, host=ssh_host), 'mkdir -p {dir}'.format(dir=remote_tmp_dir)]) ############################################################################### log.info("Spawn transfer processes") #: :type: list[subprocess.Popen] transfer_procs = [] for i, fn in enumerate(split_filenames): log.debug(" - [%d] %s", i, fn) transfer_procs.append( subprocess.Popen(['rsync', '-Pvh', os.path.join(splits_dir, fn), "{user}@{host}:{dir}/".format(
def generate_data(count=None):
    """Yield ``count`` random ``(name, name, int)`` triples.

    :param count: number of rows to generate; defaults to the module-level
        ``num_entries`` for backward compatibility with existing callers.
    :return: generator of ``(str, str, int)`` tuples, ints in [1, 1000].
    """
    if count is None:
        count = num_entries  # legacy module-level configuration
    name_gen = tempfile._RandomNameSequence()
    # range/next(...) instead of xrange/.next(): works on Python 2 and 3.
    for _ in range(count):
        yield (next(name_gen), next(name_gen), random.randint(1, 1000))