def test_create_hg_non_ascii(self):
    """Create an hg repo whose name and description contain non-ascii
    characters, then verify flash message, DB row and filesystem repo."""
    self.log_user()
    non_ascii = "ąęł"
    repo_name = "%s%s" % (NEW_HG_REPO, non_ascii)
    repo_name_unicode = repo_name.decode('utf8')
    description = 'description for newly created repo' + non_ascii
    description_unicode = description.decode('utf8')
    private = False
    response = self.app.post(url('repos'),
                             {'repo_name': repo_name,
                              'repo_type': 'hg',
                              'clone_uri': '',
                              'repo_group': '',
                              'description': description,
                              'private': private})
    self.checkSessionFlash(response,
                           'created repository %s' % (repo_name_unicode))

    # test if the repo was created in the database
    new_repo = self.sa.query(Repository)\
        .filter(Repository.repo_name == repo_name_unicode).one()
    self.assertEqual(new_repo.repo_name, repo_name_unicode)
    self.assertEqual(new_repo.description, description_unicode)

    # test if repository is visible in the list ?
    response = response.follow()
    self.assertTrue(repo_name in response.body)

    # test if repository was created on filesystem
    try:
        vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
    except Exception:
        # BUG FIX: was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit; any repo-access failure still
        # fails the test below.
        self.fail('no repo in filesystem')
def test_create_git(self):
    # NOTE(review): the early ``return`` below disables this test entirely --
    # every statement after it is dead code.  Presumably git repo creation
    # was not supported/stable when this was written; confirm before
    # re-enabling.
    return
    self.log_user()
    repo_name = NEW_GIT_REPO
    description = 'description for newly created repo'
    private = False
    response = self.app.post(url('repos'),
                             {'repo_name': repo_name,
                              'repo_type': 'git',
                              'clone_uri': '',
                              'repo_group': '',
                              'description': description,
                              'private': private})
    # test if we have a message for that repository
    assert '''created repository %s''' % (repo_name) in response.session['flash'][0], 'No flash message about new repo'
    # test if the fork was created in the database
    new_repo = self.sa.query(Repository)\
        .filter(Repository.repo_name == repo_name).one()
    assert new_repo.repo_name == repo_name, 'wrong name of repo name in db'
    assert new_repo.description == description, 'wrong description'
    # test if repository is visible in the list ?
    response = response.follow()
    assert repo_name in response.body, 'missing new repo from the main repos list'
    # test if repository was created on filesystem
    try:
        vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
    except:
        assert False, 'no repo in filesystem'
def test_get_repo_autoalias_git(self):
    """Auto-detected backend for a git path must match the explicit one."""
    detected = get_repo(TEST_GIT_REPO)
    reference = get_backend('git')(TEST_GIT_REPO)
    self.assertEqual(reference.__class__, detected.__class__)
    self.assertEqual(reference.path, get_repo(TEST_GIT_REPO).path)
def test_get_repo_autoalias_hg(self):
    """Auto-detected backend for an hg path must match the explicit one."""
    detected = get_repo(TEST_HG_REPO)
    reference = get_backend("hg")(TEST_HG_REPO)
    self.assertEqual(reference.__class__, detected.__class__)
    self.assertEqual(reference.path, get_repo(TEST_HG_REPO).path)
def test_get_repo(self):
    """``get_repo`` with an explicit alias must build the same repo as the
    backend class obtained through ``get_backend``."""
    alias, path = 'hg', TEST_HG_REPO
    reference = get_backend(alias)(path)
    self.assertEqual(reference.__class__, get_repo(path, alias).__class__)
    self.assertEqual(reference.path, get_repo(path, alias).path)
def generate_yara_update_file(path=self._yara_repo_rules_dir): import vcs # don't move these imports! they're not needed on the worker from constance import config rule_repo = None try: rule_repo = vcs.get_backend(config.YARA_REPO_TYPE)(path, create=True, src_url=config.YARA_REPO_URL) except vcs.RepositoryError: # this means its already there .... rule_repo = vcs.get_repo(path=path, create=False) # ensure that we have the latest copy # todo detect when the repo url is changed and blow it away rule_repo.run_git_command("pull") tmp_path = tempfile.mktemp(suffix='.tar') rule_repo.run_git_command("checkout") rule_repo.run_git_command("archive master -o {0}".format(tmp_path)) temp_ver_path = tempfile.mktemp() with open(temp_ver_path, 'w') as version_file_obj: version_file_obj.write(str(rule_repo.get_changeset().revision)) version_file_obj.flush() with open(temp_ver_path, 'r') as version_file_obj: with tarfile.open(tmp_path, 'a') as tf: version_info = tf.gettarinfo(name=self._version_file, fileobj=version_file_obj, arcname=self._version_file) tf.addfile(version_info, fileobj=version_file_obj) tf.close() pfs_update = PickleableFileSample.path_factory(tmp_path) unlink(tmp_path) unlink(temp_ver_path) return pfs_update
def __init__(self, stdout=None, stderr=None, repo=None):
    """Initialize the command; when *repo* is None, locate the repository
    by scanning upward from the current working directory."""
    if repo is not None:
        self.repo = repo
    else:
        working_dir = abspath(os.curdir)
        try:
            scm, found_path = get_scm(working_dir, search_recursively=True)
            self.repo = vcs.get_repo(found_path, scm)
        except VCSError:
            raise CommandError('Repository not found')
    super(RepositoryCommand, self).__init__(stdout, stderr)
def __init__(self, stdout=None, stderr=None, repo=None):
    """
    Accepts extra argument:

    :param repo: repository instance; when omitted, the repository is
        resolved from the current directory (searching upward).
    """
    if repo is None:
        here = abspath(os.curdir)
        try:
            alias, repo_path = get_scm(here, search_up=True)
            self.repo = vcs.get_repo(repo_path, alias)
        except VCSError:
            raise CommandError('Repository not found')
    else:
        self.repo = repo
    super(RepositoryCommand, self).__init__(stdout, stderr)
def __init__(self, stdout=None, stderr=None, repo=None):
    """
    Accepts extra argument:

    :param repo: repository instance.  If not given, the repository is
        detected by recursively searching from the current directory.
    """
    if repo is None:
        try:
            detected = get_scm(abspath(os.curdir), search_recursively=True)
            self.repo = vcs.get_repo(detected[1], detected[0])
        except VCSError:
            raise CommandError('Repository not found')
    else:
        self.repo = repo
    super(RepositoryCommand, self).__init__(stdout, stderr)
def get_current_revision():
    """Returns tuple of (number, id) from repository containing this
    package or None if repository could not be found.
    """
    try:
        # BUG FIX: guard the import separately.  Previously, if ``vcs`` was
        # not installed, the ImportError reached an except clause whose
        # tuple referenced the never-imported RepositoryError/VCSError,
        # raising NameError instead of returning None.  (Same guard pattern
        # as the projector variant of this function.)
        import vcs  # noqa: F401 -- availability check only
    except ImportError as err:
        logging.debug("Cannot retrieve rhodecode's revision. Original error "
                      "was: %s" % err)
        return None
    try:
        from vcs import get_repo
        from vcs.utils.helpers import get_scm
        from vcs.exceptions import RepositoryError, VCSError
        # Package lives two directories below the repository root.
        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
        scm = get_scm(repopath)[0]
        repo = get_repo(path=repopath, alias=scm)
        tip = repo.get_changeset()
        return (tip.revision, tip.short_id)
    except (ImportError, RepositoryError, VCSError) as err:
        logging.debug("Cannot retrieve rhodecode's revision. Original error "
                      "was: %s" % err)
        return None
def get_current_revision(quiet=False):
    """
    Return a ``(revision, short_id)`` tuple for the repository that contains
    this package, or ``None`` when it cannot be determined.

    :param quiet: when True, suppress the error message printed on failure
    """
    try:
        from vcs import get_repo
        from vcs.utils.helpers import get_scm

        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
        alias = get_scm(repopath)[0]
        tip = get_repo(path=repopath, alias=alias).get_changeset()
        return (tip.revision, tip.short_id)
    except Exception as err:
        if not quiet:
            print ("Cannot retrieve rhodecode's revision. Original error "
                   "was: %s" % err)
        return None
def test_changeset_walk(limit=None):
    # Benchmark: fetch the changeset page of every changeset in the project
    # repo through the web frontend and report per-request / total timings.
    # ``limit`` caps the number of changesets visited (None = all).
    # NOTE(review): ``o`` is presumably a urllib opener defined at module
    # level -- confirm; ``jn`` looks like os.path.join.
    print 'processing', jn(PROJECT_PATH, PROJECT)
    total_time = 0
    repo = vcs.get_repo(jn(PROJECT_PATH, PROJECT))
    cnt = 0
    for i in repo:
        cnt += 1
        raw_cs = '/'.join((PROJECT, 'changeset', i.raw_id))
        if limit and limit == cnt:
            break
        full_uri = (BASE_URI % raw_cs)
        s = time.time()
        f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
        # NOTE(review): the ``\%s`` leaves a literal backslash in the output;
        # probably a typo for a separator -- confirm intended format.
        print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)
    print 'total_time', total_time
    # NOTE(review): raises ZeroDivisionError when the repo has no changesets.
    print 'average on req', total_time / float(cnt)
def get_current_revision():
    """
    Return a ``(revision, id)`` tuple for the repository containing this
    package, or ``None`` if the ``vcs`` package or the repository itself is
    unavailable.
    """
    try:
        import vcs
    except ImportError:
        return None
    try:
        from vcs import get_repo
        from vcs.utils.helpers import get_scm
        from vcs.exceptions import RepositoryError, VCSError

        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
        alias = get_scm(repopath)[0]
        tip = get_repo(path=repopath, alias=alias).get_changeset()
        return (tip.revision, tip.id)
    except (ImportError, RepositoryError, VCSError) as err:
        logging.debug("Cannot retrieve projector's revision. Original error "
                      "was: %s" % err)
        return None
def test_files_walk(limit=100): print 'processing', jn(PROJECT_PATH, PROJECT) total_time = 0 repo = vcs.get_repo(jn(PROJECT_PATH, PROJECT)) from rhodecode.lib.oset import OrderedSet paths_ = OrderedSet(['']) try: tip = repo.get_changeset('tip') for topnode, dirs, files in tip.walk('/'): for dir in dirs: paths_.add(dir.path) for f in dir: paths_.add(f.path) for f in files: paths_.add(f.path) except vcs.exception.RepositoryError, e: pass
def __init__(self, path):
    """Remember *path* and open the vcs repository that lives there."""
    self._path = path
    self.repo = vcs.get_repo(path=path)
def repository(path, alias=None):
    """Wrap the repo at *path* in a Repository (backend auto-detected
    unless *alias* is given)."""
    backend_repo = get_repo(path=path, alias=alias)
    return Repository(backend_repo)
def get_repo(self):
    """Return the vcs repository object located at this instance's path."""
    repo_path = str(self.path)
    return vcs.get_repo(path=repo_path)
def main(argv):
    """Compare each source file of a Xilinx ISE project against the version
    committed at the repository head, and print files that differ or exist
    on only one side.
    """
    usage = "usage: %prog [options] <project_file>"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose")
    vc_opts = optparse.OptionGroup(parser, "Version Control Options")
    vc_opts.add_option("-r", "--repodir", help="repository root",
                       metavar="DIR", type="string", dest="repo_root")
    parser.add_option_group(vc_opts)
    ise_opts = optparse.OptionGroup(parser, "Xilinx ISE Options")
    ise_opts.add_option("-p", "--projdir", help="Project Directory",
                        metavar="DIR", type="string", dest="proj_root")
    parser.add_option_group(ise_opts)
    parser.set_defaults(repo_root="..")  # NOTE That this is an odd default
    parser.set_defaults(proj_root=".")
    parser.set_defaults(verbose=False)
    (opts, args) = parser.parse_args(argv)
    if len(args) != 2:
        parser.error("Incorrect command-line.")

    def path_ptv(proj_rel_path):
        # project-relative path -> repository-relative path
        full_proj_path = os.path.join(opts.proj_root, proj_rel_path)
        return os.path.relpath(full_proj_path, opts.repo_root)

    def path_vcs_full(vcs_rel_path):
        # repository-relative path -> working-directory path
        return os.path.join(opts.repo_root, vcs_rel_path)

    xise_file = args[1]
    xil_sources = xil_ise.get_project_files(xise_file)
    xil_sources_norm = [path_ptv(f) for f in xil_sources]
    repo = vcs.get_repo(path=opts.repo_root)
    head = repo.get_changeset()
    filestates = []
    for fname in xil_sources_norm:
        try:
            vc_digest = hashlib.sha1(head.get_file_content(fname)).hexdigest()
            real_path = path_vcs_full(fname)
            # BUG FIX: was ``f = file(real_path, 'r')`` with a manual close --
            # ``file`` is a Python-2-only builtin and the handle leaked if
            # reading raised; ``with open()`` closes it deterministically.
            with open(real_path, 'r') as f:
                real_digest = hashlib.sha1(f.read()).hexdigest()
            if real_digest == vc_digest:
                filestates.append((fname, FileStatus.EQUAL))
            else:
                filestates.append((fname, FileStatus.DIFFER))
        except vcs.exceptions.NodeDoesNotExistError:
            # tracked tree has no such node -> working-directory only
            filestates.append((fname, FileStatus.WD_ONLY))
        except IOError:
            # file missing on disk -> version-control only
            filestates.append((fname, FileStatus.VC_ONLY))
        #print fname, vc_digest, real_digest
    for (f, s) in filestates:
        if (s != FileStatus.EQUAL) or (opts.verbose):
            print ("{0:<5}:\t{1:<60}".format(formatFS(s), f))