Code Example #1
 def __init__(self, project, jira_url=None, github_user_name=None):
     self.git_path = project.path()
     self.github_name = project.github()
     if jira_url:
         self.jira_url = jira_url
     else:
         self.jira_url = Config().config['REPO']['JiraURL']
     if github_user_name:
         self.github_user_name = github_user_name
     else:
         self.github_user_name = "apache"
     self.jira_project_name = project.jira()
     self.repo = Repo(self.jira_project_name, self.github_name, local_path=self.git_path,
                      github_user_name=self.github_user_name)
     self.git_repo = git.Repo(self.git_path)
     self.head_commit = self.git_repo.head.commit.hexsha
     # self.git_repo.git.checkout(self.head_commit, force=True)
     self.git_url = os.path.join(list(self.git_repo.remotes[0].urls)[0].replace(".git", ""), "tree")
     self.jira_issues = None
     self.commits = None
     self.versions = None
     self.bugged_files_between_versions = None
     self.selected_versions = None
     self.selected_config = self.read_selected_config()
Code Example #2
    def test_load_index_version_1(self):
        r = Repo(index={}, config=self.conf, connector=self.connector)
        r.load_index_from_disk(1)

        self.assertEqual(r.index, self.pi)
        self.assertIsNot(r.index, self.pi)
Code Example #3
from repo import Repo
import json
import urllib2
import os

originalsrepo = Repo('101clonebot', '101haskelloriginals')
clonerepo = Repo('tschmorleiz', '101haskellclones')
clones = json.load(urllib2.urlopen('http://101companies.org/api/clones'))

def prepareWorker(reponame, contribname, sha):
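  # Check out the given commit in the contributor's clone, then copy that contribution into the local 101repo contributions directory.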
  #print 'cd ~/101results/gitdeps/' + reponame + '/; git checkout %s; cd ~'%sha
  os.popen('cd ~/101results/gitdeps/' + reponame + '/; git checkout %s; cd ~'%sha).read()
  os.popen('cp -r ~/101results/gitdeps/' + reponame + '/contributions/' + contribname + '/ ~/101results/101repo/contributions/').read()

def getFragmentsContents(relevantFiles, reponame, contribname, sha):
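  # Ask the feature-name detection service for this contribution's features, then fetch the content of each resource that matches relevantFiles.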
  urlbase = 'http://worker.101companies.org/services/featureNameDetection'
  params = '?reponame=%s&contribname=%s&sha=%s'%(reponame, contribname, sha)
  detection = json.load(urllib2.urlopen(urlbase + params))
  contents = {}
  prepareWorker(reponame, contribname, sha)
  for f, resources in detection[contribname]['features'].items():
    contents[f] = {}
    for i, r in enumerate(resources):
      resource = r['resource']
      if any(map(lambda f: f in resource, relevantFiles)):
        content = json.load(urllib2.urlopen(resource))['content']
      else:
        content = ''
      contents[f][resource] = {'index': i, 'content': content}
  return contents
Code Example #4
def main():  #pylint: disable=R0915
    LOGGER.info('ADF Version %s', ADF_VERSION)
    LOGGER.info("ADF Log Level is %s", ADF_LOG_LEVEL)

    parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
    deployment_map = DeploymentMap(parameter_store, ADF_PIPELINE_PREFIX)
    s3 = S3(DEPLOYMENT_ACCOUNT_REGION, S3_BUCKET_NAME)
    sts = STS()
    role = sts.assume_cross_account_role(
        'arn:aws:iam::{0}:role/{1}-readonly'.format(
            MASTER_ACCOUNT_ID,
            parameter_store.fetch_parameter('cross_account_access_role')),
        'pipeline')

    organizations = Organizations(role)
    clean(parameter_store, deployment_map)

    try:
        auto_create_repositories = parameter_store.fetch_parameter(
            'auto_create_repositories')
    except ParameterNotFoundError:
        auto_create_repositories = 'enabled'

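    # Build each pipeline from the deployment map: create its repository if needed, resolve targets, then deploy the generated CloudFormation stack.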
    for p in deployment_map.map_contents.get('pipelines'):
        pipeline = Pipeline(p)

        if auto_create_repositories == 'enabled':
            code_account_id = next(param['SourceAccountId']
                                   for param in p['params']
                                   if 'SourceAccountId' in param)
            if auto_create_repositories and code_account_id and str(
                    code_account_id).isdigit():
                repo = Repo(code_account_id, p.get('name'),
                            p.get('description'))
                repo.create_update()

        for target in p.get('targets', []):
            target_structure = TargetStructure(target)
            for step in target_structure.target:
                for path in step.get('path'):
                    regions = step.get(
                        'regions', p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                    step_name = step.get('name')
                    params = step.get('params', {})
                    pipeline.stage_regions.append(regions)
                    pipeline_target = Target(path, regions, target_structure,
                                             organizations, step_name, params)
                    pipeline_target.fetch_accounts_for_target()

            pipeline.template_dictionary["targets"].append(
                target_structure.account_list)

        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

        parameters = pipeline.generate_parameters()
        pipeline.generate()
        deployment_map.update_deployment_parameters(pipeline)
        s3_object_path = upload_pipeline(s3, pipeline)

        store_regional_parameter_config(pipeline, parameter_store)
        cloudformation = CloudFormation(
            region=DEPLOYMENT_ACCOUNT_REGION,
            deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
            role=boto3,
            template_url=s3_object_path,
            parameters=parameters,
            wait=True,
            stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
            s3=None,
            s3_key_path=None,
            account_id=DEPLOYMENT_ACCOUNT_ID)
        cloudformation.create_stack()
Code Example #5
with open("config.yaml", 'r') as stream:
    config = yaml.safe_load(stream)

with open("sources.yaml", 'r') as stream:
    repos = yaml.safe_load(stream)

REPOS_BASE_PATH.mkdir(exist_ok=True)

# ■ Loading the data:
repos_to_versuche = []
for repo in repos:
    try:
        console.print()
        console.rule(repo['name'])
        repos_to_versuche.append(import_repo(Repo(repo, gh)))
    except github.UnknownObjectException:
        pass
    except KeyboardInterrupt:
        warn("\n" * 5 + "KeyboardInterrupt – exiting early…")
        break
    except Exception as e:
        error(f'Could not import {repo["name"]}')
        # This way, the GitHub Actions status immediately shows whether there was a problem.
        raise

# ■ Including the "awesome-ap-pdfs":
console.rule('*** NicoWeio/awesome-ap-pdfs ***')
repos_to_versuche = add_aap_pdfs(repos_to_versuche, gh)

console.rule('*** Analyse ***')
Code Example #6
from ui import Ui
from controller import Controller
from repo import Repo

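# Wire the repository into the controller, hand the controller to the UI, and start the application.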
repo = Repo()
controller = Controller(repo)
c = Ui(controller)
c.run()
Code Example #7
# This runs the application
from phone_data import phones
from tmo_api import TmoData
from phone import Phone
from view import View
from controller import Controller
from router import Router
from repo import Repo

phone_repo = Repo(phones())
app_view = View()
app_controller = Controller(app_view, phone_repo)
router = Router(app_controller)

router.run()
Code Example #8
def clear_database():
    with session() as db:
        repo = Repo(db)
        repo.clear_database()
        db.commit()
Code Example #9
File: test_testing.py Project: todun/aeongarden
 def setUp(self):
     self.repo = Repo(autocommit=True)
     self.user = UserFactory(email="*****@*****.**")
     self.repo.save(self.user)
Code Example #10
def run_tag_creator(args: argparse.Namespace):
    print(">>> Reading config file...")
    if args.conf is None:
        args.conf = ConfigParser.parse()
    print(">>> Config file read : {}".format(args.conf))

    print(">>> Configuring repository...")
    if args.repo is None:
        args.repo = Repo(args.conf["repo"]["path"])
    print(">>> Repository configured : {}".format(args.repo))

    if args.environment is None:
        args.environment = args.conf["default"]["environment"]
    elif args.environment not in args.conf["environment"]:
        print(">>> Environment couldn't be found on config : {}".format(args.environment))
        exit(-1)
    print(">>> Environment configured : {}".format(args.environment))

    if args.branch is None:
        args.branch = args.conf["environment"][args.environment]["branch"]

    if args.commit:
        print(">>> Checking out to {}...".format(args.commit))
        if args.repo.checkout(commit=args.commit):
            print(">>> Check out completed!")
        else:
            exit(-1)
    else:
        print(">>> Checking out to {}...".format(args.branch))
        if args.repo.checkout(branch=args.branch):
            print(">>> Check out completed!")
        else:
            exit(-1)

    if args.fetch:
        print(">>> Fetching...")
        if args.repo.fetch():
            print(">>> Fetch completed!")
        else:
            exit(-1)
    else:
        print(">>> Fetch not set, skipping fetch!")

    if args.pull:
        print(">>> Pulling...")
        if args.repo.pull(branch=args.branch):
            print(">>> Pull completed!")
        else:
            exit(-1)
    else:
        print(">>> Pull not set, skipping pull!")

    if args.tag:
        new_tag = args.tag
    else:
        tag_config = args.conf["environment"][args.environment]["tag"]
        print(">>> Environment configuration read for tag limits : {}".format(tag_config))
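        # The first value in the tag config is the tag prefix; the "max*" entries set the upper bound for each version component.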
        prefix, *_ = tag_config.values()
        max_values = {k: v for k, v in tag_config.items() if k.startswith("max")}
        base_ver = Version(**max_values)
        tags = []
        print(">>> Reading existing tags for environment...")
        for tag in args.repo.tags():
            if tag.startswith(prefix):
                try:
                    tags.append(base_ver.parse(tag))
                except VersionError as err:
                    print(">>> Tag exceeds configured environment tag limits({}) : {}".format(base_ver.maximum(), tag))
                    print(err)
        new_tag = max(tags).next()
    print(">>> New tag : {}".format(new_tag))
    if args.repo.create_tag(tag_name=str(new_tag), commit=args.commit):
        print(">>> Tag created : {}".format(str(new_tag)))
    else:
        print(">>> Tag couldn't be created!")
        exit(-1)

    if args.push:
        print(">>> Pushing...")
        if args.repo.push(branch=args.branch):
            print(">>> Push completed!")
        else:
            exit(-1)
    else:
        print(">>> Push not set, skipping push!")
Code Example #11
File: controller.py Project: JakeTompkins/TermChat
 def __init__(self):
     self.r = Repo()
     self.v = View()
Code Example #12
File: snixContext.py Project: nishantkakar/snix
 def get_repos(self):
     all_repos = []
     for repo in self._manifest_repos:
         repo_context = {'repo_location': repo}
         all_repos.append(Repo(repo_context))
     return all_repos
Code Example #13
from remote import Remote
from repo import Repo
from submit import submit
from patcher import make_patch


repo = Repo('/Users/ajermyn/Dropbox/Software/Stokes_Experiments')
remote = Remote('rusty', '/mnt/home/ajermyn/Projects/Stokes_Experiments', '/mnt/home/ajermyn/ceph/Stokes_Experiments/')



config = {
    'x_ctrl(6)': 4.0,  # Heat multiplier
    'x_logical_ctrl(1)': '.false.',  # True for simple norm, false for complicated
    'x_ctrl(2)': 1.0,  # nf spacing in log space
    'x_ctrl(3)': 1.0,  # nl spacing in log space
    'x_ctrl(5)': 1e-4,  # Heat smoothing in Msun
    'x_ctrl(7)': 1.0,  # N^2 smoothing in distance 1/(this*kr)
    'time_delta_coeff': 0.2,  # Time resolution
}

bname = 'master'
config = {}
config['x_ctrl(2)'] = 1.0
patch = make_patch(config)

submit(repo,remote,bname,patch)
Code Example #14
File: main.py Project: tordisuna/SC-T-201-GSKI
from repo import Repo
from contact import Contact

my_repo = Repo(Contact)
a = Contact("Gudni", "9635354", "*****@*****.**")
b = Contact("Lalli", "3453453", "*****@*****.**")
c = Contact("Sigga", "2341123", "*****@*****.**")
d = Contact("Hannes", "03459533", "*****@*****.**")
e = Contact("Gudni", "1234567", "*****@*****.**")
my_repo.add(a, b, c, d, e)

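# Each undo-stack entry is a tuple of (method, args) pairs; undo() pops one entry and calls each method to reverse the recorded operation.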
undo_stack = list()
undo_stack.append(((my_repo.remove, (a, b, c, d, e)), ))


def undo():
    for undo_method, arguments in undo_stack.pop():
        undo_method(*arguments)


print(my_repo, "\n")
undo()
print(my_repo)

# for item in my_repo.order_by("name"):
#     print(item)

# print()
# for item in my_repo.order_by("phone"):
#     print(item)
Code Example #15
 def __init__(self, repo_settings, verbose=False):
     self.verbose = verbose
     self.repo_settings = repo_settings
     self.repo = Repo(self.repo_settings, verbose=self.verbose)
Code Example #16
    def test_load_index_version_too_low(self):
        r = Repo(index={}, config=self.conf, connector=self.connector)

        self.assertRaises(KeyError, r.load_index_from_disk, 0)
Code Example #17
 def __init__(self, repository):
     self.repository_model = repository
     self.repo = Repo(
         Path(repository.path, "conf/repos/%s.conf" % repository.name))
Code Example #18
File: actions.py Project: enderyildirim/python_demo
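 # Custom argparse action: when the option is used, store a Repo built from the option string itself on the namespace.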
 def __call__(self, parser, namespace, value, option_string=None):
     if option_string in self.option_strings:
         setattr(namespace, self.dest, Repo(option_string))
Code Example #19
import argparse
import logging

from pprint import pprint, pformat
from db import DB
from plumbum.cmd import echo, grep, head, ls, msgfmt, pocount, sed, tail, mkdir, cat, wdiff
from plumbum import local
from repo import Repo

logging.basicConfig(filename='findRevs.log',
                    level=logging.DEBUG,
                    format='%(asctime)s - %(levelname)s - %(message)s')

parser = argparse.ArgumentParser(description='language processor.')
#parser.add_argument('--type', required=True, choices=('ug', 'ch', 'sy'))
parser.add_argument('--langs', nargs='+', required=True)
args = parser.parse_args()

r = Repo('../../mainNVDACode/.git')
tbpath = local.path('../')
linfo = {
    'changes': {
        'filename': 'changes.t2t',
        'srcpath': 'user_docs/en/changes.t2t',
        'dstprefix': 'changes-newRevisions',
    },
    'userGuide': {
        'filename': 'userGuide.t2t',
        'srcpath': 'user_docs/en/userGuide.t2t',
        'dstprefix': 'userGuide-newRevisions',
    },
    'symbols': {
        'filename': 'symbols.dic',
        'srcpath': 'source/locale/en/symbols.dic',
Code Example #20
    def find(query, components):
        conn = DB.getConn()
        c = conn.cursor()

        c.execute(query, components)
        commitrows = c.fetchall()
        commitfiles = []

        if commitrows:
            allcommitids = ",".join(
                [str(int(commit[0])) for commit in commitrows])

            #This is poor practice, but we assured ourselves the value is composed only of ints first
            DB.execute(
                c, "SELECT * from " + DB.commitfile._table +
                " WHERE commitid IN (" + allcommitids + ")")
            commitfiles = c.fetchall()

            DB.execute(
                c, "SELECT * from " + DB.commitkeyword._table +
                " WHERE commitid IN (" + allcommitids + ")")
            commitkeywords = c.fetchall()

            DB.execute(
                c,
                "SELECT commitid, case when length(data) < 307200 then data else 'TOOLARGE' end as data from "
                + DB.commitdiffs._table + " WHERE commitid IN (" +
                allcommitids + ")")
            commitdata = c.fetchall()

        commits = []
        for i in commitrows:
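            # Each result row holds the commit columns followed by the repo columns, hence the DB.commit._numColumns offset.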
            r = Repo()
            r.loadFromValues(i[DB.commit._numColumns + DB.repo.id],
                             i[DB.commit._numColumns + DB.repo.name],
                             i[DB.commit._numColumns + DB.repo.repotypeid],
                             i[DB.commit._numColumns + DB.repo.url],
                             i[DB.commit._numColumns + DB.repo.viewlink],
                             i[DB.commit._numColumns + DB.repo.tagname],
                             i[DB.commit._numColumns + DB.repo.tagmaturity])

            files = [
                file[DB.commitfile.file] for file in commitfiles
                if file[DB.commitfile.commitid] == i[DB.commit.id]
            ]
            keywords = [
                keyword[DB.commitkeyword.keyword] for keyword in commitkeywords
                if keyword[DB.commitkeyword.commitid] == i[DB.commit.id]
            ]
            data = [
                cdata[DB.commitdiffs.data] for cdata in commitdata
                if cdata[DB.commitdiffs.commitid] == i[DB.commit.id]
            ][0]

            if i[DB.commit._numColumns + DB.repo.repotypeid] == Repo.Type.GIT:
                c = GitCommit()
            elif i[DB.commit._numColumns +
                   DB.repo.repotypeid] == Repo.Type.SVN:
                c = SVNCommit()
            else:
                c = Commit()
            c.loadFromDatabase(r, i, files, keywords, data)

            commits.append(c)

        return commits
Code Example #21
import sys
import argparse

from repo import Repo
from developer_graph import DeveloperGraph

parser = argparse.ArgumentParser(
    description='Generate node link data to be used by presenter')
parser.add_argument('--g', help="Git repo path", type=str, required=True)
parser.add_argument('--o', help="Output file name", type=str, required=True)
parser.add_argument('--m', help="Past n month", type=str, required=True)
parser.add_argument('--ext',
                    help="Only files having given extension",
                    type=str,
                    default=None)
args = parser.parse_args()

git_dir_path = args.g
output_file = args.o

r = Repo()
r.read_repo(git_dir_path, args.m)
dev_graph = DeveloperGraph()
dev_graph.read_repo(r)
dev_graph.project(ext=args.ext)
print("Number of vertices:", len(dev_graph.projected_graph))
dev_graph.write_json(args.o)
Code Example #22
File: gitz.py Project: mudox/gitz
    def __init__(self, *, include_all=False):
        """Load repos from the gitz data file (and, with include_all, from the configured 'repos_under' directories), collect their status, and compute display widths."""
        with open(os.path.expanduser(Gitz.DATA_FILE_PATH)) as file:

            json_dict = json.load(file)

            # repos from ~/.gitz.json
            self.repos = [Repo(dict) for dict in json_dict['repos']]
            for repo in self.repos:
                repo.priority = 10

            if include_all:
                # repos from ~/Git
                dirs = json_dict['repos_under']
                for dir in dirs:
                    paths = [
                        p for p in Path(dir).expanduser().glob('*/')
                        if p.is_dir()
                    ]
                    names = [p.parts[-1] for p in paths]
                    repos = [
                        Repo({
                            "name": n,
                            "path": p
                        }) for n, p in zip(names, paths)
                    ]
                    for repo in repos:
                        repo.priority = 5
                    self.repos += repos

            # collect status up
            print('collecting status: {}'.format(SC),
                  end='',
                  file=sys.stderr,
                  flush=True)
            for idx, repo in enumerate(self.repos, start=1):
                print('{}{}{}/{}'.format(RC, EL, idx, len(self.repos)),
                      end='',
                      file=sys.stderr,
                      flush=True)
                repo.parse()
            print(CUU1, end='', file=sys.stderr, flush=True)

            # statistics
            self.max_name_width = 0

            self.max_tracking_width = 0
            self.max_untracked_width = 0
            self.max_unmerged_width = 0

            self.max_branch_head_width = 0
            self.max_upstream_width = 0
            self.max_a_width = 0
            self.max_b_width = 0

            self.show_tracking = False
            self.show_untracked = False
            self.show_unmerged = False

            self.show_a = False
            self.show_b = False

            for repo in self.repos:
                # expand the tilde `~` if any
                repo.path = os.path.expanduser(repo.path)

                # name width
                self.max_name_width = max(
                    self.max_name_width,
                    len(repo.name),
                )

                # tracking & untracked & unmerged width
                self.max_tracking_width = max(
                    self.max_tracking_width,
                    len(str(repo.tracking)),
                )
                self.max_untracked_width = max(
                    self.max_untracked_width,
                    len(str(repo.untracked)),
                )
                self.max_unmerged_width = max(
                    self.max_unmerged_width,
                    len(str(repo.unmerged)),
                )

                # branch part components width
                self.max_branch_head_width = max(
                    self.max_branch_head_width,
                    len(repo.branch_head),
                )
                self.max_upstream_width = max(
                    self.max_upstream_width,
                    len(repo.branch_upstream),
                )

                a, b = repo.branch_ab
                if (a > 0):
                    self.show_a = True
                if (b > 0):
                    self.show_b = True

                self.max_a_width = max(self.max_a_width, len(str(a)))
                self.max_b_width = max(self.max_b_width, len(str(b)))

                if repo.tracking > 0:
                    self.show_tracking = True
                if repo.untracked > 0:
                    self.show_untracked = True
                if repo.unmerged > 0:
                    self.show_unmerged = True

            # fields width
            self.name_field_width = max(self.max_name_width, 14)
            self.tracking_field_width = max(self.max_tracking_width, 9)
            self.untracked_field_width = max(self.max_untracked_width, 9)
            self.unmerged_field_width = max(self.max_unmerged_width, 9)

            # branch width
            self.branch_field_width = sum([
                self.max_branch_head_width,
                4,
                self.max_upstream_width,
            ])

            if self.show_a:
                self.branch_field_width += 1 + self.max_a_width

            if self.show_b:
                self.branch_field_width += 1 + self.max_b_width

            jack.debug(
                'widths: name:%d tracking:%d untracked:%d unmerged:%d',
                self.max_name_width,
                self.max_tracking_width,
                self.max_untracked_width,
                self.max_unmerged_width,
            )

            self.sort()
Code Example #23
File: playlist_move.py Project: ostwald/python-lib
def findUserAnnos(repo, resIds, userId):
    """Collect the annotation paths for the given resource IDs and user."""
    annoPaths = []
    add = annoPaths.append
    for resId in resIds:
        try:
            path = repo.findAnnoPath(resId, userId)
            add(path)
        except Exception, msg:
            print 'Error: ', msg
    return annoPaths


if __name__ == '__main__':
    host = os.environ['HOST']

    if host == 'dls-rs1':
        userId = 'DPS-1247001681454'  # margaret
        resourceId = '1374202578944'
        repo_base = '/dls/www/ccs.dls.ucar.edu/ccs_user_content/records_ccs_users'
    if host == 'purg.local':
        userId = '1247065132457'
        resourceId = '1268089830379'
        repo_base = '/Users/ostwald/devel/dcs-repos/dds-ccs-dev/ccs_user_content/records_ccs_users/'
        resIds = ['1271000343182', '1254607776664', '1248387485269']

    repo = Repo(repo_base)
    # print findAnnoPath (resourceId, userId)
    findUserAnnos(repo, resIds, userId)

    # parseAnnoId('CCS-ANNO-RESOURCE-DPS-1247001681465-1262103110670')
Code Example #24
 def test_repo_name_from_path_with_trailing_slash(self):
     self.connector.url.path = "/path/to/repo/"
     repo = Repo(self.index, self.config, self.connector)
     self.assertEqual(repo.name, "repo")
Code Example #25
 def setUp(self) -> None:
     self.repo = Repo()
Code Example #26
    def test_load_index_version_current(self):
        r = Repo(index={}, config=self.conf, connector=self.connector)
        r.load_index_from_disk(repo.INDEX_FORMAT_VERSION)

        self.assertEqual(r.index, self.pi)
        self.assertIsNot(r.index, self.pi)
Code Example #27
File: compare.py Project: paulolimac/repo-compare
def compare(*repository):
    report = Report([Repo(full_name) for full_name in repository])
    report.test_score()
    report.show_result()
Code Example #28
File: repo_test.py Project: profucius/BlobBackup
 def setUp(self):
     self.backend = MemoryBackend()
     self.repo = Repo(self.backend)