Example #1
0
    def update(self):
        """Persist this record by replacing its stored row.

        Returns False when validation fails, True once the row has been
        rewritten.
        """
        if self.isValid():
            # Replace-on-write: drop the old row, then store the current state.
            Repo.destroy(Game, self)
            Repo.create(Game, self)
            return True
        return False
Example #2
0
    def createRepo(self):
        """Build a Repo for the name typed into the repo line edit and wire
        its progress signals into the three plots (issues, commits, lines).
        """
        print("Getting repo: ", self.repoEdit.text())
        # NotSet sentinels: no since/until date filtering is applied here.
        testDateSince1 = NotSet
        testDateUntil1 = NotSet
        self.repo = Repo(self.repoEdit.text(),
                         _since=testDateSince1,
                         _until=testDateUntil1,
                         _gh=self.github)

        self.issuesPlot.changeData(self.repo.issuesData, ["Open Issues"])
        # commitsData columns appear to be [x, bugfixes, features, additions,
        # removals] -- TODO confirm against Repo.commitsData's actual layout.
        self.commitsPlot.changeData(self.repo.commitsData[0:3],
                                    ["Bugfixes", "Features"])
        self.linesPlot.changeData([
            self.repo.commitsData[0], self.repo.commitsData[3],
            self.repo.commitsData[4]
        ], ["Additions", "Removals", "Total"])
        # Incremental updates: repaint plots as the repo processes items.
        self.repo.issueProcessed.connect(self.issuesPlot.updatePlot)
        self.repo.commitProcessed.connect(self.commitsPlot.updatePlot)
        self.repo.commitProcessed.connect(self.linesPlot.updatePlot)

        # Every plot receives milestone markers plus the milestone data.
        for p in self.plots:
            self.repo.milestoneProcessed.connect(p.addMilestone)
            p.milestoneData = self.repo.milestoneData

        self.startDate.setDateTime(self.repo.createdAt)
Example #3
0
class GitBackend(Backend):
    """Serve a git repository from ``gitdir``; when no directory is given, a
    fresh repository is created in a temporary directory."""

    def __init__(self, gitdir=None):
        self.gitdir = gitdir

        if not self.gitdir:
            # No directory supplied: work in a throw-away repository.
            self.gitdir = tempfile.mkdtemp()
            Repo.create(self.gitdir)

        self.repo = Repo(self.gitdir)
        # Expose the repo's fetch/refs operations directly on the backend.
        self.fetch_objects = self.repo.fetch_objects
        self.get_refs = self.repo.get_refs

    def apply_pack(self, refs, read):
        """Store a thin pack supplied by ``read`` and apply ref updates.

        ``refs`` is an iterable of (oldsha, newsha, refname) tuples; per the
        git pack protocol, an all-zero *new* sha requests deletion of the ref.
        """
        fd, commit = self.repo.object_store.add_thin_pack()
        fd.write(read())
        fd.close()
        commit()

        for oldsha, sha, ref in refs:
            # BUG FIX: deletion is signalled by the new sha being forty
            # zeros; the original compared the ref *name* against zeros,
            # so deletions never happened.
            if sha == "0" * 40:
                self.repo.remove_ref(ref)
            else:
                self.repo.set_ref(ref, sha)

        print("pack applied")
Example #4
0
    def update(self):
        """Re-save this game: delete the stored row, then write it again.

        :return: True when the record validated and was rewritten,
            False otherwise.
        """
        if not self.isValid():
            return False
        # Destroy-then-create implements the replace semantics.
        for action in (Repo.destroy, Repo.create):
            action(Game, self)
        return True
Example #5
0
 def run(self):
     """Transliterate ``self.text`` word by word through ``self.engine``.

     For English-source text, words found in the US or GB dictionaries are
     kept unchanged; everything else is transformed.  Python 2: byte strings
     are decoded/encoded around the transform.  Results are accumulated in
     ``output``; ``status``/``message`` record the outcome.
     """
     status = False
     message = "Couldn't transliterate the text."
     content = {}
     output = []
     if not self.validated:
         # Language configuration missing: echo the input back unchanged.
         message = "Please provide languages and their code."
         output = self.text
     else:
         text = self.text.split()
         try:
             for index in xrange(len(text)):
                 word = text[index]
                 if not self.isEngSource:
                     # Non-English source: transliterate every word.
                     word = word.decode('utf-8')
                     output.insert(
                         index,
                         self.engine.transform(word).encode('utf-8'))
                 else:
                     # English source: only transform words that neither
                     # the US nor the GB dictionary recognises.
                     if not Vida.is_ascii(word): word = word.decode('utf-8')
                     if not self.usdictionary.check(
                             word) and not self.gbdictionary.check(word):
                         output.insert(
                             index,
                             self.engine.transform(word).encode('utf-8'))
                     else:
                         output.insert(index, word)
             status = True
             message = "Succesfully transliterated the code."
         except UnicodeDecodeError, e:
             Repo.exception(e)
             message = "Couldn't decode the language properly."
         except IndexError, e:
             Repo.exception(e)
             message = "Couldn't properly frame the sentence."
Example #6
0
    def test_load_config_from_disk(self):
        """Loading config reads a fresh copy from disk, leaving index empty."""
        loaded = Repo(index={}, config={}, connector=self.connector)
        loaded.load_config_from_disk()

        # Equal content, but a distinct object from the fixture config.
        self.assertEqual(loaded.config, self.conf)
        self.assertIsNot(loaded.config, self.conf)
        self.assertEqual(loaded.index, {})
Example #7
0
    def test_load_version_mismatch_error(self):
        """load_index_from_disk rejects an unexpected index format version."""
        repo_under_test = Repo(index={}, config=self.conf,
                               connector=self.connector)

        with self.assertRaises(VersionMismatchError) as caught:
            repo_under_test.load_index_from_disk(99)
        self.assertEqual(caught.exception.actual, 99)
        self.assertEqual(caught.exception.expected, repo.INDEX_FORMAT_VERSION)
Example #8
0
def add_aap_pdfs(repos, gh):
    """Attach the PDFs mirrored in NicoWeio/awesome-ap-pdfs to the matching
    experiments ("Versuche") of the given repos, then return the repos."""
    aap_repo = Repo({'name': 'NicoWeio/awesome-ap-pdfs'}, gh)
    aap_repo.update_repo_mirror()

    for repo in repos:
        # Mirror directories encode '/' as U+2215 (division slash).
        repo_dir = aap_repo.cwd_path / repo.full_name.replace('/', '∕')
        # Skip repos for which awesome-ap-pdfs provides no PDFs.
        if not repo_dir.exists():
            continue

        for versuch_dir in (d for d in repo_dir.iterdir() if d.is_dir()):
            # TODO: fragile assumption about awesome-ap-pdfs' folder layout.
            versuch = int(versuch_dir.stem)
            files = [
                File(Path(f), aap_repo, is_user_generated=False)
                for f in versuch_dir.iterdir() if f.suffix == '.pdf'
            ]
            if versuch in repo.versuche:
                repo.versuche[versuch].setdefault('pdfs', []).extend(files)
            else:
                warn(
                    f'Versuch {versuch} existiert nicht in {repo.full_name}, aber in awesome-ap-pdfs.'
                )

    return repos
    def on_post(req, resp):
        """Create a ticket from the JSON request body and echo its id/status.

        On a ValueError from ticket creation, respond with a failure payload
        instead.
        """
        request_payload = json.loads(req.stream.read().decode('utf-8'))

        try:
            ticket = Ticket.create_new(request_payload.get('ticket_type'),
                                       request_payload.get('message'))

            # Persist within a single transaction.
            with session() as db:
                ticket.ticket_id = Repo(db).save_new_ticket(ticket)
                db.commit()

            payload = {'id': ticket.ticket_id, 'status': ticket.status}
        except ValueError as error:
            payload = {'status': 'failed', 'reason': str(error)}

        resp.body = json.dumps(payload, sort_keys=True, indent=4)
Example #10
0
    def test_load_config_from_disk(self):
        """Config read from disk equals the stored config as a fresh object."""
        r = Repo(index={}, config={}, connector=self.connector)
        r.load_config_from_disk()

        self.assertEqual(r.index, {})
        self.assertEqual(r.config, self.conf)
        self.assertIsNot(r.config, self.conf)
Example #11
0
	def find(query, components):
		"""Run ``query`` with ``components`` and return the matching commits,
		each populated with its repo, file list and keyword list.

		Fixes over the previous version: consistent (tab) indentation --
		the old mix of tabs and 8-space blocks is a TabError on Python 3;
		the DB cursor is no longer shadowed by the Commit instance;
		``commitkeywords`` is initialised so an empty result set cannot
		leave it unbound; the builtin ``file`` is no longer shadowed.
		"""
		conn = DB.getConn()
		cursor = conn.cursor()

		cursor.execute(query, components)
		commitrows = cursor.fetchall()
		commitfiles = []
		commitkeywords = []

		if commitrows:
			allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])

			# This is poor practice, but we assured ourselves the value is
			# composed only of ints first.
			DB.execute(cursor, "SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + allcommitids + ")")
			commitfiles = cursor.fetchall()

			DB.execute(cursor, "SELECT * from " + DB.commitkeyword._table + " WHERE commitid IN (" + allcommitids + ")")
			commitkeywords = cursor.fetchall()

		commits = []
		for row in commitrows:
			repo = Repo()
			repo.loadFromValues(row[DB.commit._numColumns + 0], row[DB.commit._numColumns + 1], row[DB.commit._numColumns + 2],
				row[DB.commit._numColumns + 3], row[DB.commit._numColumns + 4], row[DB.commit._numColumns + 5])

			files = [f[DB.commitfile.file] for f in commitfiles
				if f[DB.commitfile.commitid] == row[DB.commit.id]]
			keywords = [kw[DB.commitkeyword.keyword] for kw in commitkeywords
				if kw[DB.commitkeyword.commitid] == row[DB.commit.id]]

			commit = Commit()
			commit.loadFromDatabase(repo, row, files, keywords)
			commits.append(commit)

		return commits
Example #12
0
    def test_load_version_mismatch_error(self):
        """An index with format version 99 must raise VersionMismatchError."""
        r = Repo(index={}, config=self.conf, connector=self.connector)

        with self.assertRaises(VersionMismatchError) as cm:
            r.load_index_from_disk(99)
        exc = cm.exception
        self.assertEqual(exc.actual, 99)
        self.assertEqual(exc.expected, repo.INDEX_FORMAT_VERSION)
Example #13
0
 def setUp(self):
     """Create a connected mock repo holding ten pictures on disk."""
     self.connector = MockConnector(urlparse.urlparse('/baseurl/repo/'))
     self.connector.connect()
     pictures = MockPicture.create_many(10)
     self.pi = index.PictureIndex()
     self.pi.add(pictures)
     self.conf = repo.new_repo_config()
     self.conf['index.file'] = 'mock-index-path'
     Repo.create_on_disk(self.connector, self.conf, self.pi)
Example #14
0
    def test_load_version_mismatch_error(self):
        """Loading a repo whose on-disk index version is 99 must fail."""
        self.conf['index.format_version'] = 99
        Repo.create_on_disk(self.connector, self.conf, self.pi)

        with self.assertRaises(VersionMismatchError) as cm:
            Repo.load_from_disk(self.connector)
        err = cm.exception
        self.assertEqual((err.actual, err.expected),
                         (99, repo.INDEX_FORMAT_VERSION))
Example #15
0
 def setUp(self):
     """Prepare an on-disk repo fixture behind a mock connector."""
     self.connector = MockConnector(urlparse.urlparse('/baseurl/repo/'))
     self.connector.connect()
     self.pi = index.PictureIndex()
     self.pi.add(MockPicture.create_many(10))
     conf = repo.new_repo_config()
     conf['index.file'] = 'mock-index-path'
     self.conf = conf
     Repo.create_on_disk(self.connector, self.conf, self.pi)
Example #16
0
    def update(self):
        """Persist this brand by replacing its stored row.

        Returns False when validation fails, True on success.
        """
        # NOTE: the previous version fetched ``Brand.find_by("id", self.id)``
        # into an unused local; the redundant query has been removed.
        if not self.isValid():
            return False

        Repo.destroy(Brand, self)
        Repo.create(Brand, self)
        return True
Example #17
0
    def test_load_version_mismatch_error(self):
        self.conf['index.format_version'] = 99
        Repo.create_on_disk(self.connector, self.conf, self.pi)

        with self.assertRaises(VersionMismatchError) as cm:
            Repo.load_from_disk(self.connector)
        self.assertEqual(cm.exception.actual, 99)
        self.assertEqual(cm.exception.expected, repo.INDEX_FORMAT_VERSION)
Example #18
0
def worker_thread(p, organizations, auto_create_repositories, s3,
                  deployment_map, parameter_store):
    """Build, upload and deploy one pipeline described by ``p``.

    Optionally auto-creates the source repository, resolves all deployment
    targets, generates the pipeline template, uploads it to S3 and creates
    the CloudFormation stack for it.
    """
    pipeline = Pipeline(p)

    if auto_create_repositories == 'enabled':
        try:
            code_account_id = next(param['SourceAccountId']
                                   for param in p['params']
                                   if 'SourceAccountId' in param)
            has_custom_repo = bool(
                [item for item in p['params'] if 'RepositoryName' in item])
            # Only create a repo for a numeric account id, and only when the
            # pipeline does not point at a custom repository.
            if auto_create_repositories and code_account_id and str(
                    code_account_id).isdigit() and not has_custom_repo:
                repo = Repo(code_account_id, p.get('name'),
                            p.get('description'))
                repo.create_update()
        except StopIteration:
            LOGGER.debug(
                "No need to create repository as SourceAccountId is not found in params"
            )

    # BUG FIX: ``regions`` was unbound (NameError) whenever a target's steps
    # contained no paths; default to the empty list.  ``step.get('path')``
    # also gains a default so a path-less step no longer raises TypeError.
    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            for path in step.get('path', []):
                regions = step.get('regions',
                                   p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
                step_name = step.get('name')
                params = step.get('params', {})
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(path, regions, target_structure,
                                         organizations, step_name, params)
                pipeline_target.fetch_accounts_for_target()

        pipeline.template_dictionary["targets"].append(
            target_structure.account_list)

        # Always deploy into the deployment account's own region as well.
        if DEPLOYMENT_ACCOUNT_REGION not in regions:
            pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)

    parameters = pipeline.generate_parameters()
    pipeline.generate()
    deployment_map.update_deployment_parameters(pipeline)
    s3_object_path = upload_pipeline(s3, pipeline)

    store_regional_parameter_config(pipeline, parameter_store)
    cloudformation = CloudFormation(
        region=DEPLOYMENT_ACCOUNT_REGION,
        deployment_account_region=DEPLOYMENT_ACCOUNT_REGION,
        role=boto3,
        template_url=s3_object_path,
        parameters=parameters,
        wait=True,
        stack_name="{0}-{1}".format(ADF_PIPELINE_PREFIX, pipeline.name),
        s3=None,
        s3_key_path=None,
        account_id=DEPLOYMENT_ACCOUNT_ID)
    cloudformation.create_stack()
Example #19
0
    def test_save_index_to_disk(self):
        """save_index_to_disk writes an equal-but-distinct copy of the index."""
        Repo(self.pi, self.conf, self.connector).save_index_to_disk()

        self.assertTrue(self.connector.opened('mock-index-path'))
        persisted = index.PictureIndex()
        persisted.read(self.connector.get_file('mock-index-path'))
        self.assertEqual(persisted, self.pi)
        self.assertIsNot(persisted, self.pi)
Example #20
0
    def test_save_config_to_disk(self):
        """save_config_to_disk writes an equal-but-distinct copy of the config."""
        Repo(self.pi, self.conf, self.connector).save_config_to_disk()

        self.assertTrue(self.connector.opened(repo.CONFIG_FILE))
        persisted = config.Config()
        persisted.read(self.connector.get_file(repo.CONFIG_FILE))
        self.assertEqual(persisted, self.conf)
        self.assertIsNot(persisted, self.conf)
Example #21
0
    def test_save_config_to_disk(self):
        """Writing the config must round-trip through the connector."""
        r = Repo(self.pi, self.conf, self.connector)
        r.save_config_to_disk()
        self.assertTrue(self.connector.opened(repo.CONFIG_FILE))

        config_on_disk = config.Config()
        stored_file = self.connector.get_file(repo.CONFIG_FILE)
        config_on_disk.read(stored_file)
        self.assertEqual(config_on_disk, self.conf)
        self.assertIsNot(config_on_disk, self.conf)
    def test_validate_update_person(self):
        """Update payloads are valid only when they carry an ``id``."""
        self.assertFalse(Repo.validate_update_person({'name': 'Eugeny'}))
        self.assertTrue(Repo.validate_update_person({'id': 123}))
Example #23
0
    def test_save_index_to_disk(self):
        """The saved index equals the in-memory one but is a new object."""
        r = Repo(self.pi, self.conf, self.connector)
        r.save_index_to_disk()
        self.assertTrue(self.connector.opened('mock-index-path'))

        reread = index.PictureIndex()
        reread.read(self.connector.get_file('mock-index-path'))
        self.assertEqual(reread, self.pi)
        self.assertIsNot(reread, self.pi)
Example #24
0
    def __init__(self, gitdir=None):
        """Open ``gitdir``; create a fresh temporary repo when none given."""
        if gitdir:
            self.gitdir = gitdir
        else:
            self.gitdir = tempfile.mkdtemp()
            Repo.create(self.gitdir)

        self.repo = Repo(self.gitdir)
        # Delegate fetch/refs operations straight to the underlying repo.
        self.fetch_objects = self.repo.fetch_objects
        self.get_refs = self.repo.get_refs
Example #25
0
class TestTesting(BaseTestCase):
    """Smoke test: a saved user can be fetched back by email."""

    def setUp(self):
        self.repo = Repo(autocommit=True)
        self.user = UserFactory(email="*****@*****.**")
        self.repo.save(self.user)

    def test_testing(self):
        fetched = self.repo.get_user_by_email("*****@*****.**")
        assert fetched.email == "*****@*****.**"
Example #26
0
def worker_thread(p, organizations, auto_create_repositories, deployment_map,
                  parameter_store):
    """Generate the CDK input file for one pipeline definition ``p``.

    Optionally auto-creates the source repository, resolves targets into
    waves, stores regional parameters and dumps the pipeline input plus
    required SSM parameters to ``cdk_inputs/<name>.json``.
    """
    LOGGER.debug("Worker Thread started for %s", p.get('name'))
    pipeline = Pipeline(p)
    if auto_create_repositories == 'enabled':
        # Dig the source account id / repository name out of the nested
        # default_providers structure ({} fallbacks keep each lookup safe).
        code_account_id = p.get('default_providers',
                                {}).get('source',
                                        {}).get('properties',
                                                {}).get('account_id', {})
        has_custom_repo = p.get('default_providers',
                                {}).get('source',
                                        {}).get('properties',
                                                {}).get('repository', {})
        # Only create a repo for a numeric account id without a custom repo.
        if auto_create_repositories and code_account_id and str(
                code_account_id).isdigit() and not has_custom_repo:
            repo = Repo(code_account_id, p.get('name'), p.get('description'))
            repo.create_update()

    regions = []
    for target in p.get('targets', []):
        target_structure = TargetStructure(target)
        for step in target_structure.target:
            regions = step.get('regions',
                               p.get('regions', DEPLOYMENT_ACCOUNT_REGION))
            # A step may be addressed by explicit paths and/or by tags.
            paths_tags = []
            for path in step.get('path', []):
                paths_tags.append(path)
            if step.get('tags') is not None:
                paths_tags.append(step.get('tags', {}))
            for path_or_tag in paths_tags:
                pipeline.stage_regions.append(regions)
                pipeline_target = Target(path_or_tag, target_structure,
                                         organizations, step, regions)
                pipeline_target.fetch_accounts_for_target()

            # NOTE(review): ``target.target_structure`` reads an attribute
            # off the raw target definition; the local ``target_structure``
            # built above looks like the intended receiver -- confirm.
            pipeline.template_dictionary["targets"].append(
                target.target_structure.generate_waves())

    # Always include the deployment account's own region.
    if DEPLOYMENT_ACCOUNT_REGION not in regions:
        pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION)
    pipeline.generate_input()
    ssm_params = fetch_required_ssm_params(pipeline.input["regions"]
                                           or [DEPLOYMENT_ACCOUNT_REGION])
    deployment_map.update_deployment_parameters(pipeline)
    store_regional_parameter_config(pipeline, parameter_store)
    # Persist the resolved pipeline input for the CDK synthesis step.
    with open(f'cdk_inputs/{pipeline.input["name"]}.json',
              mode='w',
              encoding='utf-8') as outfile:
        data = {}
        data['input'] = pipeline.input
        data['input']['default_scm_branch'] = ssm_params.get(
            'default_scm_branch')
        data['ssm_params'] = ssm_params
        json.dump(data, outfile)
Example #27
0
    def test_load_from_disk(self):
        """A loaded repo mirrors the created one without sharing state."""
        created = Repo.create_on_disk(self.connector, self.conf, self.pi)
        loaded = Repo.load_from_disk(self.connector)

        self.assertIsInstance(loaded, Repo)
        # Equal content, distinct objects, for both config and index.
        for attr in ('config', 'index'):
            self.assertEqual(getattr(loaded, attr), getattr(created, attr))
            self.assertIsNot(getattr(loaded, attr), getattr(created, attr))
        self.assertIsNot(loaded, created)
        self.assertIs(loaded.connector, self.connector)
Example #28
0
    def test_load_from_disk(self):
        """Loading returns a fresh Repo equal to the one created on disk."""
        repo_created = Repo.create_on_disk(self.connector, self.conf, self.pi)
        repo_loaded = Repo.load_from_disk(self.connector)

        self.assertIsInstance(repo_loaded, Repo)
        self.assertIsNot(repo_loaded, repo_created)
        self.assertIs(repo_loaded.connector, self.connector)
        # Equal content, distinct objects.
        self.assertEqual(repo_loaded.config, repo_created.config)
        self.assertIsNot(repo_loaded.config, repo_created.config)
        self.assertEqual(repo_loaded.index, repo_created.index)
        self.assertIsNot(repo_loaded.index, repo_created.index)
Example #29
0
def recommended():
    """Render the "recommended" page: channel playlists plus topic searches.

    Best-effort endpoint -- any exception is mapped to a rendered 500 page.
    """
    try:
        # NOTE(review): this tuple assignment packs three unrelated setups
        # into one statement; ``limit`` and ``nextPageToken`` are never used
        # below.
        limit, response, page = int(request.args.get(
            'limit', '0')), [], 'ajax/recommended/more'
        playlists, countryCode, nextPageToken = [
            'PLMC9KNkIncKvYin_USF1qoJQnIyMAfRxl',
            'PLNCA1T91UH31_SnlMecke_9wsbc-5mamS',
            'PLYVjGTi85afoMw4yMGHLTB99T8ZTTP0ZP'
        ], Repo.getRegion()['countryCode'], ''
        # NOTE(review): hard-coded API key in source -- should be moved to
        # configuration / environment.
        options = {
            'part': 'id',
            'maxResults': 50,
            'order': 'relevance',
            'key': 'AIzaSyDkNYRbreB8JHOggvjSznMIuw6TlvkHjGQ',
            'channelId': 'UCk8vhgJslhfcLcwS5Q2KADw',
            'type': 'playlist'
        }
        channel_playlists = YouTube.search(options)
        # Collect playlist ids returned by the channel search.
        if channel_playlists.get('items') is not None:
            for item in channel_playlists.get('items'):
                playlistId = item['id'].get('playlistId')
                if playlistId is not None:
                    playlists.append(playlistId)
        # Fetch up to 8 tracks for each playlist that resolved successfully.
        for playlist in playlists:
            playlist_tracks = YouTube.getPlaylistInfo(playlist, 8)
            if playlist_tracks['status'] == True:
                video = playlist_tracks.get('videos')
                if video is not None:
                    response.append(playlist_tracks)
        # Genre topic searches appended after the playlists.
        topics = [{
            'topicId': '/m/04rlf',
            'title': 'Music'
        }, {
            'topicId': '/m/02lkt',
            'title': 'EDM'
        }, {
            'topicId': '/m/06by7',
            'title': 'Rock'
        }]
        for topic in topics:
            options_topic = {
                'part': 'snippet',
                'maxResults': 8,
                'key': 'AIzaSyDkNYRbreB8JHOggvjSznMIuw6TlvkHjGQ',
                'regionCode': Repo.getRegion()['countryCode'],
                'topicId': topic['topicId']
            }
            playlist_topics = YouTube.search(options_topic)
            if playlist_topics['status'] == True:
                playlist_topics.update({'title': topic['title']})
                response.append(playlist_topics)
        return render('recommended', response=response, page=page)
    except Exception as e:
        return render('error', code=500, message=str(e))
Example #30
0
 def _do_save(self):
     """Validate, stamp timestamps, then UPDATE or INSERT this record.

     Existing records (truthy ``self.id``) are updated in place; new ones
     also receive a created_at stamp and get their id from the insert.
     """
     self.validate()
     self._updated_at = datetime.datetime.today()
     is_update = bool(self.id)
     if not is_update:
         # New record: created_at must be part of the inserted data.
         self._created_at = datetime.datetime.today()
     attrs = list(self.__class__.__all_attributes__)
     data = {attr: getattr(self, "_" + attr) for attr in attrs}
     if is_update:
         Repo(self.__table).where(id=self.id).update(**data)
     else:
         self.__id = int(Repo(self.__table).insert(**data))
Example #31
0
    def test_clone(self):
        """Cloning copies config and index to the destination connector."""
        src_repo = Repo.create_on_disk(self.connector, self.conf, self.pi)
        dest_connector = MockConnector(urlparse.urlparse('/destrepo/baseurl/'))
        dest_connector.connect()

        cloned = Repo.clone(repo=src_repo, dest=dest_connector)

        self.assertIsInstance(cloned, Repo)
        self.assertIsNot(cloned, src_repo)
        self.assertEqual(cloned.config, src_repo.config)
        self.assertIsNot(cloned.config, src_repo.config)
        self.assertEqual(cloned.index, src_repo.index)
        self.assertIsNot(cloned.index, src_repo.index)
        # Both the config and the index file were written at the destination.
        self.assertTrue(dest_connector.opened(repo.CONFIG_FILE))
        self.assertTrue(dest_connector.opened('mock-index-path'))
Example #32
0
 def _related_args(self, record, related_class):
     """Return constructor args for the record relating the two rows.

     Both records are persisted already (have ids), so the relating row
     can be described completely: the where-query constraints on the
     related table, plus the foreign key -- derived from ``record``'s
     singular table name -- pointing back at ``record``.
     """
     singular_name = inflector.singularize(Repo.table_name(record.__class__))
     related_args = self.where_query.get(Repo.table_name(related_class), {})
     foreign_key = associations.foreign_keys_for(related_class)[singular_name]
     related_args[foreign_key] = record.id
     return related_args
Example #33
0
    def test_clone(self):
        """Repo.clone produces an independent copy on the destination."""
        src_repo = Repo.create_on_disk(self.connector, self.conf, self.pi)
        dest_connector = MockConnector(urlparse.urlparse('/destrepo/baseurl/'))
        dest_connector.connect()
        dest_repo = Repo.clone(repo=src_repo, dest=dest_connector)

        self.assertIsInstance(dest_repo, Repo)
        self.assertIsNot(dest_repo, src_repo)
        # Equal content, distinct objects, for both config and index.
        for attr in ('config', 'index'):
            self.assertEqual(getattr(dest_repo, attr), getattr(src_repo, attr))
            self.assertIsNot(getattr(dest_repo, attr), getattr(src_repo, attr))
        self.assertTrue(dest_connector.opened(repo.CONFIG_FILE))
        self.assertTrue(dest_connector.opened('mock-index-path'))
Example #34
0
def get_location():
    """Return the user's detected and selected regions as JSON.

    ``Repo.getLocation()`` supplies the detected location (snake_case keys
    ``country_code``/``country_name``); ``Repo.getRegion()`` supplies the
    user-selected region (camelCase keys ``countryCode``/``countryName``).
    """
    real_location, sel_location = Repo.getLocation(), Repo.getRegion()
    # BUG FIX: the code/name lookups were swapped -- 'countryCode' pulled
    # 'country_name' and 'countryName' pulled 'country_code'.
    real_region = {
        'countryCode': real_location.get('country_code', ''),
        'countryName': real_location.get('country_name', '')
    }
    sel_region = {
        'countryCode': sel_location.get('countryCode', ''),
        'countryName': sel_location.get('countryName', '')
    }
    return jsonify({
        'status': True,
        'actual': real_region,
        'selected': sel_region
    })
Example #35
0
  def test_it_should_replace_a_given_string_in_repo_conf(self):
    """Repo.replace must run re.sub and write the result back to the file."""
    path = 'tests/fixtures/config.conf'
    fake_re = MagicMock()
    fake_re.sub.return_value = 'another_text'

    with patch.multiple('repo', re=fake_re):
      repo = Repo(path)
      repo.replace('pattern', 'string')

      # The substituted text was written straight back to the fixture file.
      with open('tests/fixtures/config.conf') as conf_file:
        eq_(conf_file.read(), 'another_text')

      fake_re.sub.assert_called_once_with('pattern', 'string',
                                          'another_text')
Example #36
0
 def _related_args(self, record, related_class):
     """Compute the attributes of the row that relates the two records.

     Both sides are persisted (have ids).  One constraint comes from the
     where query's entry for the related table; the other is wired in via
     the foreign key derived from ``record``'s singular table name.
     """
     record_table = Repo.table_name(record.__class__)
     record_class_name = inflector.singularize(record_table)
     related_args = self.where_query.get(Repo.table_name(related_class), {})
     foreign_keys = associations.foreign_keys_for(related_class)
     related_args[foreign_keys[record_class_name]] = record.id
     return related_args
Example #37
0
 def test__init(self):
     """Step-wise (yield-segmented) check of DataExtractor construction.

     ``object.__new__`` bypasses __init__ so each attribute can be set and
     validated one checkpoint at a time; each ``yield`` marks a stage for a
     generator-based test runner.
     """
     extractor = object.__new__(DataExtractor)
     project = ProjectName.CommonsLang.value
     # Stage 1: local git path resolves under the configured RepoDir.
     extractor.git_path = project.path()
     path = os.path.join(Config().config['REPO']['RepoDir'], "commons-lang")
     assert extractor.git_path == path
     yield
     # Stage 2: GitHub project name.
     extractor.github_name = project.github()
     assert extractor.github_name == "commons-lang"
     yield
     # Stage 3: JIRA URL comes from configuration.
     extractor.jira_url = Config().config['REPO']['JiraURL']
     URL = "http://issues.apache.org/jira"
     assert extractor.jira_url == URL
     yield
     # Stage 4: JIRA project key.
     extractor.jira_project_name = project.jira()
     assert extractor.jira_project_name == "LANG"
     yield
     # Stage 5: constructing Repo clones/populates the local checkout.
     extractor.repo = Repo(extractor.jira_project_name,
                           extractor.github_name,
                           local_path=extractor.git_path)
     path = extractor.git_path
     assert os.path.exists(path) and os.listdir(path)
     yield
     # Stage 6: commit/version extraction yields bugged-file data.
     repo = git.Repo(extractor.git_path)
     extractor.commits = extractor._get_repo_commits(
         "", repo, extractor.jira_project_name, extractor.jira_url)
     extractor.versions = extractor._get_repo_versions("", repo)
     extractor.bugged_files_between_versions = extractor._get_bugged_files_between_versions(
     )
     assert extractor.bugged_files_between_versions
     yield
Example #38
0
def analyze(repo):
    """Count code / documentation / empty lines for ``repo``.

    Results are cached as ``<repo path>.json``; when the cache exists it is
    returned directly, otherwise the repository is checked out and analyzed
    file by file with pygount.
    """
    extensions = LANGUAGES[repo['language']]['extensions']
    r = Repo(repo['owner'],
             repo['name'],
             default_branch=repo['default_branch'],
             language=repo['language'],
             extensions=extensions)

    json_path = f'{r.path}.json'
    if os.path.isfile(json_path):
        # A previous analysis is already on disk -- reuse it.
        return read_json(json_path)

    for key in ('code', 'documentation', 'empty'):
        repo[key] = 0

    with r:
        for source_file in r.files:
            # Ignore symlinks.
            if not os.path.isfile(source_file):
                continue

            analysis = pygount.source_analysis(source_file,
                                               repo['language'],
                                               encoding='utf-8')
            if analysis.state == 'analyzed':
                repo['code'] += analysis.code + analysis.string
                repo['documentation'] += analysis.documentation
                repo['empty'] += analysis.empty

    write_json(json_path, repo)
    return repo
Example #39
0
class RepoServer(object):
    """NDN repo responder: answers interests from a Repo store, replying with
    a signed "No match found" Data packet when nothing matches."""

    def __init__(self, keyChain, certificateName):
        self._keyChain = keyChain
        self._certificateName = certificateName
        self.repo = Repo()

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        # Called for each incoming interest on the registered prefix.
        print 'Interest received: %s' % interest.getName().toUri()

        # Make and sign a Data packet.
        encoded_data = self.repo.extract_from_repo(interest)
        if not encoded_data:
            # Nothing in the repo matched: reply with a signed placeholder.
            data = Data(interest.getName())
            content = "No match found"
            data.setContent(content)
            self._keyChain.sign(data, self._certificateName)
            encoded_data = data.wireEncode().toBuffer()
        else:
            dumpData(encoded_data)
            encoded_data = encoded_data.wireEncode().toBuffer()

        transport.send(encoded_data)
        print 'sent'

    def onRegisterFailed(self, prefix):
        # Prefix registration failure is only logged, not retried.
        dump("Register failed for prefix", prefix.toUri())
Example #40
0
 def test_attributes(self):
     """The constructor must store every collaborator verbatim."""
     repo = Repo(self.index, self.config, self.connector, self.name)
     self.assertIsInstance(repo, Repo)
     for attr, expected in (('index', self.index), ('config', self.config),
                            ('connector', self.connector),
                            ('name', self.name)):
         self.assertIs(getattr(repo, attr), expected)
    def test_validate_add_person(self):
        """A person is addable only when it has a name set."""
        no_name_with_id = Person()
        no_name_with_id.id = 11

        no_name_with_email = Person()
        no_name_with_email.email = "*****@*****.**"

        named = Person()
        named.name = 'Eugeny'

        self.assertFalse(Repo.validate_add_person(no_name_with_id))
        self.assertFalse(Repo.validate_add_person(no_name_with_email))
        self.assertTrue(Repo.validate_add_person(named))
Example #42
0
def commits(repo, branch):
    "Outputs all commits in repo that are in pe-modules-vanagon"
    # Shas that pe-modules-vanagon pins for this branch.
    vanagon_shas = PeModulesVanagon(branch).commits(repo)

    # Every commit present in the project itself.
    all_commits = Repo(repo, branch).commits()

    matching = [entry for entry in all_commits if entry['sha'] in vanagon_shas]

    # Print each matching commit followed by its indented message lines.
    for entry in matching:
        lines = ['commit: {}'.format(entry['sha'])]
        lines.extend(entry['message'].split('\n'))
        click.echo('\n\t'.join(lines))
    def __init__(self, project, jira_url=None, github_user_name=None):
        """Wrap *project* with its git checkout and JIRA/GitHub metadata.

        project: object exposing path(), github() and jira() accessors.
        jira_url: overrides the JiraURL from the [REPO] config section.
        github_user_name: GitHub org/user; defaults to "apache".

        NOTE(review): forces a checkout of HEAD (force=True), discarding any
        local changes in the working tree — confirm callers expect that.
        """
        self.git_path = project.path()
        self.github_name = project.github()
        if jira_url:
            self.jira_url = jira_url
        else:
            # Fall back to the configured JIRA endpoint.
            self.jira_url = Config().config['REPO']['JiraURL']
        if github_user_name:
            self.github_user_name = github_user_name
        else:
            self.github_user_name = "apache"
        self.jira_project_name = project.jira()
        self.repo = Repo(self.jira_project_name,
                         self.github_name,
                         local_path=self.git_path,
                         github_user_name=self.github_user_name)
        self.git_repo = git.Repo(self.git_path)
        self.head_commit = self.git_repo.head.commit.hexsha
        # Pin the working tree to the recorded HEAD commit.
        self.git_repo.git.checkout(self.head_commit, force=True)
        # Browse URL: first remote's URL with ".git" stripped, plus "/tree".
        self.git_url = os.path.join(
            list(self.git_repo.remotes[0].urls)[0].replace(".git", ""), "tree")

        self.commits = self._get_repo_commits(self.git_repo,
                                              self.jira_project_name,
                                              self.jira_url)
        self.versions = self._get_repo_versions(self.git_repo)
        self.bugged_files_between_versions = self._get_bugged_files_between_versions(
            self.versions)
        # Selection state, populated later by callers.
        self.selected_versions = None
        self.selected_config = 0
    def find(query, components):
        """Execute *query* with bound *components* and hydrate Commit objects.

        Each commit row (joined with its repo columns) is combined with its
        touched files, keywords and diff data, yielding GitCommit/SVNCommit/
        Commit instances depending on the repository type.
        """
        conn = DB.getConn()
        cursor = conn.cursor()

        cursor.execute(query, components)
        commitrows = cursor.fetchall()
        # Initialize every secondary result set up front; the original left
        # commitkeywords/commitdata undefined when there were no commit rows.
        commitfiles = []
        commitkeywords = []
        commitdata = []

        if commitrows:
            allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])

            # This is poor practice, but we assured ourselves the value is
            # composed only of ints first.
            DB.execute(cursor, "SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + allcommitids + ")")
            commitfiles = cursor.fetchall()

            DB.execute(cursor, "SELECT * from " + DB.commitkeyword._table + " WHERE commitid IN (" + allcommitids + ")")
            commitkeywords = cursor.fetchall()

            DB.execute(cursor, "SELECT commitid, case when length(data) < 307200 then data else 'TOOLARGE' end as data from " + DB.commitdiffs._table + " WHERE commitid IN (" + allcommitids + ")")
            commitdata = cursor.fetchall()

        commits = []
        for row in commitrows:
            r = Repo()
            r.loadFromValues(row[DB.commit._numColumns + DB.repo.id],
                             row[DB.commit._numColumns + DB.repo.name],
                             row[DB.commit._numColumns + DB.repo.repotypeid],
                             row[DB.commit._numColumns + DB.repo.url],
                             row[DB.commit._numColumns + DB.repo.viewlink],
                             row[DB.commit._numColumns + DB.repo.tagname],
                             row[DB.commit._numColumns + DB.repo.tagmaturity])

            files = [f[DB.commitfile.file] for f in commitfiles
                     if f[DB.commitfile.commitid] == row[DB.commit.id]]
            keywords = [kw[DB.commitkeyword.keyword] for kw in commitkeywords
                        if kw[DB.commitkeyword.commitid] == row[DB.commit.id]]
            # NOTE(review): assumes exactly one diff row exists per commit;
            # [0] raises IndexError otherwise (unchanged from the original).
            data = [cd[DB.commitdiffs.data] for cd in commitdata
                    if cd[DB.commitdiffs.commitid] == row[DB.commit.id]][0]

            # Pick the concrete Commit subclass from the repo type. The
            # original rebound the name ``c`` here, shadowing the DB cursor.
            if row[DB.commit._numColumns + DB.repo.repotypeid] == Repo.Type.GIT:
                commit_obj = GitCommit()
            elif row[DB.commit._numColumns + DB.repo.repotypeid] == Repo.Type.SVN:
                commit_obj = SVNCommit()
            else:
                commit_obj = Commit()
            commit_obj.loadFromDatabase(r, row, files, keywords, data)

            commits.append(commit_obj)

        return commits
	def findByKeywords(keywords):
		"""Return Commit objects whose keywords satisfy the *keywords* expression.

		An empty/falsy *keywords* returns every commit, newest first.
		"""
		conn = DB.getConn()
		cursor = conn.cursor()

		getcommitsSQL = "SELECT c.*, r.* " + \
				"FROM " + DB.commit._table + " c " + \
				"INNER JOIN " + DB.repo._table + " r " + \
				"	ON r.id = c.repoid "

		whereClause = " 1=1 "
		components = []
		if keywords:
			# Translate the keyword expression into a WHERE clause over the
			# commit-keyword join table.
			keywordsTree = KeywordsParser(keywords)
			getcommitsSQL += "LEFT OUTER JOIN " + DB.commitkeyword._table + " ck " + \
							 "	ON c.id = ck.commitid "
			whereClause, components = keywordsTree.getWhereClause("ck.keyword", "r.tagname", "r.maturity")

		getcommitsSQL += "WHERE " + whereClause
		getcommitsSQL += "ORDER BY c.date DESC "

		# Use the DB.execute wrapper, consistent with find() and findByIDs().
		DB.execute(cursor, getcommitsSQL, components)
		commitrows = cursor.fetchall()
		commitfiles = []

		if commitrows:
			allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])

			#This is poor practice, but we assured ourselves the value is composed only of ints first
			DB.execute(cursor, "SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + allcommitids + ")")
			commitfiles = cursor.fetchall()

		commits = []
		for row in commitrows:
			r = Repo()
			r.loadFromValues(row[DB.commit._numColumns + 0], row[DB.commit._numColumns + 1], row[DB.commit._numColumns + 2],
				row[DB.commit._numColumns + 3], row[DB.commit._numColumns + 4], row[DB.commit._numColumns + 5])

			files = [f[DB.commitfile.file] for f in commitfiles
				if f[DB.commitfile.commitid] == row[DB.commit.id]]

			# The original rebound ``c`` here, shadowing the DB cursor.
			commit_obj = Commit()
			commit_obj.loadFromDatabase(r, row, files)

			commits.append(commit_obj)

		return commits
	def findByIDs(project, uniqueid):
		"""Fetch commits, optionally narrowed to one project tag + unique id.

		When either argument is falsy, every commit is returned, newest first.
		"""
		conn = DB.getConn()
		cursor = conn.cursor()

		sql = "SELECT c.*, r.* " + \
				"FROM " + DB.commit._table + " c " + \
				"INNER JOIN " + DB.repo._table + " r " + \
				"	ON r.id = c.repoid "

		whereClause = " 1=1 "
		components = []
		if project and uniqueid:
			# Restrict to a single project/commit pair.
			whereClause += "AND r.tagname = %s AND c.uniqueid = %s "
			components = [project, uniqueid]

		sql += "WHERE " + whereClause
		sql += "ORDER BY c.date DESC "

		DB.execute(cursor, sql, components)
		commitrows = cursor.fetchall()
		commitfiles = []

		if commitrows:
			ids_csv = ",".join([str(int(row[0])) for row in commitrows])

			# This is poor practice, but we assured ourselves the value is
			# composed only of ints first.
			DB.execute(cursor, "SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + ids_csv + ")")
			commitfiles = cursor.fetchall()

		results = []
		for row in commitrows:
			repo_obj = Repo()
			repo_obj.loadFromValues(row[DB.commit._numColumns + 0], row[DB.commit._numColumns + 1],
				row[DB.commit._numColumns + 2], row[DB.commit._numColumns + 3],
				row[DB.commit._numColumns + 4], row[DB.commit._numColumns + 5])

			touched = [entry[DB.commitfile.file] for entry in commitfiles
				if entry[DB.commitfile.commitid] == row[DB.commit.id]]

			commit_obj = Commit()
			commit_obj.loadFromDatabase(repo_obj, row, touched)

			results.append(commit_obj)

		return results
Example #47
0
def get_repos(handle):
    """Fetch *handle*'s repositories from the API and wrap them as Repo objects."""
    # Our own account uses the authenticated /user/repos endpoint; anyone
    # else goes through the public /users/<handle>/repos path.
    if handle == USERNAME:
        repo_url = URL + '/user/repos'
    else:
        repo_url = URL + '/users/' + handle + '/repos'

    response = requests.get(repo_url, headers=HEADERS, auth=(USERNAME, PASSWORD))
    return [Repo.from_Repository(item) for item in response.json()]
Example #48
0
def create_site(repo_path, target_dir):
    """ Записывает извлекаемые из репозитория данные в виде статического
        HTML-сайта в каталог target_dir """
            
    r = Repo.open(repo_path)
    print "Repo loaded."
    print "Blaming the authors..."
    r.compute_blame()
    print "Done."
    print "Saving data..."
    r.save()
    print "Done."
    print "Stats for the latest revision:"
    print r.commits[r.head].snapshot_blame
    print "Plotting..."
    
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    copy_common_files(target_dir)
        
    longest_path = r.get_longest_path()
    print "Found longest_path, len = ", len(longest_path)
    png, commit_coords = commitgraph.commit_network(r, set(longest_path))
    f = open(os.path.join(target_dir, 'graph.png'), 'wb')
    f.write(png)
    f.close()
    print "Plotting blame..."
    png = plot.plot_snapshot_blame(r, longest_path, commit_coords, relative=False)
    f = open(os.path.join(target_dir, 'blame-abs.png'), 'wb')
    f.write(png)
    f.close()
    print "Plotting blame (relative)..."
    png = plot.plot_snapshot_blame(r, longest_path, commit_coords, relative=True)
    f = open(os.path.join(target_dir, 'blame-rel.png'), 'wb')
    f.write(png)
    f.close()
    print "Done"

    print "Writing commit information..."
    f = open(os.path.join(target_dir, 'commits-data.js'), 'w')
    r.dump_commit_info_js(f, commit_coords)
    f.close()
    print "Done"

    root = dirtree.Directory.from_revision_blames(r.commits[r.head].snapshot_file_blames)

    print "Writing dirtree information..."
    f = open(os.path.join(target_dir, 'dirtree-data.js'), 'w')
    root.dump_to_js(f)
    f.close()
    print "Done"
Example #49
0
 def __init__(self, **kwargs):
     """
     Instantiate a new object, mass-assigning the values in +kwargs+.
     Cannot mass-assign id or created_at.
     """
     # Refuse to mass-assign the bookkeeping columns.
     if {"id", "created_at", "updated_at"} & set(kwargs):
         raise AttributeError("Cannot set 'id', 'created_at', "
                              "or 'updated_at'")
     # Every declared attribute starts out as None before assignment.
     for attribute in self.__class__.__all_attributes__:
         setattr(self, "_" + attribute, None)
     self.update(**kwargs)
     self._id = None
     self.__table = Repo.table_name(self.__class__)
     # Related records queued for save / destroy alongside this record.
     self._related_records = []
     self._delete_related_records = []
Example #50
0
 def _query_repo(self):
     """Translate every clause accumulated on this Query into a Repo chain."""
     query = Repo(self.table)
     # Each builder call is applied only when the matching clause was set,
     # in the same fixed order as SQL composition expects.
     if self.where_query or self.custom_where:
         query = query.where(self.custom_where, **self.where_query)
     if self.join_args:
         query = query.inner_join(*self.join_args)
     if self._order_with:
         query = query.order_by(**self._order_with)
     if self.group_column:
         query = query.group_by(self.group_column)
     if self.having_args:
         query = query.having(self.having_args)
     if self.limit_count:
         query = query.limit(self.limit_count)
     return query
Example #51
0
 def save(self):
     """
     Save a record to the database, creating it if needed, updating it
     otherwise. Also saves related records (children and dependents) as
     needed.
     """
     # The whole save runs inside one transaction on Repo.db.
     with Repo.db:
         self._do_save()
         our_name = inflector.singularize(Repo.table_name(self.__class__))
         for record in self._related_records:
             # Only when this record had no pre-existing id do we stamp our
             # id onto the related record's foreign key.
             if not self._id:
                 related_key = associations.foreign_keys_for(
                     record.__class__)[our_name]
                 # NOTE(review): reads the name-mangled ``self.__id`` while
                 # the guard two lines above checks ``self._id`` — presumably
                 # ``__id`` holds the freshly inserted id set by _do_save;
                 # confirm against _do_save/_finish_save before "fixing".
                 setattr(record, related_key, self.__id)
             record._do_save()
         for record in self._delete_related_records:
             record._do_destroy()
     # Post-commit bookkeeping happens outside the transaction.
     self._finish_save()
Example #52
0
 def __init__(self, model, record=None):
     """
     Instantiate a new Query over +model+ (the lazy_record class): the
     query's member objects will be instances of +model+. When +record+
     is passed, it is used for managing related records (to ensure that
     associated records are updated on +record+'s save and vice versa).
     """
     self.model = model
     self.record = record
     # Columns exposed by the query: the implicit id plus model attributes.
     self.attributes = ["id"] + list(self.model.__all_attributes__)
     self.table = Repo.table_name(self.model)
     # Clause accumulators, populated later by the builder methods.
     self.where_query = {}
     self.custom_where = []
     self.having_args = []
     self.join_args = []
     self._order_with = {}
     self.group_column = None
     self.limit_count = None
Example #53
0
 def do_join(table, model):
     """Yield join descriptors walking the association chain from *model*
     to *table*.

     Each yielded dict has the shape {'table': <name>, 'on': [<left_col>,
     <right_col>]}. Iteration stops once *model* reaches the model that
     *table* names.

     NOTE(review): assumes associations_for/model_from_name always resolve
     each hop; a broken chain raises KeyError — confirm with callers.
     """
     while model is not associations.model_from_name(table):
         # ex) Category -> Forum -> Thread -> Post
         # Category: {"posts": "forums"}
         # Forum: {"posts": "threads"}
         # Thread: {"posts": None}
         # >>> Category.joins("posts")
         # => [
         #       {'table': 'forums', 'on': ['category_id', 'id']}
         #       {'table': 'threads', 'on': ['forum_id', 'id']}
         #       {'table': 'posts', 'on': ['thread_id', 'id']}
         #    ]
         if table in associations.associations_for(model):
             # This to next: one-many (they have the fk)
             # If associations.associations_for(model)[table] is None, then this is
             # terminal (i.e. table is the FINAL association in the
             # chain)
             next_level = associations.associations_for(model)[table] or table
             next_model = associations.model_from_name(next_level)
             foreign_key = associations.foreign_keys_for(model).get(
                 next_level,
                 inflector.foreignKey(model.__name__))
             yield {'table': next_level, 'on': [foreign_key, 'id']}
         else:
             # One-One or Many-One
             # singular table had better be in associations.associations_for(model)
             singular = inflector.singularize(table)
             next_level = associations.associations_for(model)[singular] or singular
             next_model = associations.model_from_name(next_level)
             this_table_name = Repo.table_name(model)
             foreign_key = associations.foreign_keys_for(model).get(
                 next_level,
                 inflector.foreignKey(model.__name__))
             if associations.model_has_foreign_key_for_table(table,
                                                             model):
                 # we have the foreign key
                 order = ['id', foreign_key]
             else:
                 # They have the foreign key
                 order = [foreign_key, 'id']
             yield {'table': inflector.pluralize(next_level), 'on': order}
         # Advance one hop down the association chain.
         model = next_model
Example #54
0
    def test_create_on_disk_with_index(self):
        """Repo.create_on_disk persists both the config and the picture index."""
        created = Repo.create_on_disk(self.connector, self.conf, self.pi)

        # The returned repo wraps exactly what was passed in.
        self.assertIsInstance(created, Repo)
        self.assertIs(created.connector, self.connector)
        self.assertIs(created.config, self.conf)
        self.assertEqual(created.index, self.pi)

        # The configuration was written and round-trips back equal.
        self.assertTrue(self.connector.opened(repo.CONFIG_FILE))
        reread_conf = config.Config()
        reread_conf.read(self.connector.get_file(repo.CONFIG_FILE))
        self.assertEqual(reread_conf, self.conf)

        # Likewise the picture index.
        self.assertTrue(self.connector.opened('mock-index-path'))
        reread_index = index.PictureIndex()
        reread_index.read(self.connector.get_file('mock-index-path'))
        self.assertEqual(reread_index, self.pi)
Example #55
0
    def createRepo(self):
        """Build a Repo for the name entered in repoEdit and wire its data
        and progress signals into the issue/commit/line plots."""
        print("Getting repo: ", self.repoEdit.text())
        since = NotSet
        until = NotSet
        self.repo = Repo(self.repoEdit.text(), _since=since,
                         _until=until, _gh=self.github)

        # Seed each plot with the repo's current data series.
        self.issuesPlot.changeData(self.repo.issuesData, ["Open Issues"])
        self.commitsPlot.changeData(self.repo.commitsData[0:3],
                                    ["Bugfixes", "Features"])
        line_series = [self.repo.commitsData[0],
                       self.repo.commitsData[3],
                       self.repo.commitsData[4]]
        self.linesPlot.changeData(line_series,
                                  ["Additions", "Removals", "Total"])

        # Live updates as issues/commits are processed.
        self.repo.issueProcessed.connect(self.issuesPlot.updatePlot)
        self.repo.commitProcessed.connect(self.commitsPlot.updatePlot)
        self.repo.commitProcessed.connect(self.linesPlot.updatePlot)

        # Every plot shows milestone markers.
        for plot_widget in self.plots:
            self.repo.milestoneProcessed.connect(plot_widget.addMilestone)
            plot_widget.milestoneData = self.repo.milestoneData

        self.startDate.setDateTime(self.repo.createdAt)
Example #56
0
class BenchmarkRepo(object):

    def __init__(self, clear=False):
        self.repo = Repo(clear=clear)

    def benchmark_write(self):
        name = "/ndn/ucla.edu/bms/building:melnitz/room:1451/seg0"
        content = "melnitz.1451.seg0"
        data = self.repo.wrap_content(name, content)
        data_size = getsizeof(data)

        volume = 0
        start_time = datetime.now()
        for i in range(100):
            self.repo.add_content_object_to_repo(name, data)
            volume += data_size
        finish_time = datetime.now()
        duration = finish_time - start_time
        print duration, volume

    def benchmark_read(self):
        # the graph db is queried 2 times per read if does not 
        # apply selectors. otherwise 4 times queries are needed
        name = "/ndn/ucla.edu/bms/building:melnitz/room:1451/seg0"
        content = "melnitz.1451.seg0"
        interest = Interest(Name(name))
        data = self.repo.wrap_content(name, content)
        data_size = getsizeof(data)

        volume = 0
        start_time = datetime.now()
        for i in range(1):
            self.repo.extract_from_repo(interest)
            volume += data_size
        finish_time = datetime.now()
        duration = finish_time - start_time
        print duration, volume

    def run_benchmark(self):
#        self.benchmark_write()
        self.benchmark_read()
Example #57
0
	def __init__(self, name, url):
		"""Initialize the base Repo under *name*, then set up paths from
		name/url and read in the package list (see init_paths and
		read_packages for the details of each step)."""
		Repo.__init__(self, name)
		self.init_paths(name, url)
		self.read_packages()
Example #58
0
    args = parser.parse_args()

    args.startdate, args.enddate = fixDates(args.startdate, args.enddate)

    print "Searching between", time.strftime(
        "%a, %d %b %Y %H:%M:%S +0000", time.gmtime(args.startdate)
    ), "-", time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(args.enddate))

    conn = DB.getConn()
    c = conn.cursor()

    c.execute("SELECT * FROM " + DB.repo._table)
    rows = c.fetchall()

    for i in rows:
        r = Repo()
        r.loadFromDatabase(i)
        r.pprint()

        module = -1
        if r.type == Repo.Type.SVN:
            module = svnpuller
        elif r.type == Repo.Type.GIT:
            module = gitpuller

        if module != -1:
            try:
                commits = module.getCommits(r, args.startdate, args.enddate)
            except:
                print "Error pulling commits for", r.url
                commits = []