Example No. 1
def test_discover_config_file_env_no_exception():
    test_config = default_config
    with open(os.path.join(temp_dir, "augur.config.json"), "w") as f:
        pass
    config_object = AugurConfig(temp_dir, test_config)
    assert config_object.discover_config_file() == os.path.join(
        temp_dir, "augur.config.json")
Example No. 2
def test_discover_config_file_env_exception():
    os.environ['AUGUR_CONFIG_FILE'] = os.path.join(temp_dir,
                                                   "augur.config.json")
    test_config = default_config
    with pytest.raises(FileNotFoundError):
        config_object = AugurConfig(temp_dir, test_config)
        assert config_object.discover_config_file()
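Both tests above reference a module-level temp_dir and default_config that are not shown in this excerpt, and the second test leaves AUGUR_CONFIG_FILE set in the environment. A minimal sketch of the kind of shared setup they assume (the fixture bodies here are assumptions, not the project's actual test configuration):

import os
import tempfile

import pytest

# Hypothetical stand-ins for the module-level objects the tests reference.
temp_dir = tempfile.mkdtemp()                     # directory augur.config.json is written into
default_config = {"Database": {}, "Server": {}}   # assumed minimal config dictionary

@pytest.fixture(autouse=True)
def _clean_augur_env():
    # Keep AUGUR_CONFIG_FILE from leaking from one test into the next.
    os.environ.pop("AUGUR_CONFIG_FILE", None)
    yield
    os.environ.pop("AUGUR_CONFIG_FILE", None)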
Example No. 3
    def __init__(self,
                 given_config={},
                 disable_logs=False,
                 offline_mode=False):
        """
        Reads config, creates DB session, and initializes cache
        """
        self.logging = AugurLogging(disable_logs=disable_logs)
        self.root_augur_dir = ROOT_AUGUR_DIRECTORY
        self.config = AugurConfig(self.root_augur_dir, given_config)

        # we need these for later
        self.housekeeper = None
        self.manager = None

        self.gunicorn_options = {
            'bind': '%s:%s' % (self.config.get_value("Server", "host"),
                               self.config.get_value("Server", "port")),
            'workers': int(self.config.get_value('Server', 'workers')),
            'timeout': int(self.config.get_value('Server', 'timeout'))
        }
        self.logging.configure_logging(self.config)
        self.gunicorn_options.update(self.logging.gunicorn_logging_options)

        self.cache_config = {
            'cache.type': 'file',
            'cache.data_dir': 'runtime/cache/',
            'cache.lock_dir': 'runtime/cache/'
        }

        if not os.path.exists(self.cache_config['cache.data_dir']):
            os.makedirs(self.cache_config['cache.data_dir'])
        if not os.path.exists(self.cache_config['cache.lock_dir']):
            os.makedirs(self.cache_config['cache.lock_dir'])
        cache_parsed = parse_cache_config_options(self.cache_config)
        self.cache = CacheManager(**cache_parsed)

        if offline_mode is False:
            logger.debug("Running in online mode")
            self.database, self.operations_database, self.spdx_database = \
                self._connect_to_database()

            self.metrics = Metrics(self)
Example No. 4
    def __init__(self, config={}):

        # Define the worker's type, which will be used for self identification.
        worker_type = "pull_request_analysis_worker"

        # Define what this worker can be given and know how to interpret
        given = [['github_url']]

        # The name the housekeeper/broker use to distinguish the data model this worker can fill
        models = ['pull_request_analysis']

        # Define the tables needed to insert, update, or delete on
        data_tables = ['message', 'repo', 'pull_request_analysis']
        operations_tables = ['worker_history', 'worker_job']

        # Run the general worker initialization
        super().__init__(worker_type, config, given, models, data_tables,
                         operations_tables)

        # Do any additional configuration after the general initialization has been run
        self.config.update(config)

        # Define data collection info
        self.tool_source = 'Pull Request Analysis Worker'
        self.tool_version = '0.0.0'
        self.data_source = 'Non-existent API'

        self.insight_days = 200  # self.config['insight_days']

        augur_config = AugurConfig(ROOT_AUGUR_DIRECTORY)
        self.senti_models_dir = os.path.join(
            ROOT_AUGUR_DIRECTORY, "workers", "message_insights_worker",
            augur_config.get_section("Workers")["message_insights_worker"]
            ["models_dir"])

        self.logger.info(
            f'Sentiment model dir located - {self.senti_models_dir}')
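The models_dir lookup above assumes the Augur config contains a Workers section with a message_insights_worker entry. A hedged sketch of the shape that section needs (the key names come from the code above; the directory value is purely illustrative):

# Illustrative shape of the config fragment the lookup above expects;
# a real deployment's value for "models_dir" will differ.
workers_section = {
    "message_insights_worker": {
        "models_dir": "message_models",
    },
}
models_dir = workers_section["message_insights_worker"]["models_dir"]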
Example No. 5
    def __init__(self, organization_name, database_connection):
        self.org = organization_name
        self.db = database_connection
        ## added for keys
        self._root_augur_dir = Repo_insertion_manager.ROOT_AUGUR_DIR
        self.augur_config = AugurConfig(self._root_augur_dir)
Example No. 6
class Repo_insertion_manager():
    ROOT_AUGUR_DIR = os.path.dirname(
        os.path.dirname(os.path.realpath(__file__)))

    def __init__(self, organization_name, database_connection):
        self.org = organization_name
        self.db = database_connection
        ## added for keys
        self._root_augur_dir = Repo_insertion_manager.ROOT_AUGUR_DIR
        self.augur_config = AugurConfig(self._root_augur_dir)

        ##########

    def get_existing_repos(self, group_id):
        """returns repos belonging to repogroup in augur db"""
        select_repos_query = s.sql.text("""
            SELECT repo_git from augur_data.repo
            WHERE repo_group_id = :repo_group_id
        """)
        select_repos_query = select_repos_query.bindparams(
            repo_group_id=group_id)
        result = self.db.execute(select_repos_query)
        return result.fetchall()

## This doesn't permit importing of an individual's repo, as they don't show up under "orgs"
#    def group_exists_gh(self):
#        url = url = "https://api.github.com/orgs/{}".format(self.org)
#        res = requests.get(url).json()
#        try:
#            if res['message'] == "Not Found":
#                return False
#        except KeyError:
#            return True

## Revised Version of Method

    def group_exists_gh(self):
        url = "https://api.github.com/orgs/{}".format(self.org)
        ## attempting to add key due to rate limiting
        gh_api_key = self.augur_config.get_value('Database', 'key')
        self.headers = {'Authorization': 'token %s' % gh_api_key}
        #r = requests.get(url=cntrb_url, headers=self.headers)
        ####### Original request code
        #        res = requests.get(url).json()
        ########
        res = requests.get(url=url, headers=self.headers).json()
        try:
            if res['message'] == "Not Found":
                url = "https://api.github.com/users/{}".format(self.org)
                res = requests.get(url=url, headers=self.headers).json()
                if res['message'] == "Not Found":
                    return False
        except KeyError:
            return True

    def insert_repo(self, orgid, given_org, reponame):
        """creates a new repo record"""
        insert_repo_query = s.sql.text("""
            INSERT INTO augur_data.repo(repo_group_id, repo_git, repo_status,
                tool_source, tool_version, data_source, data_collection_date)
            VALUES (:repo_group_id, :repo_git, 'New', 'CLI', 1.0, 'Git', CURRENT_TIMESTAMP)
            RETURNING repo_id
        """)
        repogit = self.github_urlify(given_org, reponame)
        insert_repo_query = insert_repo_query.bindparams(
            repo_group_id=int(orgid), repo_git=repogit)
        result = self.db.execute(insert_repo_query).fetchone()
        return result['repo_id']

    def github_urlify(self, org, repo):
        return "https://github.com/" + org + "/" + repo

    def get_org_id(self):
        select_group_query = s.sql.text("""
            SELECT repo_group_id
            FROM augur_data.repo_groups
            WHERE rg_name = :group_name
        """)
        select_group_query = select_group_query.bindparams(group_name=self.org)
        result = self.db.execute(select_group_query)
        row = result.fetchone()
        return row['repo_group_id']

    def insert_repo_group(self):
        """creates a new repo_group record and returns its id"""
        insert_group_query = s.sql.text("""
            INSERT INTO augur_data.repo_groups(rg_name, rg_description, rg_website, rg_recache, rg_last_modified, rg_type, 
                tool_source, tool_version, data_source, data_collection_date)
            VALUES (:group_name, '', '', 1, CURRENT_TIMESTAMP, 'Unknown', 'Loaded by user', 1.0, 'Git', CURRENT_TIMESTAMP)
            RETURNING repo_group_id
        """)
        insert_group_query = insert_group_query.bindparams(group_name=self.org)
        result = self.db.execute(insert_group_query)
        row = result.fetchone()
        return row['repo_group_id']

    def fetch_repos(self):
        """uses the github api to return repos belonging to the given organization"""
        gh_api_key = self.augur_config.get_value('Database', 'key')
        self.headers = {'Authorization': 'token %s' % gh_api_key}
        repos = []
        page = 1
        url = self.paginate(page)
        res = requests.get(url, headers=self.headers).json()
        while res:
            for repo in res:
                repos.append(repo['name'])
            page += 1
            res = requests.get(self.paginate(page), headers=self.headers).json()
        return repos

## Modified pagination to account for github orgs that look like orgs but are actually users.

    def paginate(self, page):
        ### Modified here to incorporate the use of a GitHub API Key
        gh_api_key = self.augur_config.get_value('Database', 'key')
        self.headers = {'Authorization': 'token %s' % gh_api_key}
        url = "https://api.github.com/orgs/{}/repos?per_page=100&page={}"
        # Probe the formatted org URL and fall back to the /users endpoint when
        # the name actually belongs to a user account rather than an org.
        res = requests.get(url.format(self.org, str(page)), headers=self.headers).json()
        if isinstance(res, dict) and res.get('message') == "Not Found":
            url = "https://api.github.com/users/{}/repos?per_page=100&page={}"
        return url.format(self.org, str(page))
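Taken together, the methods above form a small import pipeline. A hedged usage sketch (the organization name is illustrative and database_connection is assumed to be an already-open SQLAlchemy connection to the Augur database):

# Hypothetical driver showing how the pieces of Repo_insertion_manager fit together.
manager = Repo_insertion_manager("chaoss", database_connection)

if manager.group_exists_gh():
    try:
        group_id = manager.get_org_id()          # reuse an existing repo group...
    except TypeError:
        group_id = manager.insert_repo_group()   # ...or create one when the lookup finds no row

    existing = {row['repo_git'] for row in manager.get_existing_repos(group_id)}
    for repo_name in manager.fetch_repos():
        if manager.github_urlify(manager.org, repo_name) not in existing:
            manager.insert_repo(group_id, manager.org, repo_name)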
Example No. 7
class Application():
    """Initializes all classes from Augur using a config file or environment variables"""
    def __init__(self,
                 given_config={},
                 disable_logs=False,
                 offline_mode=False):
        """
        Reads config, creates DB session, and initializes cache
        """
        self.logging = AugurLogging(disable_logs=disable_logs)
        self.root_augur_dir = ROOT_AUGUR_DIRECTORY
        self.config = AugurConfig(self.root_augur_dir, given_config)

        # we need these for later
        self.housekeeper = None
        self.manager = None

        self.gunicorn_options = {
            'bind': '%s:%s' % (self.config.get_value("Server", "host"),
                               self.config.get_value("Server", "port")),
            'workers': int(self.config.get_value('Server', 'workers')),
            'timeout': int(self.config.get_value('Server', 'timeout'))
        }
        self.logging.configure_logging(self.config)
        self.gunicorn_options.update(self.logging.gunicorn_logging_options)

        self.cache_config = {
            'cache.type': 'file',
            'cache.data_dir': 'runtime/cache/',
            'cache.lock_dir': 'runtime/cache/'
        }

        if not os.path.exists(self.cache_config['cache.data_dir']):
            os.makedirs(self.cache_config['cache.data_dir'])
        if not os.path.exists(self.cache_config['cache.lock_dir']):
            os.makedirs(self.cache_config['cache.lock_dir'])
        cache_parsed = parse_cache_config_options(self.cache_config)
        self.cache = CacheManager(**cache_parsed)

        if offline_mode is False:
            logger.debug("Running in online mode")
            self.database, self.operations_database, self.spdx_database = \
                self._connect_to_database()

            self.metrics = Metrics(self)

    def _connect_to_database(self):
        user = self.config.get_value('Database', 'user')
        host = self.config.get_value('Database', 'host')
        port = self.config.get_value('Database', 'port')
        dbname = self.config.get_value('Database', 'name')

        database_connection_string = 'postgresql://{}:{}@{}:{}/{}'.format(
            user, self.config.get_value('Database', 'password'), host, port,
            dbname)

        csearch_path_options = 'augur_data'

        engine = s.create_engine(
            database_connection_string,
            poolclass=s.pool.NullPool,
            connect_args={'options': f'-csearch_path={csearch_path_options}'},
            pool_pre_ping=True)

        csearch_path_options += ',spdx'
        spdx_engine = s.create_engine(
            database_connection_string,
            poolclass=s.pool.NullPool,
            connect_args={'options': f'-csearch_path={csearch_path_options}'},
            pool_pre_ping=True)

        helper_engine = s.create_engine(
            database_connection_string,
            poolclass=s.pool.NullPool,
            connect_args={'options': f'-csearch_path=augur_operations'},
            pool_pre_ping=True)

        try:
            engine.connect().close()
            helper_engine.connect().close()
            spdx_engine.connect().close()
            logger.debug("Database connection successfully established")
            return engine, helper_engine, spdx_engine
        except s.exc.OperationalError as e:
            logger.error("Unable to connect to the database. Terminating...")
            raise e

    def shutdown(self):
        if self.logging.stop_event is not None:
            logger.debug("Stopping housekeeper logging listener...")
            self.logging.stop_event.set()

        if self.housekeeper is not None:
            logger.debug("Shutting down housekeeper updates...")
            self.housekeeper.shutdown_updates()
            self.housekeeper = None

        if self.manager is not None:
            logger.debug("Shutting down manager...")
            self.manager.shutdown()
            self.manager = None
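The class is normally driven by Augur's CLI, but its lifecycle is straightforward. A minimal sketch of exercising it directly (running in offline mode here is an assumption made purely to avoid the database connection):

# Hypothetical usage: construct in offline mode, read back the values used for
# the gunicorn bind address, then shut down cleanly.
app = Application(offline_mode=True)
host = app.config.get_value("Server", "host")
port = app.config.get_value("Server", "port")
print("gunicorn would bind to %s:%s" % (host, port))
app.shutdown()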
Example No. 8
def new_func(ctx, *args, **kwargs):
    config = AugurConfig(ROOT_AUGUR_DIRECTORY)
    ctx.obj = AugurLogging.get_log_directories(config, reset_logfiles=False)
    return ctx.invoke(f, ctx.obj, *args, **kwargs)
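The snippet above is the inner function of a decorator; a plausible enclosing wrapper for it (the name pass_logs_dir and the use of click here are assumptions inferred from the ctx.invoke call, not the project's confirmed implementation):

import click
from functools import update_wrapper

def pass_logs_dir(f):
    # Hypothetical decorator: resolve the log directories once and pass them
    # to the wrapped click command as its first argument.
    @click.pass_context
    def new_func(ctx, *args, **kwargs):
        config = AugurConfig(ROOT_AUGUR_DIRECTORY)
        ctx.obj = AugurLogging.get_log_directories(config, reset_logfiles=False)
        return ctx.invoke(f, ctx.obj, *args, **kwargs)
    return update_wrapper(new_func, f)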
Example No. 9
def test_config_get_section_exception():
    test_config = default_config
    test_config['Database']['user'] = "******"
    config_object = AugurConfig(temp_dir, test_config)
    assert config_object.get_section("absent_section") is None
Example No. 10
def test_config_get_section_no_exception():
    test_config = default_config
    test_config['Database']['user'] = "******"
    config_object = AugurConfig(temp_dir, test_config)
    assert isinstance(config_object.get_section("Database"), dict)