Example #1
File: db.py  Project: ishagarg06/augur
def check_pgpass_credentials(config):
    pgpass_file_path = environ['HOME'] + '/.pgpass'

    if not path.isfile(pgpass_file_path):
        logger.debug("~/.pgpass does not exist, creating.")
        open(pgpass_file_path, 'w+').close()
        chmod(pgpass_file_path, stat.S_IWRITE | stat.S_IREAD)

    pgpass_file_mask = oct(os.stat(pgpass_file_path).st_mode & 0o777)

    if pgpass_file_mask != '0o600':
        logger.debug("Updating ~/.pgpass file permissions.")
        chmod(pgpass_file_path, stat.S_IWRITE | stat.S_IREAD)

    with open(pgpass_file_path, 'a+') as pgpass_file:
        end = pgpass_file.tell()
        pgpass_file.seek(0)

        credentials_string = str(config['Database']['host']) \
                          + ':' + str(config['Database']['port']) \
                          + ':' + str(config['Database']['name']) \
                          + ':' + str(config['Database']['user']) \
                          + ':' + str(config['Database']['password'])

        if credentials_string.lower() not in [
                ''.join(line.split()).lower()
                for line in pgpass_file.readlines()
        ]:
            logger.info("Adding credentials to $HOME/.pgpass")
            pgpass_file.seek(end)
            pgpass_file.write(credentials_string + '\n')
        else:
            logger.info("Credentials found in $HOME/.pgpass")
Example #2
 def update_all(self):
     """
     Updates all plugins
     """
     for updatable in self.__updatable:
         logger.info('Updating {}...'.format(updatable['name']))
         updatable['update']()
Example #3
File: util.py  Project: ishagarg06/augur
def list_processes():
    """
    Outputs the name and process ID (PID) of all currently running backend Augur processes, including any workers. Will only work in a virtual environment.    
    """
    processes = get_augur_processes()
    for process in processes:
        logger.info(f"Found process {process.pid}")
Example #4
File: db.py  Project: ishagarg06/augur
def add_repo_groups(ctx, filename):
    """
    Create new repo groups in Augur's database
    """
    app = ctx.obj

    df = pd.read_sql(
        s.sql.text("SELECT repo_group_id FROM augur_data.repo_groups"),
        app.database)
    repo_group_IDs = df['repo_group_id'].values.tolist()

    insert_repo_group_sql = s.sql.text("""
    INSERT INTO "augur_data"."repo_groups"("repo_group_id", "rg_name", "rg_description", "rg_website", "rg_recache", "rg_last_modified", "rg_type", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES (:repo_group_id, :repo_group_name, '', '', 0, CURRENT_TIMESTAMP, 'Unknown', 'Loaded by user', '1.0', 'Git', CURRENT_TIMESTAMP);
    """)

    with open(filename) as create_repo_groups_file:
        data = csv.reader(create_repo_groups_file, delimiter=',')
        for row in data:
            logger.info(
                f"Inserting repo group with name {row[1]} and ID {row[0]}...")
            if int(row[0]) not in repo_group_IDs:
                repo_group_IDs.append(int(row[0]))
                app.database.execute(insert_repo_group_sql,
                                     repo_group_id=int(row[0]),
                                     repo_group_name=row[1])
            else:
                logger.info(
                    f"Repo group with ID {row[1]} for repo group {row[1]} already exists, skipping..."
                )
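
The insert above uses SQLAlchemy's text() construct with named bind parameters. A rough, self-contained sketch of that parameterized-insert pattern, run against an in-memory SQLite database instead of Augur's Postgres schema, could look like this:

import sqlalchemy as s

engine = s.create_engine("sqlite:///:memory:")
insert_sql = s.sql.text("""
    INSERT INTO repo_groups (repo_group_id, rg_name)
    VALUES (:repo_group_id, :rg_name)
""")

with engine.connect() as connection:
    connection.execute(s.sql.text(
        "CREATE TABLE repo_groups (repo_group_id INTEGER PRIMARY KEY, rg_name TEXT)"))
    # Bind parameters by name, mirroring the :repo_group_id style used above.
    connection.execute(insert_sql, {"repo_group_id": 1, "rg_name": "demo-group"})
    print(connection.execute(s.sql.text("SELECT * FROM repo_groups")).fetchall())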
Example #5
File: db.py  Project: ishagarg06/augur
def add_repos(ctx, filename):
    """
    Add repositories to Augur's database
    """
    app = ctx.obj

    df = app.database.execute(
        s.sql.text("SELECT repo_group_id FROM augur_data.repo_groups"))
    repo_group_IDs = [group[0] for group in df.fetchall()]

    insertSQL = s.sql.text("""
        INSERT INTO augur_data.repo(repo_group_id, repo_git, repo_status, 
        tool_source, tool_version, data_source, data_collection_date) 
        VALUES (:repo_group_id, :repo_git, 'New', 'CLI', 1.0, 'Git', CURRENT_TIMESTAMP)
    """)

    with open(filename) as upload_repos_file:
        data = csv.reader(upload_repos_file, delimiter=',')
        for row in data:
            logger.info(
                f"Inserting repo with Git URL `{row[1]}` into repo group {row[0]}"
            )
            if int(row[0]) in repo_group_IDs:
                result = app.database.execute(insertSQL,
                                              repo_group_id=int(row[0]),
                                              repo_git=row[1])
            else:
                logger.warning(
                    f"Invalid repo group id specified for {row[1]}, skipping.")
Example #6
File: db.py  Project: ishagarg06/augur
def check_for_upgrade(ctx):
    """
    Check whether the configured database schema is up to date with the latest available version
    """
    app = ctx.obj
    check_pgpass_credentials(app.config)
    current_db_version = get_db_version(app)

    update_scripts_filenames = []
    for (_, _, filenames) in walk('schema/generate'):
        update_scripts_filenames.extend(
            [file for file in filenames if 'update' in file])
        # files_temp.extend([file.split("-")[1][14:].split(".")[0] for file in filenames if 'update' in file])
        break

    target_version_script_map = {}
    for script in update_scripts_filenames:
        upgrades_to = int(script.split("-")[1][14:].split(".")[0])
        target_version_script_map[upgrades_to] = str(script)

    target_version_script_map = OrderedDict(
        sorted(target_version_script_map.items()))

    most_recent_version = list(target_version_script_map.keys())[-1]
    if current_db_version == most_recent_version:
        logger.info("Database is already up to date.")
    elif current_db_version < most_recent_version:
        logger.info(
            f"Current database version: v{current_db_version}\nPlease upgrade to the most recent version (v{most_recent_version}) with augur db upgrade-db-version."
        )
    elif current_db_version > most_recent_version:
        logger.warning(
            f"Unrecognized version: {current_db_version}\nThe most recent version is {most_recent_version}. Please contact your system administrator to resolve this error."
        )
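
The version-to-script mapping above hinges on slicing each update script's filename. Assuming names of the form "<prefix>-schema_update_<N>.sql" (inferred from the [14:] slice, not confirmed by the source), a small sketch of that parsing step:

from collections import OrderedDict

# Hypothetical script names following the "<prefix>-schema_update_<N>.sql" shape
update_scripts_filenames = [
    "augur-schema_update_12.sql",
    "augur-schema_update_10.sql",
    "augur-schema_update_11.sql",
]

target_version_script_map = {}
for script in update_scripts_filenames:
    # split("-")[1] -> "schema_update_12.sql"; [14:] drops "schema_update_"; split(".")[0] -> "12"
    upgrades_to = int(script.split("-")[1][14:].split(".")[0])
    target_version_script_map[upgrades_to] = str(script)

target_version_script_map = OrderedDict(sorted(target_version_script_map.items()))
most_recent_version = list(target_version_script_map.keys())[-1]
print(most_recent_version)  # 12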
Example #7
    def __init__(self,
                 user,
                 password,
                 host,
                 port,
                 dbname,
                 ghtorrent,
                 buildMode="auto"):
        """
		Connect to the database

		:param dbstr: The [database string](http://docs.sqlalchemy.org/en/latest/core/engines.html) to connect to the GHTorrent database
		"""
        char = "charset=utf8"
        self.DB_STR = 'mysql+pymysql://{}:{}@{}:{}/{}?{}'.format(
            user, password, host, port, dbname, char)
        #print('GHTorrentPlus: Connecting to {}:{}/{}?{} as {}'.format(host, port, dbname, char,user))
        self.db = s.create_engine(self.DB_STR, poolclass=s.pool.NullPool)
        self.ghtorrent = ghtorrent

        try:
            # Table creation
            if (buildMode == 'rebuild') or ((not self.db.dialect.has_table(
                    self.db.connect(), 'issue_response_time'))
                                            and buildMode == "auto"):
                logger.info(
                    "[GHTorrentPlus] Creating Issue Response Time table...")
                self.build_issue_response_time()
        except Exception as e:
            logger.error(
                "Could not connect to GHTorrentPlus database. Error: " +
                str(e))
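
The constructor above checks for an existing table before building it. A condensed sketch of that connect-then-conditionally-create pattern, using an in-memory SQLite engine in place of the GHTorrent MySQL database, might read:

import sqlalchemy as s

engine = s.create_engine("sqlite:///:memory:", poolclass=s.pool.NullPool)

with engine.connect() as connection:
    # Same check as above: only build the table when it does not already exist.
    if not engine.dialect.has_table(connection, 'issue_response_time'):
        connection.execute(s.sql.text(
            "CREATE TABLE issue_response_time (issue_id INTEGER, responded_at TEXT)"))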
Example #8
File: batch.py  Project: ishagarg06/augur
    def batch_metadata():
        """
        Returns endpoint metadata in batch format
        """
        server.show_metadata = True

        if request.method == 'GET':
            """this will return sensible defaults in the future"""
            return server.app.make_response(json.dumps(metric_metadata))

        try:
            requests = json.loads(request.data.decode('utf-8'))
        except ValueError as e:
            abort(400)

        responses = []

        for index, req in enumerate(requests):
            method = req['method']
            path = req['path']
            body = req.get('body', None)

            try:
                logger.info('batch endpoint: ' + path)
                with server.app.app_context():
                    with server.app.test_request_context(path,
                                                         method=method,
                                                         data=body):
                        try:
                            rv = server.app.preprocess_request()
                            if rv is None:
                                rv = server.app.dispatch_request()
                        except Exception as e:
                            rv = server.app.handle_user_exception(e)
                        response = server.app.make_response(rv)
                        response = server.app.process_response(response)

                responses.append({
                    "path": path,
                    "status": response.status_code,
                    "response": str(response.get_data(), 'utf8'),
                })

            except Exception as e:
                responses.append({
                    "path": path,
                    "status": 500,
                    "response": str(e)
                })

        server.show_metadata = False

        return Response(response=json.dumps(responses),
                        status=207,
                        mimetype="application/json")
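
The batch endpoint above replays each sub-request through the Flask app and collects the responses. A minimal, self-contained sketch of that idea with a bare Flask app and a hypothetical /ping endpoint (using full_dispatch_request in place of Augur's preprocess/dispatch/handle sequence) could look like this:

import json
from flask import Flask

app = Flask(__name__)

@app.route("/ping")
def ping():
    return {"status": "ok"}

def dispatch_batch(requests):
    responses = []
    for req in requests:
        with app.test_request_context(req["path"],
                                      method=req.get("method", "GET"),
                                      data=req.get("body")):
            # full_dispatch_request runs preprocessing, routing, and error handling in one call
            response = app.full_dispatch_request()
        responses.append({
            "path": req["path"],
            "status": response.status_code,
            "response": response.get_data(as_text=True),
        })
    return responses

print(json.dumps(dispatch_batch([{"path": "/ping", "method": "GET"}])))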
Example #9
File: util.py  Project: ishagarg06/augur
def repo_reset(ctx):
    """
    Refresh repo collection to force data collection
    """
    app = ctx.obj

    app.database.execute(
        "UPDATE augur_data.repo SET repo_path = NULL, repo_name = NULL, repo_status = 'New'; TRUNCATE augur_data.commits CASCADE; "
    )

    logger.info("Repos successfully reset")
Example #10
File: db.py  Project: ishagarg06/augur
def update_api_key(ctx, api_key):
    """
    Update the API key in the database to the given key
    """
    app = ctx.obj

    update_api_key_sql = s.sql.text("""
        UPDATE augur_operations.augur_settings SET VALUE = :api_key WHERE setting='augur_api_key';
    """)

    app.database.execute(update_api_key_sql, api_key=api_key)
    logger.info(f"Update Augur API key to: {api_key}")
Example #11
File: db.py  Project: ishagarg06/augur
def update_repo_directory(ctx, repo_directory):
    """
    Update Facade worker repo cloning directory
    """
    app = ctx.obj

    updateRepoDirectorySQL = s.sql.text("""
        UPDATE augur_data.settings SET VALUE = :repo_directory WHERE setting='repo_directory';
    """)

    app.database.execute(updateRepoDirectorySQL, repo_directory=repo_directory)
    logger.info(f"Updated Facade repo directory to: {repo_directory}")
Example #12
 def finalize_config(self):
     """
     Parse args and generates a valid config if the given one is bad
     """
     # Close files and save config
     if self.__config_bad:
         logger.info('Regenerating config with missing values...')
         self.__config_file.close()
         self.__config_file = open(self.__config_file_path, 'w')
         config_text = json.dumps(self.__config, sort_keys=True, indent=4)
         config_text = config_text.replace(self.__config_location, '$(AUGUR)')
         self.__config_file.write(config_text)
     self.__config_file.close()
Example #13
 def update(self):
     try:
         # Table creation
         if (buildMode == 'rebuild') or ((not self.db.dialect.has_table(
                 self.db.connect(), 'issue_response_time'))
                                         and buildMode == "auto"):
             logger.info(
                 "[GHTorrentPlus] Creating Issue Response Time table...")
             self.build_issue_response_time()
     except Exception as e:
         logger.error(
             "Could not connect to GHTorrentPlus database. Error: " +
             str(e))
Example #14
def updater_process(name, delay):
    logger.info('Spawned {} updater process with PID {}'.format(
        name, os.getpid()))
    app = Application()
    datasource = getattr(app, name)()
    try:
        while True:
            logger.info('Updating {}...'.format(name))
            datasource.update()
            time.sleep(delay)
    except KeyboardInterrupt:
        os._exit(0)
    except:
        raise
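
updater_process above runs a data source's update() in a loop inside a child process. A hedged, self-contained sketch of that pattern with a stand-in update callable:

import multiprocessing as mp
import os
import time

def do_update():
    print("updating...")

def updater(name, delay, update):
    print("Spawned {} updater process with PID {}".format(name, os.getpid()))
    try:
        while True:
            update()
            time.sleep(delay)
    except KeyboardInterrupt:
        os._exit(0)

if __name__ == "__main__":
    process = mp.Process(target=updater, args=("demo", 1.0, do_update), daemon=True)
    process.start()
    time.sleep(3)   # let the updater run a few cycles
    process.terminate()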
Example #15
File: util.py  Project: ishagarg06/augur
def kill_processes(ctx):
    """
    Terminates all currently running backend Augur processes, including any workers. Will only work in a virtual environment.    
    """
    processes = get_augur_processes()
    if processes != []:
        for process in processes:
            if process.pid != os.getpid():
                # logger.info(f"Killing {process.pid}: {' '.join(process.info['cmdline'][1:])}")
                logger.info(f"Killing process {process.pid}")
                try:
                    process.send_signal(signal.SIGTERM)
                except psutil.NoSuchProcess as e:
                    pass
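
kill_processes depends on Augur's get_augur_processes() helper. A rough sketch of the same find-and-terminate pattern, assuming psutil is installed and matching on the string 'augur' in the command line:

import os
import signal
import psutil  # third-party; pip install psutil

for process in psutil.process_iter(['pid', 'cmdline']):
    cmdline = ' '.join(process.info.get('cmdline') or [])
    if 'augur' in cmdline and process.pid != os.getpid():
        print(f"Killing process {process.pid}")
        try:
            process.send_signal(signal.SIGTERM)
        except psutil.NoSuchProcess:
            pass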
Example #16
    def exit():
        try:
            for pid in worker_pids:
                os.kill(pid, 9)
        except:
            logger.info("Worker process {} already killed".format(pid))
        for process in worker_processes:
            logger.info("Shutting down worker process with pid: {} ...".format(
                process.pid))
            process.terminate()

        if master is not None:
            master.halt()
        logger.info("Shutting down housekeeper updates...")
        if housekeeper is not None:
            housekeeper.shutdown_updates()

        # if hasattr(manager, "shutdown"):
        # wait for the spawner and the worker threads to go down
        #
        if manager is not None:
            manager.shutdown()
            # check if it is still alive and kill it if necessary
            # if manager._process.is_alive():
            manager._process.terminate()

        # Prevent multiprocessing's atexit from conflicting with gunicorn
        logger.info("Killing main augur process with PID: {}".format(
            os.getpid()))
        os.kill(os.getpid(), 9)
        os._exit(0)
Example #17
 def finalize_config(self):
     # Parse args with help
     self.arg_parser.parse_known_args()
     # Close files and save config
     if self.__config_bad:
         logger.info('Regenerating config with missing values...')
         self.__config_file.close()
         self.__config_file = open(self.__config_file_path, 'w')
         config_text = json.dumps(self.__config, sort_keys=True, indent=4)
         config_text = config_text.replace(self.__config_location,
                                           '$(AUGUR)')
         self.__config_file.write(config_text)
     self.__config_file.close()
     if (self.__export_env):
         self.__export_file.close()
Example #18
def worker_start(worker_name=None, instance_number=0, worker_port=None):
    time.sleep(120 * instance_number)
    destination = subprocess.DEVNULL
    try:
        destination = open(
            "workers/{}/worker_{}.log".format(worker_name, worker_port), "a+")
    except IOError as e:
        logger.error(
            "Error opening log file for auto-started worker {}: {}".format(
                worker_name, e))
    process = subprocess.Popen("cd workers/{} && {}_start".format(
        worker_name, worker_name),
                               shell=True,
                               stdout=destination,
                               stderr=subprocess.STDOUT)
    logger.info("{} booted.".format(worker_name))
Example #19
    def updater_process(name, delay, shared):
        """
        Controls a given plugin's update process

        :param name: name of the data source to be updated
        :param delay: time to wait between updates, in seconds
        :param shared: shared object that is also passed to the update
        """
        logger.info('Spawned {} updater process with PID {}'.format(name, os.getpid()))
        app = Application()
        datasource = getattr(app, name)()
        try:
            while True:
                logger.info('Updating {}...'.format(name))
                datasource.update(shared)
                time.sleep(delay)
        except KeyboardInterrupt:
            os._exit(0)
        except:
            raise
Example #20
File: db.py  Project: ishagarg06/augur
def upgrade_db_version(ctx):
    """
    Upgrade the configured database to the latest version
    """
    app = ctx.obj
    check_pgpass_credentials(app.config)
    current_db_version = get_db_version(app)

    update_scripts_filenames = []
    for (_, _, filenames) in walk('schema/generate'):
        update_scripts_filenames.extend(
            [file for file in filenames if 'update' in file])
        # files_temp.extend([file.split("-")[1][14:].split(".")[0] for file in filenames if 'update' in file])
        break

    target_version_script_map = {}
    for script in update_scripts_filenames:
        upgrades_to = int(script.split("-")[1][14:].split(".")[0])
        target_version_script_map[upgrades_to] = str(script)

    target_version_script_map = OrderedDict(
        sorted(target_version_script_map.items()))

    most_recent_version = list(target_version_script_map.keys())[-1]
    if current_db_version == most_recent_version:
        logger.info("Your database is already up to date. ")
    elif current_db_version > most_recent_version:
        logger.info(
            f"Unrecognized version: {current_db_version}\nThe most recent version is {most_recent_version}. Please contact your system administrator to resolve this error."
        )

    for target_version, script_location in target_version_script_map.items():
        if target_version == current_db_version + 1:
            logger.info(
                f"Upgrading from {current_db_version} to {target_version}")
            run_psql_command_in_database(app, '-f',
                                         f"schema/generate/{script_location}")
            current_db_version += 1
Example #21
    def __init__(self,
                 default_config_file_path='augur.config.json',
                 no_config_file=0,
                 config=None):
        """
        Reads config, creates DB session, and initializes cache
        """
        # Open the config file
        self.__config_file_name = 'augur.config.json'
        self.__already_exported = {}
        self.__default_config = {
            "Cache": {
                "config": {
                    "cache.data_dir": "runtime/cache/",
                    "cache.lock_dir": "runtime/cache/",
                    "cache.type": "file"
                }
            },
            "Database": {
                "connection_string": "sqlite:///:memory:",
                "database": "augur",
                "host": "localhost",
                "key": "key",
                "password": "******",
                "port": 5432,
                "schema": "augur_data",
                "user": "******"
            },
            "Development": {
                "developer": "0",
                "interactive": "0"
            },
            "Facade": {
                "check_updates": 1,
                "clone_repos": 1,
                "create_xlsx_summary_files": 1,
                "delete_marked_repos": 0,
                "fix_affiliations": 1,
                "force_analysis": 1,
                "force_invalidate_caches": 1,
                "force_updates": 1,
                "limited_run": 0,
                "multithreaded": 0,
                "nuke_stored_affiliations": 0,
                "pull_repos": 1,
                "rebuild_caches": 1,
                "run_analysis": 1
            },
            "Housekeeper": {
                "jobs": []
            },
            "Plugins": [],
            "Server": {
                "cache_expire": "3600",
                "host": "0.0.0.0",
                "port": "5000",
                "workers": "1"
            },
            "Workers": {}
        }

        _root_augur_dir_path = os.path.dirname(
            os.path.dirname(os.path.realpath(__file__)))
        _possible_config_paths = [
            self.__config_file_name,
            _root_augur_dir_path + "/" + self.__config_file_name,
            f"/opt/augur/{self.__config_file_name}"
        ]
        _config_file_path = default_config_file_path

        for location in _possible_config_paths:
            try:
                f = open(location, "r+")
                _config_file_path = os.path.abspath(location)
                f.close()
                logging.info("Using config file at " +
                             os.path.abspath(location))
                break
            except FileNotFoundError:
                pass

        self.__using_config_file = True
        self.__config_bad = False
        self.__config_file_path = os.path.abspath(
            os.getenv('AUGUR_CONFIG_FILE', _config_file_path))
        self.__config_location = os.path.dirname(self.__config_file_path)
        self.__runtime_location = 'runtime/'
        self.__export_env = os.getenv('AUGUR_ENV_EXPORT', '0') == '1'
        self.__shell_config = None

        if os.getenv('AUGUR_ENV_ONLY', '0') != '1' and no_config_file == 0:
            try:
                self.__config_file = open(self.__config_file_path, 'r+')
            except:
                logger.info('Couldn\'t open {}, attempting to create.'.format(
                    self.__config_file_name))
                if not os.path.exists(self.__config_location):
                    os.makedirs(self.__config_location)
                self.__config_file = open(self.__config_file_path, 'w+')
                self.__config_bad = True

            if self.__export_env:
                export_filename = os.getenv('AUGUR_ENV_EXPORT_FILE',
                                            'augur.config.json.sh')
                self.__export_file = open(export_filename, 'w+')
                logger.info(
                    'Exporting {} to environment variable export statements in {}'
                    .format(self.__config_file_name, export_filename))
                self.__export_file.write('#!/bin/bash\n')

            # Load the config file
            try:
                config_text = self.__config_file.read()
                self.__config = json.loads(config_text)
            except json.decoder.JSONDecodeError as e:
                if not self.__config_bad:
                    self.__using_config_file = False
                    logger.error(
                        '%s could not be parsed, using defaults. Fix that file, or delete it and run this again to regenerate it. Error: %s',
                        self.__config_file_path, str(e))

                self.__config = self.__default_config
        else:
            self.__using_config_file = False
            self.__config = self.__default_config

        if isinstance(config, dict):
            self.__config.update(config)

        # List of data sources that can do periodic updates
        self.__updatable = []
        self.__processes = []

        # Create cache
        cache_config = {
            'cache.type': 'file',
            'cache.data_dir': self.path('$(RUNTIME)/cache/'),
            'cache.lock_dir': self.path('$(RUNTIME)/cache/')
        }
        cache_config.update(
            self.read_config('Cache', 'config', None, cache_config))
        cache_config['cache.data_dir'] = self.path(
            cache_config['cache.data_dir'])
        cache_config['cache.lock_dir'] = self.path(
            cache_config['cache.lock_dir'])
        if not os.path.exists(cache_config['cache.data_dir']):
            os.makedirs(cache_config['cache.data_dir'])
        if not os.path.exists(cache_config['cache.lock_dir']):
            os.makedirs(cache_config['cache.lock_dir'])
        cache_parsed = parse_cache_config_options(cache_config)
        self.cache = CacheManager(**cache_parsed)

        self.metrics = MetricDefinitions(self)

        # Initialize all objects to None
        self._loaded_plugins = {}
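
The constructor above resolves a config file path, falls back to defaults when the file is missing or unparseable, and honors the AUGUR_CONFIG_FILE environment variable. A simplified sketch of that loading strategy (file name and defaults are illustrative only):

import json
import os

DEFAULT_CONFIG = {"Server": {"host": "0.0.0.0", "port": "5000"}}  # illustrative defaults

def load_config(path="augur.config.json"):
    path = os.getenv("AUGUR_CONFIG_FILE", path)
    try:
        with open(path) as config_file:
            return json.load(config_file)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        print("{} could not be used ({}); falling back to defaults".format(path, e))
        return dict(DEFAULT_CONFIG)

config = load_config()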
Example #22
    def __init__(self,
                 config_file='augur.config.json',
                 no_config_file=0,
                 description='Augur application',
                 db_str='sqlite:///:memory:'):
        """
        Reads config, creates DB session, and initializes cache
        """
        # Command line arguments
        # TODO: make this useful
        self.arg_parser = argparse.ArgumentParser(description=description)
        self.arg_parser.parse_known_args()

        # Open the config file
        self.__already_exported = {}
        self.__default_config = {'Plugins': []}
        self.__using_config_file = True
        self.__config_bad = False
        self.__config_file_path = os.path.abspath(
            os.getenv('AUGUR_CONFIG_FILE', config_file))
        self.__config_location = os.path.dirname(self.__config_file_path)
        self.__runtime_location = 'runtime/'
        self.__export_env = os.getenv('AUGUR_ENV_EXPORT', '0') == '1'
        if os.getenv('AUGUR_ENV_ONLY', '0') != '1' and no_config_file == 0:
            try:
                self.__config_file = open(self.__config_file_path, 'r+')
            except:
                logger.info(
                    'Couldn\'t open {}, attempting to create. If you have an augur.cfg, you can convert it to a json file using "make to-json"'
                    .format(config_file))
                if not os.path.exists(self.__config_location):
                    os.makedirs(self.__config_location)
                self.__config_file = open(self.__config_file_path, 'w+')
                self.__config_bad = True
            # Options to export the loaded configuration as environment variables for Docker

            if self.__export_env:
                export_filename = os.getenv('AUGUR_ENV_EXPORT_FILE',
                                            'augur.cfg.sh')
                self.__export_file = open(export_filename, 'w+')
                logger.info(
                    'Exporting {} to environment variable export statements in {}'
                    .format(config_file, export_filename))
                self.__export_file.write('#!/bin/bash\n')

            # Load the config file
            try:
                config_text = self.__config_file.read()
                self.__config = json.loads(config_text)
            except json.decoder.JSONDecodeError as e:
                if not self.__config_bad:
                    self.__using_config_file = False
                    logger.error(
                        '%s could not be parsed, using defaults. Fix that file, or delete it and run this again to regenerate it. Error: %s',
                        self.__config_file_path, str(e))

                self.__config = self.__default_config
        else:
            self.__using_config_file = False
            self.__config = self.__default_config

        # List of data sources that can do periodic updates
        self.__updatable = []
        self.__processes = []

        # Create cache
        cache_config = {
            'cache.type': 'file',
            'cache.data_dir': self.path('$(RUNTIME)/cache/'),
            'cache.lock_dir': self.path('$(RUNTIME)/cache/')
        }
        cache_config.update(
            self.read_config('Cache', 'config', None, cache_config))
        cache_config['cache.data_dir'] = self.path(
            cache_config['cache.data_dir'])
        cache_config['cache.lock_dir'] = self.path(
            cache_config['cache.lock_dir'])
        if not os.path.exists(cache_config['cache.data_dir']):
            os.makedirs(cache_config['cache.data_dir'])
        if not os.path.exists(cache_config['cache.lock_dir']):
            os.makedirs(cache_config['cache.lock_dir'])
        cache_parsed = parse_cache_config_options(cache_config)
        self.cache = CacheManager(**cache_parsed)

        # Create DB Session
        self.db = None
        self.session = None
        if db_str:
            self.db = create_engine(db_str)
            self.__Session = sessionmaker(bind=self.db)
            self.session = self.__Session()

        # Initialize all objects to None
        self.__metrics_status = None
        self._loaded_plugins = {}

        for plugin_name in Application.default_plugins:
            self[plugin_name]
Example #23
 def __init__(self):
     logger.info('example-plugin loaded')
     return
Example #24
def cli(ctx, disable_housekeeper, skip_cleanup):
    """
    Start Augur's backend server
    """
    if not skip_cleanup:
        logger.info("Cleaning up old Augur processes. Just a moment please...")
        ctx.invoke(kill_processes)
        time.sleep(2)
    else:
        logger.info("Skipping cleanup processes.")

    def get_process_id(name):
        """Return process ids found by name or command
        """
        child = subprocess.Popen(['pgrep', '-f', name],
                                 stdout=subprocess.PIPE,
                                 shell=False)
        response = child.communicate()[0]
        return [int(pid) for pid in response.split()]

    app = ctx.obj

    mp.set_start_method('forkserver', force=True)
    master = None

    manager = None
    broker = None
    housekeeper = None

    logger.info("Booting broker and its manager...")
    manager = mp.Manager()
    broker = manager.dict()

    controller = app.read_config('Workers')
    worker_pids = []
    worker_processes = []

    if not disable_housekeeper:
        if not controller:
            return
        for worker in controller.keys():
            if not controller[worker]['switch']:
                continue
            logger.info(
                "Your config has the option set to automatically boot {} instances of the {}"
                .format(controller[worker]['workers'], worker))
            pids = get_process_id(
                "/bin/sh -c cd workers/{} && {}_start".format(worker, worker))
            worker_pids += pids
            if len(pids) > 0:
                worker_pids.append(pids[0] + 1)
                pids.append(pids[0] + 1)
                logger.info(
                    "Found and preparing to kill previous {} worker pids: {}".
                    format(worker, pids))
                for pid in pids:
                    try:
                        os.kill(pid, 9)
                    except:
                        logger.info(
                            "Worker process {} already killed".format(pid))

    @atexit.register
    def exit():
        try:
            for pid in worker_pids:
                os.kill(pid, 9)
        except:
            logger.info("Worker process {} already killed".format(pid))
        for process in worker_processes:
            logger.info("Shutting down worker process with pid: {} ...".format(
                process.pid))
            process.terminate()

        if master is not None:
            master.halt()
        logger.info("Shutting down housekeeper updates...")
        if housekeeper is not None:
            housekeeper.shutdown_updates()

        # if hasattr(manager, "shutdown"):
        # wait for the spawner and the worker threads to go down
        #
        if manager is not None:
            manager.shutdown()
            # check if it is still alive and kill it if necessary
            # if manager._process.is_alive():
            manager._process.terminate()

        # Prevent multiprocessing's atexit from conflicting with gunicorn
        logger.info("Killing main augur process with PID: {}".format(
            os.getpid()))
        os.kill(os.getpid(), 9)
        os._exit(0)

    if not disable_housekeeper:
        logger.info("Booting housekeeper...")
        jobs = deepcopy(app.read_config('Housekeeper', 'jobs'))
        try:
            housekeeper = Housekeeper(
                jobs,
                broker,
                broker_host=app.read_config('Server', 'host'),
                broker_port=app.read_config('Server', 'port'),
                user=app.read_config('Database', 'user'),
                password=app.read_config('Database', 'password'),
                host=app.read_config('Database', 'host'),
                port=app.read_config('Database', 'port'),
                dbname=app.read_config('Database', 'name'))
        except KeyboardInterrupt as e:
            exit()

        logger.info("Housekeeper has finished booting.")

        if controller:
            for worker in controller.keys():
                if controller[worker]['switch']:
                    for i in range(controller[worker]['workers']):
                        logger.info("Booting {} #{}".format(worker, i + 1))
                        worker_process = mp.Process(
                            target=worker_start,
                            kwargs={
                                'worker_name': worker,
                                'instance_number': i,
                                'worker_port': controller[worker]['port']
                            },
                            daemon=True)
                        worker_process.start()
                        worker_processes.append(worker_process)

    host = app.read_config('Server', 'host')
    port = app.read_config('Server', 'port')
    workers = int(app.read_config('Server', 'workers'))
    timeout = int(app.read_config('Server', 'timeout'))
    options = {
        'bind': '%s:%s' % (host, port),
        'workers': workers,
        'accesslog': '-',
        'access_log_format': '%(h)s - %(t)s - %(r)s',
        'timeout': timeout
    }
    logger.info('Starting server...')
    master = Arbiter(
        AugurGunicornApp(options,
                         manager=manager,
                         broker=broker,
                         housekeeper=housekeeper)).run()
Example #25
 def update_all(self):
     print(self.__updatable)
     for updatable in self.__updatable:
         logger.info('Updating {}...'.format(updatable['name']))
         updatable['update']()
Example #26
    def __init__(self,
                 config_file='augur.config.json',
                 no_config_file=0,
                 description='Augur application'):

        # Command line arguments
        # TODO: make this useful
        self.arg_parser = argparse.ArgumentParser(description=description)
        self.arg_parser.parse_known_args()

        # Open the config file
        self.__already_exported = {}
        self.__default_config = {'Plugins': []}
        self.__using_config_file = True
        self.__config_bad = False
        self.__config_file_path = os.path.abspath(
            os.getenv('AUGUR_CONFIG_FILE', config_file))
        self.__config_location = os.path.dirname(self.__config_file_path)
        self.__export_env = os.getenv('AUGUR_ENV_EXPORT', '0') == '1'
        if os.getenv('AUGUR_ENV_ONLY', '0') != '1' and no_config_file == 0:
            try:
                self.__config_file = open(self.__config_file_path, 'r+')
            except:
                logger.info(
                    'Couldn\'t open {}, attempting to create. If you have an augur.cfg, you can convert it to a json file using "make to-json"'
                    .format(config_file))
                if not os.path.exists(self.__config_location):
                    os.makedirs(self.__config_location)
                self.__config_file = open(self.__config_file_path, 'w+')
                self.__config_bad = True
            # Options to export the loaded configuration as environment variables for Docker

            if self.__export_env:
                export_filename = os.getenv('AUGUR_ENV_EXPORT_FILE',
                                            'augur.cfg.sh')
                self.__export_file = open(export_filename, 'w+')
                logger.info(
                    'Exporting {} to environment variable export statements in {}'
                    .format(config_file, export_filename))
                self.__export_file.write('#!/bin/bash\n')

            # Load the config file
            try:
                config_text = self.__config_file.read()
                config_text = config_text.replace('$(AUGUR)',
                                                  self.__config_location)
                self.__config = json.loads(config_text)
            except json.decoder.JSONDecodeError as e:
                if not self.__config_bad:
                    self.__using_config_file = False
                    logger.error(
                        '%s could not be parsed, using defaults. Fix that file, or delete it and run this again to regenerate it. Error: %s',
                        self.__config_file_path, str(e))

                self.__config = self.__default_config
        else:
            self.__using_config_file = False
            self.__config = self.__default_config

        # List of data sources that can do periodic updates
        self.__updatable = []
        self.__processes = []

        # Create cache
        cache_config = self.read_config(
            'Cache', 'config', None, {
                'cache.type': 'file',
                'cache.data_dir': 'runtime/cache/',
                'cache.lock_dir': 'runtime/cache/'
            })
        if not os.path.exists(cache_config['cache.data_dir']):
            os.makedirs(cache_config['cache.data_dir'])
        if not os.path.exists(cache_config['cache.lock_dir']):
            os.makedirs(cache_config['cache.lock_dir'])
        cache_parsed = parse_cache_config_options(cache_config)
        self.cache = CacheManager(**cache_parsed)

        # Initialize all objects to None
        self.__ghtorrent = None
        self.__ghtorrentplus = None
        self.__githubapi = None
        self.__git = None
        self.__librariesio = None
        self.__downloads = None
        self.__publicwww = None
        self.__localCSV = None