def __init__(self, user, password, host, port, dbname, ghtorrent, buildMode="auto"):
    """
    Connect to the GHTorrentPlus database and build derived tables if needed.
    The connection string follows SQLAlchemy's engine URL format
    (http://docs.sqlalchemy.org/en/latest/core/engines.html).

    :param user: Database username
    :param password: Database password
    :param host: Database host
    :param port: Database port
    :param dbname: Name of the GHTorrentPlus database
    :param ghtorrent: A GHTorrent instance used as the source for derived tables
    :param buildMode: 'rebuild' to force table creation, 'auto' to create missing tables only
    """
    char = "charset=utf8"
    self.DB_STR = 'mysql+pymysql://{}:{}@{}:{}/{}?{}'.format(
        user, password, host, port, dbname, char)
    logger.debug('GHTorrentPlus: Connecting to {}:{}/{}?{} as {}'.format(
        host, port, dbname, char, user))
    self.db = s.create_engine(self.DB_STR, poolclass=s.pool.NullPool)
    self.ghtorrent = ghtorrent
    self.buildMode = buildMode  # retained so update() can reuse the same policy
    try:
        # Create the derived table on first run, or when a rebuild is forced
        if (buildMode == 'rebuild') or ((not self.db.dialect.has_table(
                self.db.connect(), 'issue_response_time')) and buildMode == "auto"):
            logger.info("[GHTorrentPlus] Creating Issue Response Time table...")
            self.build_issue_response_time()
    except Exception as e:
        logger.error("Could not connect to GHTorrentPlus database. Error: " + str(e))
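# Why NullPool: it disables connection pooling, so every checkout opens a
# fresh connection and nothing stale lingers between long-lived analysis
# runs. A minimal, self-contained sketch of the same engine setup, using an
# in-memory SQLite URL as a stand-in for the real MySQL connection string:
import sqlalchemy as s

engine = s.create_engine('sqlite:///:memory:', poolclass=s.pool.NullPool)
with engine.connect() as conn:
    print(conn.execute(s.text('SELECT 1')).scalar())  # -> 1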
def update(self):
    try:
        # Re-run table creation using the build policy captured in __init__;
        # the original referenced a bare `buildMode`, which is undefined in
        # this scope, so we read it from the instance instead.
        if (self.buildMode == 'rebuild') or ((not self.db.dialect.has_table(
                self.db.connect(), 'issue_response_time')) and self.buildMode == "auto"):
            logger.info("[GHTorrentPlus] Creating Issue Response Time table...")
            self.build_issue_response_time()
    except Exception as e:
        logger.error("Could not update GHTorrentPlus database. Error: " + str(e))
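# Aside: `dialect.has_table(connection, name)` is a legacy SQLAlchemy idiom.
# On SQLAlchemy 1.4+ the same existence check goes through the inspector; a
# minimal sketch, with an in-memory engine standing in for the real database:
from sqlalchemy import create_engine, inspect

engine = create_engine('sqlite:///:memory:')
table_exists = inspect(engine).has_table('issue_response_time')  # -> False here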
def __init__(self, user, password, host, port, dbname):
    """
    Connect to the GHTorrent database. The connection string follows
    SQLAlchemy's engine URL format
    (http://docs.sqlalchemy.org/en/latest/core/engines.html).

    :param user: Database username
    :param password: Database password
    :param host: Database host
    :param port: Database port
    :param dbname: Name of the GHTorrent database
    """
    self.DB_STR = 'mysql+pymysql://{}:{}@{}:{}/{}'.format(
        user, password, host, port, dbname
    )
    logger.debug('GHTorrent: Connecting to {}:{}/{} as {}'.format(host, port, dbname, user))
    self.db = s.create_engine(self.DB_STR, poolclass=s.pool.NullPool)
    try:
        # Smoke test: resolve a known username to verify the connection works
        self.userid('howderek')
    except Exception as e:
        logger.error("Could not connect to GHTorrent database. Error: " + str(e))
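# A minimal construction sketch for the two classes above. The credentials,
# host, and database names are placeholders, not values from the source;
# GHTorrentPlus layers derived tables on top of an existing GHTorrent
# connection, which is why it takes the GHTorrent instance as an argument.
ghtorrent = GHTorrent('augur', 'secret', '127.0.0.1', 3306, 'ghtorrent')
ghtorrentplus = GHTorrentPlus('augur', 'secret', '127.0.0.1', 3306,
                              'ghtorrentplus', ghtorrent, buildMode='auto')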
def worker_start(worker_name=None, instance_number=0, worker_port=None):
    # Stagger worker boots two minutes apart so instances don't contend on startup
    time.sleep(120 * instance_number)
    destination = subprocess.DEVNULL
    try:
        destination = open(
            "workers/{}/worker_{}.log".format(worker_name, worker_port), "a+")
    except IOError as e:
        logger.error(
            "Error opening log file for auto-started worker {}: {}".format(
                worker_name, e))
    # Launch the worker's start script from its own directory, mirroring
    # both stdout and stderr to the log file (or DEVNULL if the open failed)
    process = subprocess.Popen("cd workers/{} && {}_start".format(
        worker_name, worker_name), shell=True, stdout=destination,
        stderr=subprocess.STDOUT)
    logger.info("{} booted.".format(worker_name))
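# A usage sketch: boot two workers with staggered start times. The worker
# names and ports below are hypothetical; each worker is assumed to ship a
# `workers/<name>/<name>_start` entry point, as the function above expects.
worker_start(worker_name='github_worker', instance_number=0, worker_port=51236)
worker_start(worker_name='repo_info_worker', instance_number=1, worker_port=51237)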
def __init__(self, config_file='augur.config.json', no_config_file=0,
             description='Augur application', db_str='sqlite:///:memory:'):
    """
    Reads config, creates DB session, and initializes cache
    """
    # Command line arguments
    # TODO: make this useful
    self.arg_parser = argparse.ArgumentParser(description=description)
    self.arg_parser.parse_known_args()

    # Open the config file
    self.__already_exported = {}
    self.__default_config = {'Plugins': []}
    self.__using_config_file = True
    self.__config_bad = False
    self.__config_file_path = os.path.abspath(
        os.getenv('AUGUR_CONFIG_FILE', config_file))
    self.__config_location = os.path.dirname(self.__config_file_path)
    self.__runtime_location = 'runtime/'
    self.__export_env = os.getenv('AUGUR_ENV_EXPORT', '0') == '1'
    if os.getenv('AUGUR_ENV_ONLY', '0') != '1' and no_config_file == 0:
        try:
            self.__config_file = open(self.__config_file_path, 'r+')
        except OSError:
            logger.info(
                'Couldn\'t open {}, attempting to create. If you have an augur.cfg, '
                'you can convert it to a JSON file using "make to-json"'
                .format(config_file))
            if not os.path.exists(self.__config_location):
                os.makedirs(self.__config_location)
            self.__config_file = open(self.__config_file_path, 'w+')
            self.__config_bad = True

        # Options to export the loaded configuration as environment variables for Docker
        if self.__export_env:
            export_filename = os.getenv('AUGUR_ENV_EXPORT_FILE', 'augur.cfg.sh')
            self.__export_file = open(export_filename, 'w+')
            logger.info(
                'Exporting {} to environment variable export statements in {}'
                .format(config_file, export_filename))
            self.__export_file.write('#!/bin/bash\n')

        # Load the config file
        try:
            config_text = self.__config_file.read()
            self.__config = json.loads(config_text)
        except json.decoder.JSONDecodeError as e:
            if not self.__config_bad:
                self.__using_config_file = False
                logger.error(
                    '%s could not be parsed, using defaults. Fix that file, or delete '
                    'it and run this again to regenerate it. Error: %s',
                    self.__config_file_path, str(e))
            self.__config = self.__default_config
    else:
        self.__using_config_file = False
        self.__config = self.__default_config

    # List of data sources that can do periodic updates
    self.__updatable = []
    self.__processes = []

    # Create cache
    cache_config = {
        'cache.type': 'file',
        'cache.data_dir': self.path('$(RUNTIME)/cache/'),
        'cache.lock_dir': self.path('$(RUNTIME)/cache/')
    }
    cache_config.update(
        self.read_config('Cache', 'config', None, cache_config))
    cache_config['cache.data_dir'] = self.path(cache_config['cache.data_dir'])
    cache_config['cache.lock_dir'] = self.path(cache_config['cache.lock_dir'])
    if not os.path.exists(cache_config['cache.data_dir']):
        os.makedirs(cache_config['cache.data_dir'])
    if not os.path.exists(cache_config['cache.lock_dir']):
        os.makedirs(cache_config['cache.lock_dir'])
    cache_parsed = parse_cache_config_options(cache_config)
    self.cache = CacheManager(**cache_parsed)

    # Create DB Session
    self.db = None
    self.session = None
    if db_str:
        self.db = create_engine(db_str)
        self.__Session = sessionmaker(bind=self.db)
        self.session = self.__Session()

    # Initialize all objects to None
    self.__metrics_status = None
    self._loaded_plugins = {}
    # Touch each default plugin so it is loaded via __getitem__
    for plugin_name in Application.default_plugins:
        self[plugin_name]
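# A minimal usage sketch. The read_config call mirrors the 4-argument shape
# used in the constructor above (section, name, environment variable,
# default); the 'Server'/'port' keys and fallback value are illustrative
# assumptions, not guaranteed config schema.
app = Application()  # reads augur.config.json, creating it if missing
port = app.read_config('Server', 'port', None, '5000')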
def __init__(self, config_file='augur.config.json', no_config_file=0,
             description='Augur application'):
    # Command line arguments
    # TODO: make this useful
    self.arg_parser = argparse.ArgumentParser(description=description)
    self.arg_parser.parse_known_args()

    # Open the config file
    self.__already_exported = {}
    self.__default_config = {'Plugins': []}
    self.__using_config_file = True
    self.__config_bad = False
    self.__config_file_path = os.path.abspath(
        os.getenv('AUGUR_CONFIG_FILE', config_file))
    self.__config_location = os.path.dirname(self.__config_file_path)
    self.__export_env = os.getenv('AUGUR_ENV_EXPORT', '0') == '1'
    if os.getenv('AUGUR_ENV_ONLY', '0') != '1' and no_config_file == 0:
        try:
            self.__config_file = open(self.__config_file_path, 'r+')
        except OSError:
            logger.info(
                'Couldn\'t open {}, attempting to create. If you have an augur.cfg, '
                'you can convert it to a JSON file using "make to-json"'
                .format(config_file))
            if not os.path.exists(self.__config_location):
                os.makedirs(self.__config_location)
            self.__config_file = open(self.__config_file_path, 'w+')
            self.__config_bad = True

        # Options to export the loaded configuration as environment variables for Docker
        if self.__export_env:
            export_filename = os.getenv('AUGUR_ENV_EXPORT_FILE', 'augur.cfg.sh')
            self.__export_file = open(export_filename, 'w+')
            logger.info(
                'Exporting {} to environment variable export statements in {}'
                .format(config_file, export_filename))
            self.__export_file.write('#!/bin/bash\n')

        # Load the config file, expanding $(AUGUR) to the config file's directory
        try:
            config_text = self.__config_file.read()
            config_text = config_text.replace('$(AUGUR)', self.__config_location)
            self.__config = json.loads(config_text)
        except json.decoder.JSONDecodeError as e:
            if not self.__config_bad:
                self.__using_config_file = False
                logger.error(
                    '%s could not be parsed, using defaults. Fix that file, or delete '
                    'it and run this again to regenerate it. Error: %s',
                    self.__config_file_path, str(e))
            self.__config = self.__default_config
    else:
        self.__using_config_file = False
        self.__config = self.__default_config

    # List of data sources that can do periodic updates
    self.__updatable = []
    self.__processes = []

    # Create cache
    cache_config = self.read_config(
        'Cache', 'config', None, {
            'cache.type': 'file',
            'cache.data_dir': 'runtime/cache/',
            'cache.lock_dir': 'runtime/cache/'
        })
    if not os.path.exists(cache_config['cache.data_dir']):
        os.makedirs(cache_config['cache.data_dir'])
    if not os.path.exists(cache_config['cache.lock_dir']):
        os.makedirs(cache_config['cache.lock_dir'])
    cache_parsed = parse_cache_config_options(cache_config)
    self.cache = CacheManager(**cache_parsed)

    # Initialize all data-source handles to None; they are created lazily
    self.__ghtorrent = None
    self.__ghtorrentplus = None
    self.__githubapi = None
    self.__git = None
    self.__librariesio = None
    self.__downloads = None
    self.__publicwww = None
    self.__localCSV = None
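# How the $(AUGUR) token resolves: it is plain text substitution on the raw
# JSON before parsing, replacing the token with the directory containing the
# config file. The section and key names below are illustrative only.
import json

raw = '{"Git": {"repo_directory": "$(AUGUR)/repos/"}}'
config = json.loads(raw.replace('$(AUGUR)', '/home/user/augur'))
print(config['Git']['repo_directory'])  # -> /home/user/augur/repos/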
def __init__(self, default_config_file_path='augur.config.json', no_config_file=0, config=None):
    """
    Reads config, creates DB session, and initializes cache
    """
    # Open the config file
    self.__config_file_name = 'augur.config.json'
    self.__already_exported = {}
    self.__default_config = {
        "Cache": {
            "config": {
                "cache.data_dir": "runtime/cache/",
                "cache.lock_dir": "runtime/cache/",
                "cache.type": "file"
            }
        },
        "Database": {
            "connection_string": "sqlite:///:memory:",
            "database": "augur",
            "host": "localhost",
            "key": "key",
            "password": "******",
            "port": 5432,
            "schema": "augur_data",
            "user": "******"
        },
        "Development": {
            "developer": "0",
            "interactive": "0"
        },
        "Facade": {
            "check_updates": 1,
            "clone_repos": 1,
            "create_xlsx_summary_files": 1,
            "delete_marked_repos": 0,
            "fix_affiliations": 1,
            "force_analysis": 1,
            "force_invalidate_caches": 1,
            "force_updates": 1,
            "limited_run": 0,
            "multithreaded": 0,
            "nuke_stored_affiliations": 0,
            "pull_repos": 1,
            "rebuild_caches": 1,
            "run_analysis": 1
        },
        "Housekeeper": {
            "jobs": []
        },
        "Plugins": [],
        "Server": {
            "cache_expire": "3600",
            "host": "0.0.0.0",
            "port": "5000",
            "workers": "1"
        },
        "Workers": {}
    }

    # Search several candidate locations for an existing config file
    _root_augur_dir_path = os.path.dirname(
        os.path.dirname(os.path.realpath(__file__)))
    _possible_config_paths = [
        self.__config_file_name,
        _root_augur_dir_path + "/" + self.__config_file_name,
        f"/opt/augur/{self.__config_file_name}"
    ]
    _config_file_path = default_config_file_path
    for location in _possible_config_paths:
        try:
            f = open(location, "r+")
            _config_file_path = os.path.abspath(location)
            f.close()
            logger.info("Using config file at " + os.path.abspath(location))
            break
        except FileNotFoundError:
            pass

    self.__using_config_file = True
    self.__config_bad = False
    self.__config_file_path = os.path.abspath(
        os.getenv('AUGUR_CONFIG_FILE', _config_file_path))
    self.__config_location = os.path.dirname(self.__config_file_path)
    self.__runtime_location = 'runtime/'
    self.__export_env = os.getenv('AUGUR_ENV_EXPORT', '0') == '1'
    self.__shell_config = None
    if os.getenv('AUGUR_ENV_ONLY', '0') != '1' and no_config_file == 0:
        try:
            self.__config_file = open(self.__config_file_path, 'r+')
        except OSError:
            logger.info('Couldn\'t open {}, attempting to create.'.format(
                self.__config_file_name))
            if not os.path.exists(self.__config_location):
                os.makedirs(self.__config_location)
            self.__config_file = open(self.__config_file_path, 'w+')
            self.__config_bad = True

        # Options to export the loaded configuration as environment variables
        if self.__export_env:
            export_filename = os.getenv('AUGUR_ENV_EXPORT_FILE', 'augur.config.json.sh')
            self.__export_file = open(export_filename, 'w+')
            logger.info(
                'Exporting {} to environment variable export statements in {}'
                .format(self.__config_file_name, export_filename))
            self.__export_file.write('#!/bin/bash\n')

        # Load the config file
        try:
            config_text = self.__config_file.read()
            self.__config = json.loads(config_text)
        except json.decoder.JSONDecodeError as e:
            if not self.__config_bad:
                self.__using_config_file = False
                logger.error(
                    '%s could not be parsed, using defaults. Fix that file, or delete '
                    'it and run this again to regenerate it. Error: %s',
                    self.__config_file_path, str(e))
            self.__config = self.__default_config
    else:
        self.__using_config_file = False
        self.__config = self.__default_config

    # An explicit config dict overrides whatever was loaded from disk
    if isinstance(config, dict):
        self.__config.update(config)

    # List of data sources that can do periodic updates
    self.__updatable = []
    self.__processes = []

    # Create cache
    cache_config = {
        'cache.type': 'file',
        'cache.data_dir': self.path('$(RUNTIME)/cache/'),
        'cache.lock_dir': self.path('$(RUNTIME)/cache/')
    }
    cache_config.update(
        self.read_config('Cache', 'config', None, cache_config))
    cache_config['cache.data_dir'] = self.path(cache_config['cache.data_dir'])
    cache_config['cache.lock_dir'] = self.path(cache_config['cache.lock_dir'])
    if not os.path.exists(cache_config['cache.data_dir']):
        os.makedirs(cache_config['cache.data_dir'])
    if not os.path.exists(cache_config['cache.lock_dir']):
        os.makedirs(cache_config['cache.lock_dir'])
    cache_parsed = parse_cache_config_options(cache_config)
    self.cache = CacheManager(**cache_parsed)

    self.metrics = MetricDefinitions(self)

    # Initialize all plugin handles to None
    self._loaded_plugins = {}
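# A usage sketch of the `config` override: because the constructor applies it
# with dict.update, each key passed here replaces the corresponding top-level
# section of the loaded configuration wholesale. The connection string below
# is a placeholder, not a value from the source.
app = Application(config={
    'Database': {
        'connection_string': 'postgresql://augur:password@localhost:5432/augur'
    }
})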