Code example #1
File: regexbot.py  Project: cequencer/slackbots
	def __init__(self, config_path=None):
		if config_path is None:
			config_path = 'regexbot.ini'
		config = RawConfigParser()
		config.read_dict(DEFAULT_CONFIG)
		config.read(config_path)

		self.rtm_token = config.get('regexbot', 'rtm_token')

		self.channel_flood_cooldown = timedelta(seconds=config.getint('regexbot', 'channel_flood_cooldown'))
		self.global_flood_cooldown = timedelta(seconds=config.getint('regexbot', 'global_flood_cooldown'))
		self.max_messages = config.getint('regexbot', 'max_messages')
		self.max_message_size = config.getint('regexbot', 'max_message_size')

		self.version = str(config.get('regexbot', 'version')) + '; %s'
		try:
			self.version = self.version % Popen(["git", "branch", "-v", "--contains"], stdout=PIPE).communicate()[0].decode().strip()
		except Exception:
			self.version = self.version % 'unknown'

		self._last_message_times = {}
		self._last_message = datetime.utcnow()
		self._message_buffer = {}

		self.ignore_list = []
		if config.has_section('ignore'):
			for k,v in config.items('ignore'):
				try:
					self.ignore_list.append(regex.compile(str(v), regex.I))
				except Exception as ex:
					print("Error compiling regular expression in ignore list (%s):" % k)
					print("  %s" % v)
					print(ex)
					exit(1)
Code example #2
File: test_config.py  Project: mutalex/Radicale
 def _write_config(self, config_dict, name):
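     """Write config_dict to an INI file named *name* under self.colpath and return its path."""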
     parser = RawConfigParser()
     parser.read_dict(config_dict)
     config_path = os.path.join(self.colpath, name)
     with open(config_path, "w") as f:
         parser.write(f)
     return config_path
Code example #3
File: manager.py  Project: jnphilipp/Feedindicator
 def save(self):
     """Save configuration to file."""
     parser = RawConfigParser()
     parser.optionxform = str
     parser.read_dict({'Options': self._configs})
     with open(os.path.join(app_config_dir, 'config'), 'w',
               encoding='utf-8') as f:
         parser.write(f)
Code example #4
File: config.py  Project: vantu5z/mailnag
def read_cfg():
	cfg = RawConfigParser()
	cfg.read_dict(mailnag_defaults)
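	# defaults are loaded first; the on-disk cfg_file (if any) overrides them below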

	if os.path.exists(cfg_file):
		cfg.read(cfg_file)

	return cfg
Code example #5
def write_settings(data):
    """Write dict to settings config.

    :param data: dict for write
    """
    conf = RawConfigParser()
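    # each top-level key of data becomes a section, its nested dict the options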
    conf.read_dict(data)
    with open(CONF_SETTINGS, 'w', encoding='utf-8') as file:
        conf.write(file)
Code example #7
 def __init__(self, cfg_fn="config.ini"):
     ##    def __init__(self, bd, blf="md5ban.csv", con=3, upd_int=1):
     self.valid = False
     cp = RawConfigParser()
     cp.read_dict(DEFAULT_CFG)
     # set the default verbosity first so the except handler below can report errors
     self.con = int(DEFAULT_CFG[NAME]["verbosity"])
     try:
         cp.read(cfg_fn)
     except Exception:
         if self.con >= 1:
             print("Could not load the configuration!", file=sys.stderr)
             return
     try:
         self.bds = cp[NAME]["build-banlist-from"]  # banlist data source
     except KeyError:  # if this key is absent, use existing banlist file
         self.bds = None
     try:
         self.upd_int = float(
             cp[NAME]["con-update"])  # console update intvl
         self.con = int(cp[NAME]["verbosity"])  # verbosity level 0-4
         self.tmp = cp[NAME]["temp"]  # temp folder path for ucc
         self.blf = cp[NAME]["banlist"]  # banlist file name
         self.rds = cp[NAME]["data-source"]  # redirect data source
         self.ucc = cp[NAME]["ucc"]  # path to the ucc executable to use
         self.out = cp[NAME]["output-folder"]  # uz2 destination folder
         self.bs = int(
             cp[NAME]["batch-size"])  # num. files for ucc compress
     except KeyError:
         if self.con >= 1:
             print("Could not load an incomplete configuration",
                   file=sys.stderr)
             return
     except ValueError:
         if self.con >= 1:
             print("Could not parse all values in the configuration",
                   file=sys.stderr)
             return
     # cleanup for easier computing
     self.tmp = abspath(self.tmp)
     self.rds = abspath(self.rds)
     self.blf = abspath(self.blf)
     self.ucc = abspath(self.ucc)
     if self.bds and not exists(self.blf):
         self.create_banlist()
     self.check_hash = exists(self.blf)
     if not exists(self.out):
         mkdir(self.out)
     # determine which paths we want to walk through
     self.bl_wanted_paths = [join(self.bds, p) for p in BASE_EXT.keys()]
     self.rd_wanted_paths = [join(self.rds, p) for p in BASE_EXT.keys()]
     self.valid = True
Code example #8
File: test_server.py  Project: hackjackyer/Radicale
 def test_wsgi_server(self):
     config_path = os.path.join(self.colpath, "config")
     parser = RawConfigParser()
     parser.read_dict(configuration_to_dict(self.configuration))
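     # serialize the in-memory configuration to an INI file for the subprocess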
     with open(config_path, "w") as f:
         parser.write(f)
     env = os.environ.copy()
     env["PYTHONPATH"] = os.pathsep.join(sys.path)
     env["RADICALE_CONFIG"] = config_path
     p = subprocess.Popen([
         sys.executable, "-m", "waitress", "--listen",
         self.configuration.get_raw("server", "hosts"),
         "radicale:application"
     ],
                          env=env)
     try:
         self.get("/", is_alive_fn=lambda: p.poll() is None, check=302)
     finally:
         p.terminate()
         p.wait()
Code example #9
 def test_wsgi_server(self):
     config_path = os.path.join(self.colpath, "config")
     parser = RawConfigParser()
     parser.read_dict(configuration_to_dict(self.configuration))
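     # write the configuration to disk so gunicorn can load it via RADICALE_CONFIG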
     with open(config_path, "w") as f:
         parser.write(f)
     env = os.environ.copy()
     env["PYTHONPATH"] = os.pathsep.join(sys.path)
     p = subprocess.Popen([
         sys.executable,
         "-c", "from gunicorn.app.wsgiapp import run; run()",
         "--bind", self.configuration.get_raw("server", "hosts"),
         "--env", "RADICALE_CONFIG=%s" % config_path, "radicale"], env=env)
     try:
         status, _, _ = self.request(
             "GET", "/", is_alive_fn=lambda: p.poll() is None)
         assert status == 302
     finally:
         p.terminate()
         p.wait()
     assert p.returncode == 0
Code example #11
File: setup.py  Project: luizperes/mal
def config():
    """Create a RawConfigParser and if exists read it before return

    :returns: the current config or a new one
    :rtype: configparser.RawConfigParser
    """
    parser = RawConfigParser()
    if path.exists(CONFIG_PATH):
        with open(CONFIG_PATH, 'r') as f:
            parser.read_file(f)

    if CONFIG_SECTION not in parser:
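        # no usable config on disk yet: seed the parser with the defaults and persist them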
        parser.read_dict(DEFAULT_CONFIG)
        # ensure that directory app_dir exists or creates otherwise
        os.makedirs(APP_DIR, exist_ok=True)
        with open(CONFIG_PATH, 'w') as f:
            parser.write(f)
    elif 'animation' not in parser[CONFIG_SECTION]:
        parser.set(CONFIG_SECTION, 'animation', 'True')  # option values must be strings
        with open(CONFIG_PATH, 'w') as f:
            parser.write(f)

    return parser
Code example #12
def create_config(file_path):
    # create config parser instance
    configparser = RawConfigParser(allow_no_value=True)
    # create required sections
    configparser.add_section('global')
    configparser.add_section('basecon')
    configparser.add_section('seco')
    configparser.add_section('redis')
    configparser.add_section('memcached')
    configparser.add_section('kvs')
    configparser.add_section('kvs:init')
    configparser.add_section('decibel')
    configparser.add_section('sqlite:init')
    configparser.add_section('sqlite:stmt')
    configparser.add_section('mysql')
    configparser.add_section('mysql:init')
    configparser.add_section('mysql:stmt')
    # add comments to sections
    configparser.set('global', '; global settings')
    configparser.set('basecon', '; choose base from (2 <= base <= 65)')
    configparser.set('seco',
                     "; choose `serialize` in ('json', 'msgpack', 'pickle')")
    configparser.set('seco', "; choose `compress` in ('zlib', 'bz2')")
    configparser.set(
        'kvs', "; `engine` in (':memory:', 'redis', 'memcached', 'dbm')")
    configparser.set('kvs',
                     "; `path` is only used when `engine` is set to 'dbm'")
    configparser.set('kvs:init',
                     '; `key = value` format, value can be valid JSON string')
    configparser.set('decibel',
                     "; choose decibel `engine` in ('sqlite', 'mysql')")
    configparser.set('decibel',
                     "; `path` is only used when `engine` is set to 'sqlite'")
    configparser.set('sqlite:init',
                     '; `stmt_id = stmt` format, initialize statements')
    configparser.set('sqlite:stmt',
                     '; `stmt_id = stmt` format, regular statements')
    configparser.set('mysql:init',
                     '; `stmt_id = stmt` format, initialize statements')
    configparser.set('mysql:stmt',
                     '; `stmt_id = stmt` format, regular statements')
    # read in default configs
    configparser.read_dict({
        'global': {},
        'basecon': {
            'base': 62
        },
        'seco': {
            'serialize': 'msgpack',
            'compress': 'zlib'
        },
        'redis': {
            ';unix_socket_path': '',
            'host': 'localhost',
            'port': '6379',
            'password': '',
            'db': '0',
        },
        'memcached': {
            'host': 'localhost',
            'port': '11211'
        },
        'kvs': {
            'initialize': 'false',
            'engine': ':memory:',
            'path': './database.kvs'
        },
        'kvs:init': {},
        'decibel': {
            'initialize': 'false',
            'engine': 'sqlite',
            'path': './database.sqlite'
        },
        'sqlite:init': {},
        'sqlite:stmt': {},
        'mysql': {
            'host': 'localhost',
            'port': '3306',
            'user': '',
            'password': '',
            'database': ''
        },
        'mysql:init': {},
        'mysql:stmt': {}
    })
    # write default configs to file
    with open(file_path, 'w', encoding='UTF8') as fp:
        configparser.write(fp)
Code example #13
class Utilize(object):
  """ Utilize class for common resources """

  __slots__ = (
    '_config',
    '_basecon', '_seco', '_kvs', '_decibel',
    '_redis', '_memcached', '_message_broker'
  )

  def __init__(self, config = './config.ini'):
    """
    Resource class constructor
    :param config: mixed, the config resource
    """
    # import ini file config parser
    from configparser import RawConfigParser
    # read config from config resource
    self._config = RawConfigParser()
    self.update(config)
    # initialize attributes
    self._basecon = None
    self._seco = None
    self._kvs = None
    self._decibel = None
    self._redis = None
    self._memcached = None
    self._message_broker = None

  def update(self, config, string = False):
    """
    Update config with file, string or dict
    :param config: mixed, config resource
    :param string: bool, whether read string
    :return: None
    """
    # type check and decide how to update
    if isinstance(config, (str, bytes, bytearray)):
      config = config if isinstance(config, str) \
        else config.decode(encoding = 'UTF8')
      self._config.read(config) if not string \
        else self._config.read_string(config)
    # update content from a dict
    elif isinstance(config, dict):
      self._config.read_dict(config)

  @staticmethod
  def _boolean(keyword):
    """
    Boolean-ize a string
    :param keyword: str|bytes|bytearray, keyword
    :return: bool
    """
    # convert keyword into string
    keyword = keyword.decode(encoding = 'UTF8') \
      if isinstance(keyword, (bytes, bytearray)) \
      else keyword
    return keyword.lower() in (
      '1', 't', 'y', 'true', 'yes',
      'on', 'ok', 'okay', 'confirm'
    )

  @property
  def config(self):
    """
    Acquire the ConfigParse instance
    :return: configparse, the ConfigParse instance
    """
    return self._config

  @property
  def basecon(self):
    """
    Acquire a singleton base_convert instance
    :return: base_convert, a base_convert instance
    """
    # return if exists
    if self._basecon is not None: return self._basecon
    # else instantiate and return
    from basecon import BaseCon
    base = int(self._config['basecon']['base']) \
      if 'basecon' in self._config else 62
    self._basecon = BaseCon(base = base)
    return self._basecon

  @property
  def seco(self):
    """
    Acquire a singleton SeCo instance
    :return: seco, a SeCo instance
    """
    # return if exists
    if self._seco is not None: return self._seco
    # else instantiate and return
    from seco import SeCo
    if 'seco' in self._config:
      self._seco = SeCo(**self._config['seco'])
    else:
      self._seco = SeCo()
    return self._seco

  @property
  def kvs(self):
    """
    Acquire a singleton k-v store instance

    DO NOT INVOKE BEFORE MULTI-PROC FORKING

    :return: kvs, a KVS instance
    """
    # return only one instance of database instance
    if self._kvs is not None: return self._kvs
    # else instantiate and return
    from kvs import KVS
    # acquire config and serialize instance
    config = self._config
    seco = self.seco
    # attempts to get the kvs configs
    kvs_config = config['kvs'] \
      if 'kvs' in config else {}
    kvs_init = 'initialize' in kvs_config \
      and self._boolean(kvs_config['initialize'])
    kvs_engine = kvs_config['engine'].lower() \
      if 'engine' in kvs_config else ':memory:'
    kvs_path = kvs_config['path'] \
      if 'path' in kvs_config else './database.kvs'
    # instantiate kvs according to configs
    if kvs_engine == ':memory:':
      engine = KVS(serialize = seco)
    elif kvs_engine in ('dbm', 'gdbm', 'ndbm'):
      engine = KVS(kvs_path, seco)
    elif kvs_engine == 'redis':
      engine = KVS(self.redis, seco)
    elif kvs_engine == 'memcached':
      engine = KVS(self.memcached, seco)
    else:
      raise NotImplementedError(
        'Other databases not supported yet.'
      )
    # initialize kv-store
    if kvs_init and 'kvs:init' in config:
      # import json for decoding
      import json
      for key, value in config['kvs:init'].items():
        # try to decode value as json
        try: engine.set(key, json.loads(value))
        except (json.JSONDecodeError, ValueError):
          engine.set(key, value)
    # preserve and return kv-store instance
    self._kvs = engine
    return self._kvs

  @property
  def decibel(self):
    """
    Acquire a singleton decibel instance

    DO NOT INVOKE BEFORE MULTI-PROC FORKING

    :return: decibel, a Decibel instance
    """
    # return only one instance of database instance
    if self._decibel is not None: return self._decibel
    from decibel import Decibel
    # set config shorthand
    config = self._config
    # attempts to acquire decibel configs
    db_config = config['decibel'] \
      if 'decibel' in config else {}
    db_init = 'initialize' in db_config \
      and self._boolean(db_config['initialize'])
    db_engine = db_config['engine'].lower() \
      if 'engine' in db_config else 'sqlite'
    db_path = db_config['path'] \
      if 'path' in db_config else './database.sqlite'
    # initialize database instance, acquire statements
    if db_engine in ('sqlite', 'sqlite3'):
      import sqlite3
      engine = sqlite3.connect(db_path)
      init = tuple(config['sqlite:init'].values()) \
        if db_init and 'sqlite:init' in config else ()
      stmt = dict(config['sqlite:stmt']) \
        if 'sqlite:stmt' in config else {}
    elif db_engine == 'mysql':
      from mysql.connector import connect
      engine = connect(**(
        config['mysql'] if 'mysql' in config else {}
      ))
      init = tuple(config['mysql:init'].values()) \
        if db_init and 'mysql:init' in config else ()
      stmt = dict(config['mysql:stmt']) \
        if 'mysql:stmt' in config else {}
    else:
      raise NotImplementedError(
        'Other databases not supported yet.'
      )
    # initialize sql database
    if db_init and init:
      cursor = engine.cursor()
      for init_stmt in init:
        cursor.execute(init_stmt)
      else:
        cursor.close()
        engine.commit()
    # preserve and return decibel instance
    self._decibel = Decibel(engine, stmt)
    return self._decibel

  @property
  def redis(self):
    """
    Acquire a singleton redis instance

    DO NOT INVOKE BEFORE MULTI-PROC FORKING

    :return: redis, a Redis instance
    """
    # return if exists
    if self._redis is not None: return self._redis
    # else instantiate and return
    from redis import Redis
    # acquire config
    config = self._config['redis'] \
      if 'redis' in self._config else {}
    # preserve and return
    self._redis = Redis(**config)
    return self._redis

  @property
  def memcached(self):
    """
    Acquire a singleton memcached instance

    DO NOT INVOKE BEFORE MULTI-PROC FORKING

    :return: memcached, the instance
    """
    # return if exists
    if self._memcached is not None:
      return self._memcached
    # else instantiate and return
    from pymemcache.client.base \
      import Client as Memcached
    # acquire config
    host, port = 'localhost', 11211
    if 'memcached' in self._config:
      config = self._config['memcached']
      # the default memcached config uses 'host'/'port' keys
      host = config['host'] \
        if 'host' in config else 'localhost'
      port = int(config['port']) \
        if 'port' in config else 11211
    # preserve and return
    self._memcached = Memcached((host, port))
    return self._memcached

  @property
  def message_broker(self):
    """
    Acquire a singleton message broker instance
    :return: message_broker, a message broker
    """
    # return only one instance of broker
    if self._message_broker is not None:
      return self._message_broker
    # else instantiate and return
    from msgr import MessageQueue, MessageBroker
    # initialize and return message broker instance
    self._message_broker = \
      MessageBroker(
        job = MessageQueue(), res = MessageQueue(),
        rej = MessageQueue(), ser = MessageQueue()
      )
    return self._message_broker
Code example #14
File: regexbot.py  Project: cequencer/ircbots
	'ipv6': 'no',
	'nick': 'regexbot',
	'channels': '#test',
	'channel_flood_cooldown': 5,
	'global_flood_cooldown': 1,
	'max_messages': 25,
	'max_message_size': 200,
	'version': 'regexbot; https://github.com/micolous/ircbots/',
	'translate_enabled': "off",
	'reconnect_to_server': "off",
	"force_ending_slash": "on"
	}
}

config = RawConfigParser()
config.read_dict(DEFAULT_CONFIG)
try:
	config.read_file(open(argv[1]))
except Exception:
	try:
		config.read_file(open('regexbot.ini'))
	except Exception:
		print("Syntax:")
		print("  %s [config]" % argv[0])
		print("")
		print("If no configuration file is specified or there was an error, it will default to `regexbot.ini'.")
		print("If there was a failure reading the configuration, it will display this message.")
		exit(1)

# read config
SERVER = config.get('regexbot', 'server')
Code example #16
        'color': 'grey'
    },
    'selection': {
        'color': 'yellow',
        'color_over_cell': 'red'
    },
    'pattern_directories': {
        'startup': ''
    }
}

config_file = 'settings.cfg'

if __name__ == '__main__':
    settings = RawConfigParser()
    settings.read_dict(DEFAULTS)
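    # defaults first; the optional settings.cfg read below overrides them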

    # read settings
    try:
        with open(config_file, 'r') as f:
            settings.read_file(f)
    except (FileNotFoundError, IOError):
        pass

    # write to settings
    try:
        with open(config_file, 'w') as f:
            settings.write(f)
    except IOError:
        pass
Code example #17
def main():

    try:
        parser = argparse.ArgumentParser(
            prog="startproject",
            usage="startproject.py -p myproject -t dbtype")

        parser.add_argument("-p",
                            dest="project",
                            nargs="?",
                            type=str,
                            help="<string> name of your project")
        parser.add_argument(
            "-t",
            dest="dbtype",
            nargs="?",
            default="sql",
            choices=['sql', 'mongodb', 'atlas'],
            help=
            "<string> store data type: sql or mongodb or to the atlas cloud")

        args = parser.parse_args()
        project = args.project
        dbtype = args.dbtype

    except argparse.ArgumentError as argerror:
        print(argerror)
        sys.exit(1)

    print("""
    Starting a new agrimetscraper project
    """)

    main_path = os.getcwd()
    project_path = os.path.join(main_path, project)

    if not os.path.exists(project_path):
        os.makedirs(project_path)
    else:
        raise FileExistsError(f"{project} already exists")

    dbdir = os.path.join(project_path, f"{project}-database")
    logdir = os.path.join(project_path, f"{project}-log")
    configdir = os.path.join(project_path, f"{project}-config")
    stationdir = os.path.join(project_path, f"{project}-stations")

    if not os.path.exists(dbdir):
        os.makedirs(dbdir)
    if not os.path.exists(logdir):
        os.makedirs(logdir)
    if not os.path.exists(configdir):
        os.makedirs(configdir)
    if not os.path.exists(stationdir):
        os.makedirs(stationdir)

    # initialize file names in each directories
    dbname = project + '.db'
    dbfilepath = os.path.join(dbdir, dbname)

    logfilename = project + ".log"
    logfilepath = os.path.join(logdir, logfilename)

    configfilename = project + ".ini"
    configfilepath = os.path.join(configdir, configfilename)

    stationfilename = "stations.csv"
    stationfilepath = os.path.join(stationdir, stationfilename)

    global_settings = basic_configs

    # add new settings to config file
    global_settings['PROJECT_SETTINGS']['project_name'] = project
    global_settings['PROJECT_SETTINGS']['project_path'] = project_path
    global_settings['PROJECT_SETTINGS'][
        'project_setting_path'] = configfilepath
    global_settings['DB_SETTINGS']['database_path'] = dbfilepath
    global_settings['DB_SETTINGS']['database_type'] = dbtype
    global_settings['DB_SETTINGS']['database_name'] = (dbname)
    global_settings['LOG_SETTINGS']['logfile_path'] = logfilepath
    global_settings['LOG_SETTINGS']['logfile_name'] = logfilename
    global_settings['LOG_SETTINGS'][
        'logfile_format'] = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    global_settings['LOG_SETTINGS'][
        'logfile_datetimefmt'] = '%Y-%m-%d %H:%M:%S'
    global_settings['STATION_SETTINGS']['station_dir'] = stationfilepath

    config = RawConfigParser()
    config.read_dict(global_settings)

    print(f"\ninitializing config file: {configfilename}")
    with open(configfilepath, 'w') as config_handle:
        config.write(config_handle)

    # create log file
    print(f"making an empty log file: {logfilename}")
    with open(logfilepath, 'a') as log_handle:
        pass

    # create stations.csv
    print("retrieving stations information as csv")
    config = Configtuner(configfilepath)
    url = config.getconfig('STATION_SETTINGS', 'station_url')
    station = Stationinfo(url, stationfilepath)
    station_df = station.querysites()

    config.setconfig("DB_SETTINGS", "database_tables", "StationInfo")

    logger = Setlog(configfilepath, "startproject")

    connect_string = config.getconfig("DB_SETTINGS", "connect_string")

    if dbtype == 'sql':
        # create db file
        print(f"making an database: {dbname}")
        logger.info(f"making an SQL database: {dbname}")
        conn = sqlite3.connect(dbfilepath)
        station_df.save2sql("StationInfo", conn)

        conn.commit()
        conn.close()

    elif dbtype == 'mongodb':

        if connect_string != "localhost":
            logger.exception(
                "host selected not match database type. Choose mongodb for local storage in your ini file"
            )
            raise ValueError(
                "dbtype is not matching to the host type. Choose mongodb for local storage in your ini file"
            )

        print(f"making an database: {dbname}")
        logger.info(f"making a mongo database: {dbname}")
        # create collection from panda
        df = station_df.df_filtered
        data = df.to_dict(orient='records')
        mongo_conn = Mongosetup(dbdir, logger)
        mongo_conn.start_mongodb()
        db, _ = get_db(project, connect_string)
        db = db['StationInfo']  # collection
        db.insert_many(
            data
        )  # no need to consider update, once the project is setup, this collection will stand alone

    elif dbtype == "atlas":
        print(f"connecting to Mongo Atlas: database name: {dbname}")
        logger.info(f"connecting to Mongo Atlas: database name: {dbname}")

        connect_string = input("\nInput your connect string to atlas: ")
        password = getpass.getpass("\nPassword: ")

        # the connect string must be an Atlas URI
        if not connect_string.startswith("mongodb+srv://"):
            logger.exception(
                "host selected not match database type. Choose atlas for cloud storage in your ini file"
            )
            raise ValueError(
                "dbtype is not matching to the host type. Choose atlas for cloud storage in your ini file"
            )

        config.setconfig("DB_SETTINGS", "connect_string", connect_string)

        # create collection from panda
        df = station_df.df_filtered
        data = df.to_dict(orient='records')
        db, _ = get_db(project, connect_string)
        db = db['StationInfo']
        db.insert_many(
            data
        )  # no need to consider update, once the project is setup, this collection will stand alone

    logger.info(f"{project} finished initialization.")

    # copy files to local project location
    runprojectpath = os.path.realpath(runproject.__file__)
    pipelinepath = os.path.realpath(pipeline.__file__)
    shutil.copy2(runprojectpath, project_path)
    shutil.copy2(pipelinepath, project_path)
    print(
        f"\n{project} finished initialization.\nYou can modify your local '.ini' file in the config folder to schedule scrape time and then run RunProject!\n"
    )
Code example #18
File: flattenini.py  Project: ebouaziz/miscripts
def dict2ini(inifile, sections):
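    """Write a nested {section: {option: value}} dict to inifile."""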
    cp = RawConfigParser(strict=True)
    cp.read_dict(sections)
    with open(inifile, 'wt') as inifp:
        cp.write(inifp)