Пример #1
0
    def __init__(self, email=None, api_key=None, config_file=None,
                 verbose=False, retry_on_errors=True,
                 site=None, client="API: Python"):
        """Create an API client.

        If either ``api_key`` or ``email`` is not given, both (and
        optionally the site) are loaded from an ini file — ``config_file``
        if given, otherwise ``~/.humbugrc`` — section ``[api]``.

        Raises RuntimeError when credentials are missing and the config
        file does not exist.
        """
        if None in (api_key, email):
            if config_file is None:
                config_file = os.path.join(os.environ["HOME"], ".humbugrc")
            if not os.path.exists(config_file):
                raise RuntimeError("api_key or email not specified and %s does not exist"
                                   % (config_file,))
            config = SafeConfigParser()
            # FIX: use open() instead of the Python-2-only file() builtin;
            # the context manager also guarantees the handle is closed.
            with open(config_file, 'r') as f:
                config.readfp(f, config_file)
            if api_key is None:
                api_key = config.get("api", "key")
            if email is None:
                email = config.get("api", "email")
            if site is None and config.has_option("api", "site"):
                site = config.get("api", "site")

        self.api_key = api_key
        self.email = email
        self.verbose = verbose
        # An explicit site (argument or config file) wins over the default.
        if site is not None:
            self.base_url = site
        else:
            self.base_url = "https://humbughq.com"
        self.retry_on_errors = retry_on_errors
        self.client_name = client
	def main(self):
		"""Read RPC connection settings from the config file, start the
		periodic alt-speed check, then enter the GTK main loop."""

		parser = SafeConfigParser()
		
		try:
			# Connection parameters for the transmissionrpc client.
			parser.read(self.base_folder + '/' + self.CONFIG_FILENAME)
			self.server = parser.get(self.CONFIG_SECTION, 'server')
			self.port = parser.getint(self.CONFIG_SECTION, 'port')
			self.user = parser.get(self.CONFIG_SECTION, 'user')
			self.password = parser.get(self.CONFIG_SECTION, 'password')
			self.timeout = parser.getint(self.CONFIG_SECTION, 'timeout')
			# Seconds between periodic RPC polls (used for the GTK timer below).
			rpc_interval = parser.getint(self.CONFIG_SECTION, 'rpc_interval')
			
		except Exception as e :
			# NOTE(review): e.message is Python-2-only; str(e) would be portable.
			print( e );
			self.displayErrorAndExit( "Could not read configuration '"+self.CONFIG_FILENAME+"'\nError is: "+e.message)

		# Optional: mirror transmissionrpc logging to stdout at the configured level.
		if parser.has_option(self.CONFIG_SECTION, "rpc_logger_level"):
			transmissionrpc.utils.add_stdout_logger(parser.get(self.CONFIG_SECTION, 'rpc_logger_level'));

		# Run one check immediately, then schedule the periodic one.
		self.check_alt_speed_enabled()

		# The callback function is called repeatedly until it returns False,
		# at which point the timeout is automatically destroyed and the function will not be called again
		# http://www.pygtk.org/pygtk2reference/gobject-functions.html#function-gobject--timeout-add
		gtk.timeout_add( rpc_interval * 1000, self.check_alt_speed_enabled)

		gtk.main()
Пример #3
0
  def __init__(S):
    """Set up shared state, load traidor.conf, register the MtGox exchange
    and start the command main loop in a background thread."""
    S.displaylock = Lock()
    S.auto_update_depth = True

    # Parse the configuration file.
    cfg = SafeConfigParser()
    cfg.read('traidor.conf')

    # Register the (single) exchange backend.
    S.exchange = None
    S.exchanges = []
    S.addExchange(MtGox(S, cfg))

    # Boolean switches from the [main] section.
    for flag in ('donated', 'debug', 'continue_on_exception', 'auto_update_depth'):
        setattr(S, flag, cfg.getboolean('main', flag))
    S.display_height = int(cfg.get('main', 'initial_depth_display_height'))
    S.autoexec = cfg.get('main', 'autoexec')
    S.bots = []

    # The instance itself is callable and serves as the command main loop.
    worker = Thread(target=S)
    worker.start()
Пример #4
0
    def __init__(self, parsed_url):
        """Connect to an Atmos endpoint described by atmos://host[:port]/path/.

        Per-host credentials (uid/secret) are read from
        /etc/duplicity/atmos.ini.
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.parsed_url = parsed_url
        self.url_string = duplicity.backend.strip_auth_from_url(self.parsed_url)
        pieces = self.url_string.split('/')
        self.url_path = '/' + '/'.join(pieces[3:])

        # Split "host[:port]"; fall back to 443 when no port is given.
        netloc = pieces[2]
        host, sep, port = netloc.partition(':')
        if not sep:
            port = 443

        cfg = SafeConfigParser()
        cfg.read('/etc/duplicity/atmos.ini')
        uid = cfg.get(host, 'uid')
        secret = cfg.get(host, 'secret')
        log.Debug("Parsed URL:" + self.url_string)

        # Init the Atmos connection.
        self.api = EsuRestApi(host, int(port), uid, secret)

        # Use an explicit directory name.
        if self.url_string[-1] != '/':
            self.url_string += '/'
Пример #5
0
    def __init__(self):
        """Load people-counting parameters from the module-level *configfile*
        (parameters.ini)."""
        parser = SafeConfigParser()
        parser.read(configfile)
        # Background-subtractor (MOG2) settings.
        self.mog2History = parser.getint('PeopleCounting', 'mog2History')
        self.mog2VarThrsh = parser.getint('PeopleCounting', 'mog2VarThrsh')
        self.mog2Shadow = parser.getboolean('PeopleCounting', 'mog2Shadow')
        self.mog2LearningRate = parser.getfloat('PeopleCounting', 'mog2LearningRate')
        self.kernelSize = parser.getint('PeopleCounting', 'kernelSize')
        self.scale = parser.getfloat('PeopleCounting', 'scale')
        # Blob acceptance area derived from a configured radius.
        self.areaThreshold = math.pi * parser.getfloat('PeopleCounting', 'areaRadius')**2
        self.peopleBlobSize = parser.getint('PeopleCounting', 'peopleBlobSize')
        self.distThreshold = parser.getint('PeopleCounting', 'distThreshold')
        # FIX: wrap map() in list() so the regions are real lists on Python 3
        # as well (on Python 2 map() already returned a list, so this is a
        # behavior-identical change there).
        self.countingRegion = list(map(int, parser.get('PeopleCounting', 'countingRegion').split(',')))
        self.upperTrackingRegion = list(map(int, parser.get('PeopleCounting', 'upperTrackingRegion').split(',')))
        self.lowerTrackingRegion = list(map(int, parser.get('PeopleCounting', 'lowerTrackingRegion').split(',')))
        self.inactiveThreshold = parser.getint('PeopleCounting', 'inactiveThreshold')
        self.Debug = parser.getboolean('PeopleCounting', 'Debug')
        # Visualization is forced on whenever Debug is set.
        self.Visualize = parser.getboolean('PeopleCounting', 'Visualize') or self.Debug
        self.useRatioCriteria = parser.getboolean('PeopleCounting', 'useRatioCriteria')
        self.RTSPurl = parser.get('PeopleCounting', 'RTSPurl')
        self.RTSPframerate = parser.getint('PeopleCounting', 'RTSPframerate')

        # ASSUMPTION (from original author): people entering the door walk
        # downwards (direction = 1) in the video.
        self.store_id = parser.getint('store', 'store_id')
        self.camera_id = parser.getint('store', 'camera_id')
        self.ipc_username = parser.get('store', 'ipc_username')
        self.ipc_password = parser.get('store', 'ipc_password')
        self.wl_dev_cam_id = parser.get('store', 'wl_dev_cam_id')
Пример #6
0
def get_config():
    global weather_URL1
    global weather_URL2
    global userid
    global passwd
    
    if not os.path.isfile(config_file):
        print u'====> config file not found.'
        quit()
    
    parser = SafeConfigParser()
    parser.read(config_file)
    
    userid = parser.get('account', 'user')
    assert isinstance(userid, str)
    
    passwd = parser.get('account', 'pass')
    assert isinstance(passwd, str)
    
    w1 = parser.get('weatherurls', 'weather1')
    if w1:
        weather_URL1 = w1
        assert isinstance(weather_URL1, str)
    else:
        assert isinstance(weather_URL1, str)
    
    w2 = parser.get('weatherurls', 'weather2')
    if w2:
        weather_URL2 = w2
        assert isinstance(weather_URL2, str)
    else:
        assert isinstance(weather_URL2, str)
Пример #7
0
    def get(self, section, option):
        """Like SafeConfigParser.get(), but materializes registered defaults
        on demand and coerces the returned value to the default's type."""
        defaults = self._section_defaults

        # Create the section/option lazily from the registered defaults.
        if not self.has_section(section) and section in defaults:
            self.add_section(section)
        if not self.has_option(section, option) and option in defaults[section]:
            self.set(section, option, str(defaults[section][option]))

        if section in defaults and option in defaults[section]:
            default = defaults[section][option]

            # NOTE: bool must be tested before int (bool subclasses int).
            if isinstance(default, str):
                return SafeConfigParser.get(self, section, option)
            elif isinstance(default, bool):
                return SafeConfigParser.get(self, section, option).lower() in ("true", "on", "1")
            elif isinstance(default, int):
                return int(SafeConfigParser.get(self, section, option))
            elif isinstance(default, list):
                return SafeConfigParser.get(self, section, option).split(',')
            else:
                raise TypeError("Option type %s not supported" % type(default))

        # No registered default: plain string lookup.
        return SafeConfigParser.get(self, section, option)
Пример #8
0
def process_config(configfile_path):
    """Load the api_url/api_token globals from *configfile_path*; exit the
    process when either value is missing."""
    global api_token
    global api_url

    cfg = SafeConfigParser()
    cfg.read(configfile_path)

    errors = 0

    url = cfg.get('api_settings', 'api_url')
    if url:
        api_url = url
    else:
        LOG.error('Configuration file needs valid api_url')
        errors += 1

    token = cfg.get('api_settings', 'api_token')
    if token:
        api_token = token
    else:
        LOG.error('Configuration file needs valid api_token')
        errors += 1

    if errors:
        sys.exit()
Пример #9
0
def configure(config_file, env):
    # Load (or interactively create) the deployment configuration for *env*,
    # then attach the password from the system keyring when available.
    # NOTE(review): this snippet is corrupted — the literal "******" runs
    # below look like an automated secret-redaction pass that also swallowed
    # real code (apparently a for-loop over ('dreadnot', 'region',
    # 'username') and a keyring.set_password(...) call). Restore the
    # original from upstream before using this function.
    config = {}
    conf = SafeConfigParser()
    passwd = None
    if conf.read(config_file):
        config['username'] = conf.get(env, 'username')
        config['dreadnot'] = conf.get(env, 'dreadnot')
        config['region'] = conf.get(env, 'region')
    else:
        config['dreadnot'] = raw_input('Dreadnot URL:')
        config['region'] = raw_input('Deployment region:')
        config['username'] = raw_input('Dreadnot username:'******'dreadnot', 'region', 'username'):
            conf.set(env, name, config[name])
        with open(config_file, 'w') as f:
            conf.write(f)
    if keyring:
        passwd = keyring.get_password('dreadnot', config['dreadnot'])
    if not passwd:
        passwd = getpass.getpass('Dreadnot password:'******'dreadnot', config['dreadnot'], passwd)
    config['password'] = passwd
    return config
Пример #10
0
 def __init__(self, *args, **kwargs):
         """Read basic settings from self.conf and derive the PLDA model path."""
         luigi.Task.__init__(self, *args, **kwargs)
         cfg = SafeConfigParser()
         cfg.read(self.conf)
         data_root = cfg.get("basic", "root")
         self.user_field = cfg.get("basic", "user_field")
         self.user_plda = '%s/data/user/user.plda' % data_root
Пример #11
0
class ConfManager():
    """Common base class for all configuration managers."""

    def __init__(self, platform):
        """Load conf/<platform>/login.conf into a parser instance."""
        print('Create ConfManager platform: ' + platform)
        self.__platform = platform
        self.parser = SafeConfigParser()
        self.parser.read('conf/' + self.__platform + '/login.conf')

    def read(self):
        """Read configuration file."""
        self.read_options()

    def read_login_data(self):
        """Read login data from login.txt file (subclass hook)."""
        pass

    def read_monitor_data(self):
        """Read monitor configuration data from the [monitor] section."""
        cfg = self.parser
        self.monitor_port = cfg.get('monitor', 'port')
        self.monitor_host = cfg.get('monitor', 'host')
        self.monitor_enabled = cfg.get('monitor', 'enabled')

    def read_options(self):
        """Read option values from the [options] section."""
        cfg = self.parser
        self.validate_certs = cfg.get('options', 'validate_certs')
        self.terminal = cfg.get('options', 'terminal')
def parmRead(fname):
    """Read the main config file and return the parameters needed for
    long-range CFS regridding.

    Parameters
    ----------
    fname: str
        name of parameter file to read in

    Returns
    -------
    Parms
        values as pulled from the file
    """
    cfg = SafeConfigParser()
    cfg.read(fname)

    WhfLog.init(cfg, "LongRangeRegridDriver", 'Long', 'Regrid', 'CFS')

    cfs_dir = cfg.get('data_dir', 'CFS_data')
    num_ensemble = int(cfg.get('data_dir', 'CFS_num_ensemble'))
    max_fcst_hour = int(cfg.get('fcsthr_max', 'CFS_fcsthr_max'))
    hours_back = int(cfg.get('triggering', 'CFS_hours_back'))
    state_file = cfg.get('triggering', 'long_range_regrid_state_file')

    return Parms(cfs_dir, num_ensemble, max_fcst_hour, hours_back, state_file)
def main():
    """Parse CLI options, load MySQL settings, and write the donation
    breakdown CSV into the given working directory."""
    opt_parser = OptionParser(usage="usage: %prog [options] <working directory>")
    opt_parser.add_option("-c", "--config", dest='configFile', default=None, help='Path to configuration file')
    options, args = opt_parser.parse_args()

    if len(args) != 1:
        opt_parser.print_help()
        exit(1)
    workingDir = args[0]

    # The base config is always read; an explicit -c file overrides it.
    config = SafeConfigParser()
    config_files = ['./fundstatgen.cfg']
    if options.configFile is not None:
        config_files.append(options.configFile)
    config.read(config_files)

    # === BEGIN PROCESSING ===
    stats = getData(config.get('MySQL', 'hostname'),
                    config.getint('MySQL', 'port'),
                    config.get('MySQL', 'username'),
                    config.get('MySQL', 'password'),
                    config.get('MySQL', 'schema'))

    createSingleOutFile(stats, ['date', 'utm_medium'], workingDir + '/donationdata-medium-breakdown.csv')
Пример #14
0
def setup():
    """Build an mpo_methods client from environment defaults, optionally
    overridden by ~/.mpo/mpo.conf, and return it."""
    # Defaults, each overridable via the environment.
    mpo_version = os.getenv('MPO_VERSION', 'v0')
    mpo_api_url = os.getenv('MPO_HOST', 'https://localhost:8443')  # API_URL
    mpo_cert = os.getenv('MPO_AUTH', '../../MPO Demo User.pem')
    archive_host = os.getenv('MPO_ARCHIVE_HOST', 'psfcstor1.psfc.mit.edu')
    archive_user = os.getenv('MPO_ARCHIVE_USER', 'psfcmpo')
    archive_key = os.getenv('MPO_ARCHIVE_KEY', '~/.mpo/rsync_id_rsa')
    archive_prefix = os.getenv('MPO_ARCHIVE_PREFIX', 'mpo-persistent-store/')

    # User preferences override environment and defaults alike.
    userconfdir = os.getenv('HOME') + '/.mpo/'
    userconf = userconfdir + 'mpo.conf'
    if os.path.isfile(userconf):
        from ConfigParser import SafeConfigParser
        prefs = SafeConfigParser()
        prefs.read(userconfdir + '/mpo.conf')
        mpo_api_url = prefs.get('api', 'MPO_HOST')
        mpo_cert = userconfdir + prefs.get('api', 'MPO_AUTH')

    # Establish a session to mpo.
    print('mpo_setup env', mpo_cert, mpo_api_url)
    from mpo_arg import mpo_methods as mpo
    m = mpo(api_url=mpo_api_url, cert=mpo_cert, debug=True, filter='json')
    return m
Пример #15
0
    def write_to_db(self, db, min_id=1):
        """Read SVO triples (id >= min_id) from MySQL database *db*.

        Connection settings come from config/mysql_config.ini ([MySQL]).
        NOTE(review): update_el_sql is defined but never used in the visible
        code, and *triples* is fetched but not processed — the snippet
        appears truncated. Python-2-only 'except ..., e' syntax below.
        """

        def read_triples_sql(min_id=1, table_name='svo'):
            # SELECT of the head-word columns used for entity linking.
            return u"""
                SELECT id, subject_head, subject_nn_head, object_head, object_nn_head
                FROM {}
                WHERE id >= {}
            """.format(table_name, str(min_id))

        def update_el_sql(id, el_column, el_results, table_name='svo'):
            # UPDATE template for writing entity-linking results back.
            return u"""
                UPDATE {} SET {}="{}" WHERE id={}
            """.format(table_name, el_column, el_results, str(id))

        parser = SafeConfigParser()
        parser.read('config/mysql_config.ini')
        mysql_config = {
            'host': parser.get('MySQL', 'host'),
            'user': parser.get('MySQL', 'user'),
            'passwd': parser.get('MySQL', 'passwd'),
            'db': db
        }
        conn = MySQLdb.connect(**mysql_config)
        cur = conn.cursor()

        # Read triples to construct el queries
        triples = None
        try:
            cur.execute(read_triples_sql(min_id=min_id))
            triples = cur.fetchall()
        except MySQLdb.Error, e:
            try:
                logger.error("MySQL Error [{}]: {}".format(e.args[0], e.args[1]))
            except IndexError:
                logger.error("MySQL Error: {}".format(str(e)))
Пример #16
0
def main(argv=sys.argv[1:]):
    """Launch instances of an EC2 image using credentials from ~/.s3cfg.

    Expects the image name as the first argument. Python-2-only syntax
    throughout (print statements, 'except X, ex').
    NOTE(review): the outer 'try' has no matching except/finally in the
    visible code — the snippet appears truncated.
    """
    try:
        imagename = argv[0]

        try:
            s = SafeConfigParser()
            s3cfg = os.getenv("HOME") + "/.s3cfg"
            s.readfp(open(s3cfg, "r"))
            s3id = s.get("default", "access_key")
            pw = s.get("default", "secret_key")
            host_base = s.get("default", "host_base")
            use_https = s.getboolean("default", "use_https")

            # host_base may carry an explicit port ("host:port").
            hba = host_base.split(":", 1)
            if len(hba) == 2:
                port = int(hba[1])
            else:
                port = 8888
            host = hba[0]
        except Exception, ex:
            print "This program uses the s3cmd configuration file ~/.s3cfg"
            print ex
            sys.exit(1)

        print "getting connection"
        # NOTE(review): host='locahost' looks like a typo, but the attribute
        # is overwritten on the next line, so the kwarg is effectively unused.
        ec2conn = EC2Connection(s3id, pw, host='locahost', port=8444, debug=2)
        ec2conn.host = 'localhost'
        print "getting image"
        image = ec2conn.get_image(imagename)
        print "running"
        res = image.run(min_count=2, max_count=4)
        res.stop_all()
Пример #17
0
    def get_blog_subscribers(self):
        """ Gets WordPress Blog Subscribers as (email, first_name) pairs. """
        cfg = SafeConfigParser()
        cfg.read(os.path.join(os.path.dirname(__file__), 'config.ini'))
        try:
            url = cfg.get("wordpress", "url")
            username = cfg.get("wordpress", "username")
            password = cfg.get("wordpress", "password")
        except Error as error:
            msg = "Config section [wordpress] bad or missing: %s" % \
                  error.message
            logging.error(msg)
            raise Exception(msg)

        subscribers = []
        client = Client(url, username, password)
        users = client.call(GetUsers())
        logging.info("Found %d users." % len(users))
        # Keep only users carrying the 'subscriber' role.
        for user in users:
            logging.debug("User: %s" % user.email)
            logging.debug("Roles: %s" % user.roles)
            if 'subscriber' in user.roles:
                subscribers.append((user.email, user.first_name))
        return subscribers
Пример #18
0
def checkFault(controller,rxtp,txtp,rxE,txE,switch,port):
    """
    This method serves as a Fault Logger depending upon the performance threshold violation
    """
    logging.basicConfig(filename="/home/Capstone/FAULT_LOG/fault.log",level=logging.DEBUG)


    parser = SafeConfigParser()
    parser.read('/home/Capstone/KPI_config.conf')
    fm_rxtp = int(parser.get('pox_fm', 'fm_rxtp'))
    fm_txtp = int(parser.get('pox_fm', 'fm_txtp'))
    fm_rxE = int(parser.get('pox_fm', 'fm_rxE'))
    fm_txE = int(parser.get('pox_fm', 'fm_rxE'))

    result=[]
    if int(fm_rxtp) < rxtp:
       print "rxtp exceeded"
       #logging.basicConfig(format='%(levelname)s %(asctime)s : ---%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG)
       logging.warning(controller+":"+str(str(time.strftime("%x"))+" "+str(time.strftime("%X")))+":["+switch+"]-"+port+"-rxtp exceeded")

    if int(fm_txtp) < txtp:
       print "txtp exceeded"
       #logging.basicConfig(format='%(levelname)s  %(asctime)s : ---%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG)
       logging.warning(controller+":"+str(str(time.strftime("%x"))+" "+str(time.strftime("%X")))+":["+switch+"]-"+port+"-txtp exceeded")

    if int(fm_rxE) < rxE:
       print "rxE exceeded"
       #logging.basicConfig(format='%(levelname)s : %(asctime)s : ---%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG)
       logging.warning(controller+":"+str(str(time.strftime("%x"))+" "+str(time.strftime("%X")))+":["+switch+"]-"+port+"-rxE exceeded")

    if int(fm_txE) < txE:
       print "txE exceeded"
       #logging.basicConfig(format='%(levelname)s : %(asctime)s : ---%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG)
       logging.warning(controller+":"+str(str(time.strftime("%x"))+" "+str(time.strftime("%X")))+":["+switch+"]-"+port+"-txE exceeded")
Пример #19
0
    def buildTestStruct(self, path=None):
        """
        A list of dictionaries is used so that order is preserved.
        [
            {'variant0': {'path': ['test0', 'test1', 'test2',...]}},
            {'variant1': {'path': ['test0', 'test1', 'test2',...]}},
            {'variant2': {'path': ['test0', 'test1', 'test2',...]}},
        ]
        """
        self.getCfgs(path=path)
        for cfg_file in self.cfg_files:
            cfg = SafeConfigParser()
            cfg.read(cfg_file)

            if cfg.has_option('DEFAULT', 'PROJ_ROOT'):
                proj_root = cfg.get('DEFAULT', 'PROJ_ROOT')
            else:
                proj_root = ''
            path = os.path.normpath(os.path.split(cfg_file)[0])

            # A variant name supplied in the config file wins; otherwise the
            # directory name (or the cwd's name for '.') is used.
            variant_name = None
            if cfg.has_option('DEFAULT', 'VARIANT_NAME'):
                variant_name = cfg.get('DEFAULT', 'VARIANT_NAME')
            if variant_name is None:
                if path == '.':
                    variant_name = os.path.normpath(os.path.split(os.getcwd())[1])
                else:
                    variant_name = path

            tests = sorted(cfg.sections())
            self.variants_and_tests.append({variant_name: {path: tests}})
Пример #20
0
	def __init__(self, *args, **kwargs):
		"""Load user-crawl settings (wsdl, page, archive_dir) from self.conf."""
		luigi.Task.__init__(self, *args, **kwargs)
		cfg = SafeConfigParser()
		cfg.read(self.conf)
		self.wsdl = cfg.get("user", "wsdl")
		self.page = cfg.getint("user", "page")
		self.archive_dir = cfg.get("user", "archive_dir")
Пример #21
0
    def __init__(self,section='Settings',iniFile='vikatan.ini'):
        """Initialize the browser session and load profile settings.

        Reads username/password/magazine from *section* of *iniFile*;
        silently falls back to defaults when the file or options are absent.
        """
        self._browser = Browser()
        self._profile = dict()
        consoleLogger()
        # Create Temp Dir [for images] and set it as default tempdir
        try:
            from ConfigParser import SafeConfigParser
            parser = SafeConfigParser()
            parser.read(iniFile)

            if parser.has_section(section):

                if parser.has_option(section,'username'):
                    self._profile['username']=parser.get(section,'username')
                    
                if parser.has_option(section,'password'):
                    self._profile['password']=parser.get(section,'password')
                    
                if parser.has_option(section,'magazine'):
                    # mcode stays unbound when this option is missing; that is
                    # probed deliberately below.
                    mcode=parser.get(section,'magazine')
                    
        except:
            print 'Invalid Settings. Mode set to Default'
            
        try:
            # Evaluating the bare name raises NameError when the 'magazine'
            # option was never read — a (fragile) "was it set?" probe.
            mcode
            if mcode in self._magazines.keys(): self._mcode = mcode
            else: print self._magCodeErrorStr
        except NameError:
            print 'No Parameter found in %s. Assuming Default' % iniFile

        self.status()
Пример #22
0
def run(command, global_options, options, args):
    """Initialize an ECM instance directory: collect static files, create
    the sqlite directory if needed, and run the initial database sync."""
    if not args:
        command.parser.error('Missing instance directory.')
    instance_dir = args[0]

    sqlite_db_dir = ''
    settings_file = os.path.join(instance_dir, 'settings.ini')
    config = SafeConfigParser()
    if config.read([settings_file]):
        sqlite_db_dir = config.get('database', 'sqlite_db_dir')
    else:
        command.parser.error('Settings file "%s" not found.' % settings_file)
    if not sqlite_db_dir:
        sqlite_db_dir = os.path.join(instance_dir, 'db')
    ecm_db_engine = config.get('database', 'ecm_engine')

    # run collectstatic
    collect_static_files(instance_dir, options)

    # run syncdb (creating the sqlite directory first when needed)
    if 'sqlite' in ecm_db_engine and not os.path.exists(sqlite_db_dir):
        os.makedirs(sqlite_db_dir)
    init_ecm_db(instance_dir)

    log('')
    log('ECM instance initialized in "%s".' % instance_dir)

    print_load_message(instance_dir, ecm_db_engine)
Пример #23
0
	def read_conf_deepsearch(self): 
		self.cfg_deep = []
		parser = SafeConfigParser()
		parser.read(self.dirconf+'custom_params.ini')

		if(parser.has_option('general'  ,'deep_numserver') == False):
			return None

		numserver = parser.get('general', 'deep_numserver')	

		try:
			for i in xrange(int(numserver)):
				spc = self.get_conf_speedopt(parser, i, 'd')
				if ( spc == -1 ):
					spc = 1

				if(parser.has_option('deep_search_provider%d' % (i+1)  , 'type')):	
					typeds = parser.get('deep_search_provider%d' % (i+1)  , 'type')
				else:	
					typeds = 'DSN'

				d1 = {'url': parser.get('deep_search_provider%d' % (i+1)  , 'url'),
					  'user': parser.get('deep_search_provider%d' % (i+1)  , 'user'),
					  'pwd': parser.get('deep_search_provider%d' % (i+1)  , 'pwd'),
					  'type': typeds,
					  'speed_class': spc,
					  'valid': int(parser.getint('deep_search_provider%d' % (i+1)  , 'valid')),
					  }
				self.cfg_deep.append(d1)

		except Exception as e:
			print str(e)
			cfg_deep = None
Пример #24
0
def main():
    """Interactive helper that obtains Tumblr OAuth access tokens and stores
    them in the config file given as the first CLI argument."""
    configFile = argv[1]

    config = SafeConfigParser()
    config.read(configFile)

    consumer_key = config.get('consumer', 'key')
    consumer_secret = config.get('consumer', 'secret')

    # Step 1: request token — send the user to the authorization URL.
    client = Tumblpy(consumer_key, consumer_secret)
    auth_props = client.get_authentication_tokens()

    print("Go to the following link in your browser:")
    print(auth_props['auth_url'])
    print('')

    # Step 2: wait until the user supplies the PIN.
    oauth_verifier = 'n'
    while oauth_verifier.lower() == 'n':
        oauth_verifier = raw_input('What is the PIN?:  ')

    # Step 3: trade the verifier for access tokens and persist them.
    client = Tumblpy(consumer_key,
                     consumer_secret,
                     auth_props['oauth_token'],
                     auth_props['oauth_token_secret'])
    authorized_tokens = client.get_access_token(oauth_verifier)

    config.set('oauth', 'key', authorized_tokens['oauth_token'])
    config.set('oauth', 'secret', authorized_tokens['oauth_token_secret'])

    print('Saving keys to config file %s' % configFile)
    with open(configFile, 'w') as fp:
        config.write(fp)
Пример #25
0
    def get_mom_connection(self):
        """ Gets (and caches on self.conn) a SQL Server connection to MOM """
        cfg = SafeConfigParser()
        cfg.read(os.path.join(os.path.dirname(__file__), 'config.ini'))
        try:
            momdb_host = cfg.get("momdb", "host")
            momdb_user = cfg.get("momdb", "user")
            momdb_password = cfg.get("momdb", "password")
            momdb_db = cfg.get("momdb", "db")
        except Error as error:
            msg = "Config section [momdb] bad or missing: %s" % error.message
            logging.error(msg)
            raise Exception(msg)

        try:
            logging.info('Connecting to MOM...')
            # Only connect once; reuse the cached connection afterwards.
            if self.conn is None:
                self.conn = connect(host=momdb_host, user=momdb_user,
                                    password=momdb_password,
                                    database=momdb_db, as_dict=True)
            return self.conn
        except InterfaceError as error:
            msg = "Error connecting to SQL Server: %s" % error.message
            logging.error(msg)
            raise Exception(msg)
        except Error as error:
            logging.error(error.message)
            raise
Пример #26
0
def upload(filename,filedirectory = None,folderstate = 0):

	parser = SafeConfigParser()
	parser.read('config.ini')
	
	host  = parser.get('FTP Server','host')
	user = parser.get('FTP Server','user')
	password = parser.get('FTP Server','pass')
	
	session = ftplib.FTP(host,user,password)
	filename_extension_together = os.path.splitext(filename)
	storefilename = filename_extension_together[0]+filename_extension_together[1]
	file = open(filename,'rb')                  # file to send
	print "Opening Folder " ,filedirectory
	print "Sending ..."
	


	if folderstate == 1 : # setting folder name
		session.cwd(filedirectory)

	elif folderstate == 2: #new folder create command
		session.mkd(filedirectory)
	 	session.cwd(filedirectory)
	
 	

	session.storbinary('STOR '+storefilename, file)     # send the file
	print "Uploaded File ..."
	file.close()                                    # close file and FTP
	print "Operation Done Session Closed ..."
	session.quit()
Пример #27
0
 def parseConfigFile(self):
     """Load Effiliation feed settings from the Boris ini file."""
     cfg = SafeConfigParser()
     cfg.read('C:/BorderSoftware/Boris/settings/boris.ini')
     self.key = cfg.get('Effiliation', 'key')
     self.filter = cfg.get('Effiliation', 'filter')
     self.feedPath = cfg.get('Effiliation', 'feedPath')
     self.amountOfThreads = int(cfg.get('General', 'amountofthreads'))
Пример #28
0
    def parseConfig(self):
        """Reads the config file (wormBait.ini) and initializes the UI
        fields accordingly, falling back to placeholder text."""
        config = SafeConfigParser()
        config.read('wormBait.ini')

        dbIds = ""
        degFile = ""
        outFile = ""
        if config.has_section('wormBait'):
            dbIds = config.get('wormBait', 'dbIds')
            degFile = config.get('wormBait', 'degFile')
            outFile = config.get('wormBait', 'outFile')

        # Empty options fall back to placeholder/instruction text.
        if dbIds:
            self.entryList.writeln(dbIds)
        else:
            self.entryList.writeln("Enter DB IDs here")

        if degFile:
            self.dbFilePath.set(degFile)
        else:
            self.dbFilePath.set('Enter path to input database file here')

        if outFile:
            self.outFilePath.set(outFile)
        else:
            self.outFilePath.set('Enter desired path to output CSV file here')
Пример #29
0
    def from_filename(filename, allow_empty=False):
        """Build an OMPEnv from the [openmp] section of *filename*.

        Only option *names* are validated (against OMPEnv._keys), not their
        values. Raises ValueError for unknown options, for a missing
        [openmp] section (unless allow_empty), or for an empty result.
        """
        from ConfigParser import SafeConfigParser, NoOptionError
        parser = SafeConfigParser()
        parser.read(filename)

        obj = OMPEnv()

        if "openmp" not in parser.sections():
            if not allow_empty:
                raise ValueError("%s does not contain any [openmp] section" % filename)
            return obj

        # Consistency check on option names only.
        unknown = [k for k in parser.options("openmp") if k.upper() not in OMPEnv._keys]
        if unknown:
            raise ValueError("".join("unknown option %s, maybe a typo" % k for k in unknown))

        # Accept both the canonical and the lower-cased option spelling.
        for key in OMPEnv._keys:
            try:
                obj[key] = str(parser.get("openmp", key))
            except NoOptionError:
                try:
                    obj[key] = str(parser.get("openmp", key.lower()))
                except NoOptionError:
                    pass

        if not allow_empty and not obj:
            raise ValueError("Refusing to return with an empty dict")

        return obj
Пример #30
0
class TracerConfiguration(object):
	"""Tracer configuration from file, with built-in fallback defaults."""

	def __init__(self):
		self.section = 'TracerConfiguration'
		# FIX: defaults were previously assigned to 'self.parser.defaults',
		# which merely shadowed ConfigParser's defaults() method and was
		# never consulted by get(). Passing them to the constructor makes
		# them real fallback values, and creating the section up front lets
		# the accessors work even when Read() fails or is never called.
		self.parser = SafeConfigParser({
			'server': 'https://sankom-174508.appspot.com',
			'language': 'en',
			'station_id': '1',
			})
		self.parser.add_section(self.section)

	def Read(self, configFileName ):
		"""Load configuration from *configFileName*; on failure, log the
		error and keep the defaults."""
		try:
			self.parser.read( configFileName)
		except Exception as e:
			# log but use defaults
			logger.exception( e )
			print(e)

	def ServerUrl(self):
		return self.parser.get( self.section, 'server' )

	def Language(self):
		return self.parser.get( self.section, 'language' )

	def StationId(self):
		return self.parser.get( self.section, 'station_id' )
from ConfigParser import SafeConfigParser
import urllib
from utilities.elasticsearch_interaction import ElasticSearchInteraction
from utilities.elasticsearch_schema import team_schema

# Load settings for the Elasticsearch cluster and the remote data source.
config = SafeConfigParser()
config.read('utilities/config.ini')

# Elasticsearch connection and index settings.
es_host = config.get('elasticsearch', 'HOST')
es_port = config.getint('elasticsearch', 'PORT')
es_index_name = config.get('elasticsearch', 'TEAM_INDEX')
es_doc_type = config.get('elasticsearch', 'TEAM_DOC_TYPE')
es = ElasticSearchInteraction(es_host, es_port)

# Download the raw team data to a local file.
teams_data_url = config.get('data_source', 'teams_data_url')
downloader = urllib.URLopener()
downloader.retrieve(teams_data_url, "teams.txt")

# Read the downloaded lines.  'with' guarantees the handle is closed and
# avoids shadowing the builtin 'file' (the original left it open).
with open('teams.txt', 'r') as teams_file:
    team_lines = teams_file.readlines()

# Create the index with its schema, then index the team records.
es.create_index(es_index_name, team_schema)
es.index_team_content(es_index_name, es_doc_type, team_lines)
# Пример #32 (Example #32)
# 0
class Csw(object):
    ''' Base CSW server

    Parses the runtime configuration, selects the CSW protocol
    implementation (2.0.2 or 3.0.0) and dispatches incoming CGI/WSGI
    requests to it.
    '''
    def __init__(self, rtconfig=None, env=None, version='3.0.0'):
        ''' Initialize CSW

        rtconfig may be a SafeConfigParser instance (used as-is), a dict of
        {section: {option: value}}, or a path to a configuration file.
        env is an optional WSGI-style environment mapping (defaults to
        os.environ).  version selects the protocol implementation
        ('2.0.2' or '3.0.0', the default).
        '''

        if not env:
            self.environ = os.environ
        else:
            self.environ = env

        self.context = config.StaticContext()

        # Lazy load this when needed
        # (it will permanently update global cfg namespaces)
        self.sruobj = None
        self.opensearchobj = None
        self.oaipmhobj = None

        # init kvp
        self.kvp = {}

        self.mode = 'csw'
        # NOTE: 'self. async' (space after the dot) is valid Python 2 only;
        # 'async' became a keyword in Python 3.7.
        self. async = False
        self.soap = False
        self.request = None
        self.exception = False
        self.status = 'OK'
        self.profiles = None
        self.manager = False
        self.outputschemas = {}
        self.mimetype = 'application/xml; charset=UTF-8'
        self.encoding = 'UTF-8'
        self.pretty_print = 0
        self.domainquerytype = 'list'
        self.orm = 'django'
        self.language = {'639_code': 'en', 'text': 'english'}
        self.process_time_start = time()

        # define CSW implementation object (default CSW3)
        self.iface = csw3.Csw3(server_csw=self)
        self.request_version = version

        if self.request_version == '2.0.2':
            self.iface = csw2.Csw2(server_csw=self)
            self.context.set_model('csw')

        # load user configuration: accept an already-built parser, a dict,
        # or a filename; any failure short-circuits into an ExceptionReport
        # stored on self.response (checked later by the dispatchers).
        try:
            if isinstance(rtconfig, SafeConfigParser):  # serialized already
                self.config = rtconfig
            else:
                self.config = SafeConfigParser()
                if isinstance(rtconfig, dict):  # dictionary
                    for section, options in rtconfig.iteritems():
                        self.config.add_section(section)
                        for k, v in options.iteritems():
                            self.config.set(section, k, v)
                else:  # configuration file
                    import codecs
                    with codecs.open(rtconfig, encoding='utf-8') as scp:
                        self.config.readfp(scp)
        except Exception as err:
            self.response = self.iface.exceptionreport(
                'NoApplicableCode', 'service',
                'Error opening configuration %s' % rtconfig)
            return

        # set server.home safely
        # TODO: make this more abstract
        self.config.set(
            'server', 'home',
            os.path.dirname(os.path.join(os.path.dirname(__file__), '..')))

        self.context.pycsw_home = self.config.get('server', 'home')
        self.context.url = self.config.get('server', 'url')

        log.setup_logger(self.config)

        LOGGER.debug('running configuration %s' % rtconfig)
        LOGGER.debug(str(self.environ['QUERY_STRING']))

        # set OGC schemas location
        if not self.config.has_option('server', 'ogc_schemas_base'):
            self.config.set('server', 'ogc_schemas_base',
                            self.context.ogc_schemas_base)

        # set mimetype
        if self.config.has_option('server', 'mimetype'):
            # NOTE(review): .encode() yields a byte string here (py2) —
            # presumably intentional for header emission; confirm.
            self.mimetype = self.config.get('server', 'mimetype').encode()

        # set encoding
        if self.config.has_option('server', 'encoding'):
            self.encoding = self.config.get('server', 'encoding')

        # set domainquerytype
        if self.config.has_option('server', 'domainquerytype'):
            self.domainquerytype = self.config.get('server', 'domainquerytype')

        # set XML pretty print
        if (self.config.has_option('server', 'pretty_print')
                and self.config.get('server', 'pretty_print') == 'true'):
            self.pretty_print = 1

        # set Spatial Ranking option
        if (self.config.has_option('server', 'spatial_ranking')
                and self.config.get('server', 'spatial_ranking') == 'true'):
            util.ranking_enabled = True

        # set language default; falls back silently to English on any
        # lookup failure (unknown code, missing language table entry)
        if (self.config.has_option('server', 'language')):
            try:
                LOGGER.info('Setting language')
                lang_code = self.config.get('server', 'language').split('-')[0]
                self.language['639_code'] = lang_code
                self.language['text'] = self.context.languages[lang_code]
            except:
                pass

        LOGGER.debug('Configuration: %s.' % self.config)
        LOGGER.debug('Model: %s.' % self.context.model)

        # load user-defined mappings if they exist
        if self.config.has_option('repository', 'mappings'):
            # override default repository mappings
            try:
                import imp
                module = self.config.get('repository', 'mappings')
                modulename = '%s' % \
                os.path.splitext(module)[0].replace(os.sep, '.')
                LOGGER.debug('Loading custom repository mappings from %s.' %
                             module)
                mappings = imp.load_source(modulename, module)
                self.context.md_core_model = mappings.MD_CORE_MODEL
                self.context.refresh_dc(mappings.MD_CORE_MODEL)
            except Exception as err:
                self.response = self.iface.exceptionreport(
                    'NoApplicableCode', 'service',
                    'Could not load repository.mappings %s' % str(err))

        # load outputschemas: one plugin module per advertised namespace
        LOGGER.debug('Loading outputschemas.')

        for osch in pycsw.plugins.outputschemas.__all__:
            mod = getattr(
                __import__('pycsw.plugins.outputschemas.%s' %
                           osch).plugins.outputschemas, osch)
            self.outputschemas[mod.NAMESPACE] = mod

        LOGGER.debug('Outputschemas loaded: %s.' % self.outputschemas)
        LOGGER.debug('Namespaces: %s' % self.context.namespaces)

    def expand_path(self, path):
        ''' Resolve a relative path against the WSGI application root
        ('local.app_root' in the environment); absolute paths and plain
        CGI deployments get the path back untouched. '''
        if 'local.app_root' in self.environ and not os.path.isabs(path):
            return os.path.join(self.environ['local.app_root'], path)
        return path

    def dispatch_cgi(self):
        ''' CGI entry point: read the request from the CGI environment
        and hand it off to dispatch() '''

        # A response prepared during __init__ (e.g. a configuration error)
        # short-circuits request processing.
        if hasattr(self, 'response'):
            return self._write_response()

        LOGGER.debug('CGI mode detected')

        form = cgi.FieldStorage(keep_blank_values=1)

        if form.file:
            # POST: the raw request body is exposed as a file object.
            self.requesttype = 'POST'
            self.request = form.file.read()
            LOGGER.debug('Request type: POST.  Request:\n%s\n', self.request)
        else:
            # GET: rebuild the full request URL and collect the KVPs.
            self.requesttype = 'GET'
            self.request = 'http://%s%s' % (
                self.environ['HTTP_HOST'], self.environ['REQUEST_URI'])
            LOGGER.debug('Request type: GET.  Request:\n%s\n', self.request)
            for name in form.keys():
                self.kvp[name] = form[name].value

        return self.dispatch()

    def dispatch_wsgi(self):
        ''' WSGI handler

        Reads the request body (POST) or reconstructs the request URL and
        parses the query string (GET), then delegates to dispatch().
        '''

        # A response prepared during __init__ (e.g. a configuration error)
        # short-circuits request processing.
        if hasattr(self, 'response'):
            return self._write_response()

        LOGGER.debug('WSGI mode detected')

        if self.environ['REQUEST_METHOD'] == 'POST':
            try:
                request_body_size = int(self.environ.get('CONTENT_LENGTH', 0))
            except (ValueError):
                # Missing/malformed CONTENT_LENGTH: treat body as empty.
                request_body_size = 0

            self.requesttype = 'POST'
            self.request = self.environ['wsgi.input'].read(request_body_size)
            LOGGER.debug('Request type: POST.  Request:\n%s\n', self.request)

        else:  # it's a GET request
            self.requesttype = 'GET'

            # URL reconstruction following the PEP 3333 recipe: prefer
            # HTTP_HOST (which already carries any port); otherwise use
            # SERVER_NAME and append non-default ports explicitly.
            scheme = '%s://' % self.environ['wsgi.url_scheme']

            if self.environ.get('HTTP_HOST'):
                url = '%s%s' % (scheme, self.environ['HTTP_HOST'])
            else:
                url = '%s%s' % (scheme, self.environ['SERVER_NAME'])

                if self.environ['wsgi.url_scheme'] == 'https':
                    if self.environ['SERVER_PORT'] != '443':
                        url += ':' + self.environ['SERVER_PORT']
                else:
                    if self.environ['SERVER_PORT'] != '80':
                        url += ':' + self.environ['SERVER_PORT']

            url += quote(self.environ.get('SCRIPT_NAME', ''))
            url += quote(self.environ.get('PATH_INFO', ''))

            if self.environ.get('QUERY_STRING'):
                url += '?' + self.environ['QUERY_STRING']

            self.request = url
            LOGGER.debug('Request type: GET.  Request:\n%s\n', self.request)

            # NOTE(review): .get('QUERY_STRING') may return None when the
            # key is absent, which would make .split() raise — presumably
            # servers always set QUERY_STRING; confirm.
            pairs = self.environ.get('QUERY_STRING').split("&")

            kvp = {}

            # Decode each key=value pair; a bare key maps to "".
            for pairstr in pairs:
                pair = [unquote(a) for a in pairstr.split("=")]
                kvp[pair[0]] = pair[1] if len(pair) > 1 else ""
            self.kvp = kvp

        return self.dispatch()

    def opensearch(self):
        ''' Lazily construct and cache the OpenSearch helper '''
        if not self.opensearchobj:
            self.opensearchobj = opensearch.OpenSearch(self.context)
        return self.opensearchobj

    def sru(self):
        ''' Lazily construct and cache the SRU helper '''
        if not self.sruobj:
            self.sruobj = sru.Sru(self.context)
        return self.sruobj

    def oaipmh(self):
        ''' Lazily construct and cache the OAI-PMH helper '''
        if not self.oaipmhobj:
            self.oaipmhobj = oaipmh.OAIPMH(self.context, self.config)
        return self.oaipmhobj

    def dispatch(self, writer=sys.stdout, write_headers=True):
        ''' Handle incoming HTTP request

        Performs version negotiation, mode detection (SRU / OAI-PMH /
        OpenSearch), capabilities-model and repository setup, validates the
        basic KVPs (service/version/request) and finally invokes the
        matching operation on the version-specific interface.  Returns the
        value of _write_response().

        NOTE(review): ``writer`` and ``write_headers`` are accepted but not
        used in this body — presumably kept for API compatibility; confirm
        against callers.
        '''

        # --- version negotiation: downgrade to CSW 2.0.2 when requested ---
        if self.requesttype == 'GET':
            self.kvp = self.normalize_kvp(self.kvp)
            if (('version' in self.kvp and self.kvp['version'] == '2.0.2')
                    or ('acceptversions' in self.kvp
                        and '2.0.2' in self.kvp['acceptversions'])):
                self.request_version = '2.0.2'
        elif self.requesttype == 'POST':
            if self.request.find('2.0.2') != -1:
                self.request_version = '2.0.2'

        # --- SRU mode: translate the SRU request into CSW 2.0.2 KVPs ---
        if (not isinstance(self.kvp, str) and 'mode' in self.kvp
                and self.kvp['mode'] == 'sru'):
            self.mode = 'sru'
            self.request_version = '2.0.2'
            LOGGER.debug('SRU mode detected; processing request.')
            self.kvp = self.sru().request_sru2csw(self.kvp)

        # --- OAI-PMH mode: keep original args, translate to CSW 2.0.2 ---
        if (not isinstance(self.kvp, str) and 'mode' in self.kvp
                and self.kvp['mode'] == 'oaipmh'):
            self.mode = 'oaipmh'
            self.request_version = '2.0.2'
            LOGGER.debug('OAI-PMH mode detected; processing request.')
            self.oaiargs = dict((k, v) for k, v in self.kvp.items() if k)
            self.kvp = self.oaipmh().request(self.kvp)

        if self.request_version == '2.0.2':
            self.iface = csw2.Csw2(server_csw=self)
            self.context.set_model('csw')

        # configure transaction support, if specified in config
        self._gen_manager()

        # generate domain model
        # NOTE: We should probably avoid this sort of mutable state for WSGI
        if 'GetDomain' not in self.context.model['operations']:
            self.context.model['operations']['GetDomain'] = \
            self.context.gen_domains()

        # generate distributed search model, if specified in config
        if self.config.has_option('server', 'federatedcatalogues'):
            LOGGER.debug('Configuring distributed search.')

            self.context.model['constraints']['FederatedCatalogues'] = \
            {'values': []}

            for fedcat in \
            self.config.get('server', 'federatedcatalogues').split(','):
                self.context.model\
                ['constraints']['FederatedCatalogues']['values'].append(fedcat)

        # advertise every loaded outputschema on the relevant operations
        for key, value in self.outputschemas.iteritems():
            self.context.model['operations']['GetRecords']['parameters'][
                'outputSchema']['values'].append(value.NAMESPACE)
            self.context.model['operations']['GetRecordById']['parameters'][
                'outputSchema']['values'].append(value.NAMESPACE)
            if 'Harvest' in self.context.model['operations']:
                self.context.model['operations']['Harvest']['parameters'][
                    'ResourceType']['values'].append(value.NAMESPACE)

        LOGGER.debug('Setting MaxRecordDefault')
        if self.config.has_option('server', 'maxrecords'):
            self.context.model['constraints']['MaxRecordDefault']['values'] = \
            [self.config.get('server', 'maxrecords')]

        # load profiles
        if self.config.has_option('server', 'profiles'):
            self.profiles = pprofile.load_profiles(
                os.path.join('pycsw', 'plugins', 'profiles'), pprofile.Profile,
                self.config.get('server', 'profiles'))

            for prof in self.profiles['plugins'].keys():
                tmp = self.profiles['plugins'][prof](self.context.model,
                                                     self.context.namespaces,
                                                     self.context)

                key = tmp.outputschema  # to ref by outputschema
                self.profiles['loaded'][key] = tmp
                self.profiles['loaded'][key].extend_core(
                    self.context.model, self.context.namespaces, self.config)

            LOGGER.debug('Profiles loaded: %s.' %
                         self.profiles['loaded'].keys())

        # init repository
        # look for tablename, set 'records' as default
        if not self.config.has_option('repository', 'table'):
            self.config.set('repository', 'table', 'records')

        repo_filter = None
        if self.config.has_option('repository', 'filter'):
            repo_filter = self.config.get('repository', 'filter')

        if (self.config.has_option('repository', 'source')
                and self.config.get('repository', 'source') == 'geonode'):

            # load geonode repository
            from pycsw.plugins.repository.geonode import geonode_

            try:
                self.repository = \
                geonode_.GeoNodeRepository(self.context)
                LOGGER.debug('GeoNode repository loaded (geonode): %s.' % \
                self.repository.dbtype)
            except Exception as err:
                self.response = self.iface.exceptionreport(
                    'NoApplicableCode', 'service',
                    'Could not load repository (geonode): %s' % str(err))

        elif (self.config.has_option('repository', 'source')
              and self.config.get('repository', 'source') == 'odc'):

            # load odc repository
            from pycsw.plugins.repository.odc import odc

            try:
                self.repository = \
                odc.OpenDataCatalogRepository(self.context)
                LOGGER.debug('OpenDataCatalog repository loaded (geonode): %s.' % \
                self.repository.dbtype)
            except Exception as err:
                self.response = self.iface.exceptionreport(
                    'NoApplicableCode', 'service',
                    'Could not load repository (odc): %s' % str(err))

        else:  # load default repository
            self.orm = 'sqlalchemy'
            from pycsw.core import repository
            try:
                self.repository = \
                repository.Repository(self.config.get('repository', 'database'),
                self.context, self.environ.get('local.app_root', None),
                self.config.get('repository', 'table'), repo_filter)
                LOGGER.debug('Repository loaded (local): %s.' \
                % self.repository.dbtype)
            except Exception as err:
                self.response = self.iface.exceptionreport(
                    'NoApplicableCode', 'service',
                    'Could not load repository (local): %s' % str(err))

        # POST bodies are parsed here; on failure parse_postdata returns a
        # string error message (checked just below).
        if self.requesttype == 'POST':
            LOGGER.debug(self.iface.version)
            self.kvp = self.iface.parse_postdata(self.request)

        error = 0

        if isinstance(self.kvp, str):  # it's an exception
            error = 1
            locator = 'service'
            text = self.kvp
            if (self.kvp.find('the document is not valid') != -1
                    or self.kvp.find('document not well-formed') != -1):
                code = 'NoApplicableCode'
            else:
                code = 'InvalidParameterValue'

        LOGGER.debug('HTTP Headers:\n%s.' % self.environ)
        LOGGER.debug('Parsed request parameters: %s' % self.kvp)

        # --- OpenSearch mode: force the Atom outputschema ---
        if (not isinstance(self.kvp, str) and 'mode' in self.kvp
                and self.kvp['mode'] == 'opensearch'):
            self.mode = 'opensearch'
            LOGGER.debug('OpenSearch mode detected; processing request.')
            self.kvp['outputschema'] = 'http://www.w3.org/2005/Atom'

        # A bare base-URL request defaults to csw30:GetCapabilities
        if ((self.kvp == {
                '': ''
        } and self.request_version == '3.0.0')
                or (len(self.kvp) == 1 and 'config' in self.kvp)):
            LOGGER.debug('Turning on default csw30:Capabilities for base URL')
            self.kvp = {
                'service': 'CSW',
                'acceptversions': '3.0.0',
                'request': 'GetCapabilities'
            }
            if 'HTTP_ACCEPT' in self.environ and 'application/opensearchdescription+xml' in self.environ[
                    'HTTP_ACCEPT']:
                self.mode = 'opensearch'
                self.kvp['outputschema'] = 'http://www.w3.org/2005/Atom'

        if error == 0:
            # test for the basic keyword values (service, version, request)
            basic_options = ['service', 'request']
            if self.request_version == '2.0.2':
                basic_options.append('version')

            # 'version' may be omitted for GetCapabilities requests
            for k in basic_options:
                if k not in self.kvp:
                    if (k in ['version', 'acceptversions']
                            and 'request' in self.kvp
                            and self.kvp['request'] == 'GetCapabilities'):
                        pass
                    else:
                        error = 1
                        locator = k
                        code = 'MissingParameterValue'
                        text = 'Missing keyword: %s' % k
                        break

            # test each of the basic keyword values
            if error == 0:
                # test service
                if self.kvp['service'] != 'CSW':
                    error = 1
                    locator = 'service'
                    code = 'InvalidParameterValue'
                    text = 'Invalid value for service: %s.\
                    Value MUST be CSW' % self.kvp['service']

                # test version
                if ('version' in self.kvp
                        and util.get_version_integer(self.kvp['version']) !=
                        util.get_version_integer(self.request_version)
                        and self.kvp['request'] != 'GetCapabilities'):
                    error = 1
                    locator = 'version'
                    code = 'InvalidParameterValue'
                    text = 'Invalid value for version: %s.\
                    Value MUST be 2.0.2 or 3.0.0' % self.kvp['version']

                # check for GetCapabilities acceptversions
                if 'acceptversions' in self.kvp:
                    for vers in self.kvp['acceptversions'].split(','):
                        if (util.get_version_integer(vers) ==
                                util.get_version_integer(
                                    self.request_version)):
                            break
                        else:
                            error = 1
                            locator = 'acceptversions'
                            code = 'VersionNegotiationFailed'
                            text = 'Invalid parameter value in acceptversions:\
                            %s. Value MUST be 2.0.2 or 3.0.0'                                                              % \
                            self.kvp['acceptversions']

                # test request
                if self.kvp['request'] not in \
                    self.context.model['operations'].keys():
                    error = 1
                    locator = 'request'
                    if self.kvp['request'] in ['Transaction', 'Harvest']:
                        code = 'OperationNotSupported'
                        text = '%s operations are not supported' % \
                        self.kvp['request']
                    else:
                        code = 'InvalidParameterValue'
                        text = 'Invalid value for request: %s' % \
                        self.kvp['request']

        if error == 1:  # return an ExceptionReport
            self.response = self.iface.exceptionreport(code, locator, text)

        else:  # process per the request value

            if 'responsehandler' in self.kvp:
                # set flag to process asynchronously
                # NOTE: 'self. async' (space after the dot) is py2-only syntax
                import threading
                self. async = True
                if ('requestid' not in self.kvp
                        or self.kvp['requestid'] is None):
                    import uuid
                    self.kvp['requestid'] = str(uuid.uuid4())

            # dispatch to the version-specific implementation
            if self.kvp['request'] == 'GetCapabilities':
                self.response = self.iface.getcapabilities()
            elif self.kvp['request'] == 'DescribeRecord':
                self.response = self.iface.describerecord()
            elif self.kvp['request'] == 'GetDomain':
                self.response = self.iface.getdomain()
            elif self.kvp['request'] == 'GetRecords':
                if self. async:  # process asynchronously
                    threading.Thread(target=self.iface.getrecords).start()
                    self.response = self.iface._write_acknowledgement()
                else:
                    self.response = self.iface.getrecords()
            elif self.kvp['request'] == 'GetRecordById':
                self.response = self.iface.getrecordbyid()
            elif self.kvp['request'] == 'GetRepositoryItem':
                self.response = self.iface.getrepositoryitem()
            elif self.kvp['request'] == 'Transaction':
                self.response = self.iface.transaction()
            elif self.kvp['request'] == 'Harvest':
                if self. async:  # process asynchronously
                    threading.Thread(target=self.iface.harvest).start()
                    self.response = self.iface._write_acknowledgement()
                else:
                    self.response = self.iface.harvest()
            else:
                self.response = self.iface.exceptionreport(
                    'InvalidParameterValue', 'request',
                    'Invalid request parameter: %s' % self.kvp['request'])

        # --- post-process the CSW response for the detected mode ---
        if self.mode == 'sru':
            LOGGER.debug('SRU mode detected; processing response.')
            self.response = self.sru().response_csw2sru(
                self.response, self.environ)
        elif self.mode == 'opensearch':
            LOGGER.debug('OpenSearch mode detected; processing response.')
            self.response = self.opensearch().response_csw2opensearch(
                self.response, self.config)

        elif self.mode == 'oaipmh':
            LOGGER.debug('OAI-PMH mode detected; processing response.')
            self.response = self.oaipmh().response(
                self.response, self.oaiargs, self.repository,
                self.config.get('server', 'url'))

        return self._write_response()

    def getcapabilities(self):
        ''' Forward GetCapabilities to the version-specific interface '''
        return self.iface.getcapabilities()

    def describerecord(self):
        ''' Forward DescribeRecord to the version-specific interface '''
        return self.iface.describerecord()

    def getdomain(self):
        ''' Forward GetDomain to the version-specific interface '''
        return self.iface.getdomain()

    def getrecords(self):
        ''' Forward GetRecords to the version-specific interface '''
        return self.iface.getrecords()

    def getrecordbyid(self, raw=False):
        ''' Forward GetRecordById to the version-specific interface

        NOTE(review): the ``raw`` flag is accepted for interface
        compatibility but is not passed through — confirm whether the
        interface should receive it.
        '''
        return self.iface.getrecordbyid()

    def getrepositoryitem(self):
        ''' Forward GetRepositoryItem to the version-specific interface '''
        return self.iface.getrepositoryitem()

    def transaction(self):
        ''' Forward Transaction to the version-specific interface '''
        return self.iface.transaction()

    def harvest(self):
        ''' Forward Harvest to the version-specific interface '''
        return self.iface.harvest()

    def _write_response(self):
        ''' Generate response

        Serializes self.response (JSON when outputformat says so,
        otherwise XML with declaration and pycsw comment), optionally
        wrapping it in a SOAP envelope first.  Returns
        [http_status_line, encoded_body].
        '''
        # set HTTP response headers and XML declaration

        xmldecl = ''
        appinfo = ''

        LOGGER.debug('Writing response.')

        if hasattr(self, 'soap') and self.soap:
            self._gen_soap_wrapper()

        if (isinstance(self.kvp, dict) and 'outputformat' in self.kvp
                and self.kvp['outputformat'] == 'application/json'):
            self.contenttype = self.kvp['outputformat']
            from pycsw.core.formats import fmt_json
            response = fmt_json.exml2json(self.response,
                                          self.context.namespaces,
                                          self.pretty_print)
        else:  # it's XML
            if 'outputformat' in self.kvp:
                self.contenttype = self.kvp['outputformat']
            else:
                self.contenttype = self.mimetype
            # encoding='unicode' makes tostring() return text, which is
            # re-encoded once below together with the declaration.
            response = etree.tostring(self.response,
                                      pretty_print=self.pretty_print,
                                      encoding='unicode')
            xmldecl = '<?xml version="1.0" encoding="%s" standalone="no"?>\n' \
            % self.encoding
            appinfo = '<!-- pycsw %s -->\n' % self.context.version

        # Single encode of declaration + comment + payload.
        s = (u'%s%s%s' % (xmldecl, appinfo, response)).encode(self.encoding)
        LOGGER.debug('Response code: %s',
                     self.context.response_codes[self.status])
        LOGGER.debug('Response:\n%s', s)
        return [self.context.response_codes[self.status], s]

    def _gen_soap_wrapper(self):
        ''' Generate SOAP wrapper

        Replaces self.response with a soapenv:Envelope containing either
        the original response (success) or a soapenv:Fault that embeds it
        (when self.exception is set).
        '''
        LOGGER.debug('Writing SOAP wrapper.')
        node = etree.Element(util.nspath_eval('soapenv:Envelope',
                                              self.context.namespaces),
                             nsmap=self.context.namespaces)

        node.attrib[util.nspath_eval('xsi:schemaLocation',
        self.context.namespaces)] = '%s %s' % \
        (self.context.namespaces['soapenv'], self.context.namespaces['soapenv'])

        node2 = etree.SubElement(
            node, util.nspath_eval('soapenv:Body', self.context.namespaces))

        if self.exception:
            # Fault structure: Code/Value, Reason/Text, Detail (with the
            # original exception report appended under Detail).
            node3 = etree.SubElement(
                node2,
                util.nspath_eval('soapenv:Fault', self.context.namespaces))
            node4 = etree.SubElement(
                node3, util.nspath_eval('soapenv:Code',
                                        self.context.namespaces))

            etree.SubElement(
                node4,
                util.nspath_eval('soapenv:Value',
                                 self.context.namespaces)).text = 'soap:Server'

            node4 = etree.SubElement(
                node3,
                util.nspath_eval('soapenv:Reason', self.context.namespaces))

            etree.SubElement(
                node4, util.nspath_eval('soapenv:Text',
                                        self.context.namespaces)
            ).text = 'A server exception was encountered.'

            node4 = etree.SubElement(
                node3,
                util.nspath_eval('soapenv:Detail', self.context.namespaces))
            node4.append(self.response)
        else:
            node2.append(self.response)

        self.response = node

    def _gen_manager(self):
        ''' Update self.context.model with CSW-T advertising

        Reads the [manager] section of the configuration; when transactions
        are enabled, registers the Transaction and Harvest operations on
        the capabilities model and sets the harvest page size.
        '''
        if (self.config.has_option('manager', 'transactions')
                and self.config.get('manager', 'transactions') == 'true'):

            self.manager = True

            # Resource/schema types accepted by Harvest and Transaction.
            schema_values = [
                'http://www.opengis.net/cat/csw/2.0.2',
                'http://www.opengis.net/cat/csw/3.0',
                'http://www.opengis.net/wms',
                'http://www.opengis.net/wfs',
                'http://www.opengis.net/wcs',
                'http://www.opengis.net/wps/1.0.0',
                'http://www.opengis.net/sos/1.0',
                'http://www.opengis.net/sos/2.0',
                'http://www.isotc211.org/2005/gmi',
                'urn:geoss:waf',
            ]

            self.context.model['operations']['Harvest'] = \
            {'methods': {'get': False, 'post': True}, 'parameters': \
            {'ResourceType': {'values': schema_values}}}

            # NOTE: the original assigned 'Transaction' twice; the first
            # (empty-parameters) assignment was dead code and was removed.
            self.context.model['operations']['Transaction'] = \
            {'methods': {'get': False, 'post': True}, 'parameters': \
            {'TransactionSchemas': {'values': schema_values}}}

            # Default harvest page size, overridable via configuration.
            self.csw_harvest_pagesize = 10
            if self.config.has_option('manager', 'csw_harvest_pagesize'):
                self.csw_harvest_pagesize = int(
                    self.config.get('manager', 'csw_harvest_pagesize'))

    def _test_manager(self):
        ''' Verify that transactions are allowed for this request '''

        if self.config.get('manager', 'transactions') != 'true':
            raise RuntimeError('CSW-T interface is disabled')

        ipaddress = self.environ['REMOTE_ADDR']

        # Transactions additionally require the client IP to appear in the
        # configured whitelist; no whitelist configured means no access.
        # (Equivalent to the original `not A or (A and not B)` check.)
        has_whitelist = self.config.has_option('manager', 'allowed_ips')
        ip_allowed = has_whitelist and util.ipaddress_in_whitelist(
            ipaddress, self.config.get('manager', 'allowed_ips').split(','))
        if not ip_allowed:
            raise RuntimeError(
                'CSW-T operations not allowed for this IP address: %s' %
                ipaddress)

    def _cql_update_queryables_mappings(self, cql, mappings):
        ''' Transform CQL query's properties to underlying DB columns

        Each queryable name found in *cql* is replaced with its mapped
        database column.  Mapping entries may be dicts with a 'dbcol' key
        or plain column-name strings.  Returns the rewritten CQL text, or
        None when *cql* is None.
        '''
        LOGGER.debug('Raw CQL text = %s.' % cql)
        LOGGER.debug(str(mappings.keys()))
        if cql is not None:
            for key in mappings.keys():
                try:
                    cql = cql.replace(key, mappings[key]['dbcol'])
                except (KeyError, TypeError):
                    # Mapping entry is a plain column name, not a dict.
                    # (Narrowed from a bare except, which also hid real
                    # failures such as attribute errors.)
                    cql = cql.replace(key, mappings[key])
            LOGGER.debug('Interpolated CQL text = %s.' % cql)
            return cql

    def _process_responsehandler(self, xml):
        ''' Process response handler

        Delivers *xml* to the URL given in kvp['responsehandler']:
        mailto: sends it by SMTP, ftp: uploads it.  Delivery errors are
        logged and swallowed (best-effort, asynchronous path).
        '''

        if self.kvp['responsehandler'] is not None:
            LOGGER.debug('Processing responsehandler %s.' %
                         self.kvp['responsehandler'])

            uprh = urlparse.urlparse(self.kvp['responsehandler'])

            if uprh.scheme == 'mailto':  # email
                import smtplib

                LOGGER.debug('Email detected.')

                # SMTP relay defaults to localhost unless configured.
                smtp_host = 'localhost'
                if self.config.has_option('server', 'smtp_host'):
                    smtp_host = self.config.get('server', 'smtp_host')

                body = 'Subject: pycsw %s results\n\n%s' % \
                (self.kvp['request'], xml)

                try:
                    LOGGER.debug('Sending email.')
                    msg = smtplib.SMTP(smtp_host)
                    msg.sendmail(
                        self.config.get('metadata:main', 'contact_email'),
                        uprh.path, body)
                    msg.quit()
                    LOGGER.debug('Email sent successfully.')
                except Exception as err:
                    LOGGER.debug('Error processing email: %s.' % str(err))

            elif uprh.scheme == 'ftp':
                import ftplib

                LOGGER.debug('FTP detected.')

                try:
                    LOGGER.debug('Sending to FTP server.')
                    ftp = ftplib.FTP(uprh.hostname)
                    if uprh.username is not None:
                        ftp.login(uprh.username, uprh.password)
                    # Strip the leading '/' from the URL path for STOR.
                    ftp.storbinary('STOR %s' % uprh.path[1:], StringIO(xml))
                    ftp.quit()
                    LOGGER.debug('FTP sent successfully.')
                except Exception as err:
                    LOGGER.debug('Error processing FTP: %s.' % str(err))

    @staticmethod
    def normalize_kvp(kvp):
        """Lowercase every key of *kvp*, leaving values untouched.

        The CSW standard treats KVP parameter names case-insensitively
        (see for example note C on Table 62 - KVP Encoding for
        DescribeRecord operation request of the CSW standard version
        2.0.2).

        :arg kvp: a mapping with Key Value Pairs
        :type kvp: dict
        :returns: A new dictionary with normalized parameters
        """
        return dict((name.lower(), value)
                    for name, value in kvp.iteritems())
# Пример #33 (Example #33)
# 0
def run(ini_file='TOPKAPI.ini'):
    """Run the model with the set-up defined by `ini_file`.

    """

    ##================================##
    ##  Read the input file (*.ini)   ##
    ##================================##
    config = SafeConfigParser()
    config.read(ini_file)
    print 'Read the file ', ini_file

    ##~~~~~~ Numerical_options ~~~~~~##
    solve_s = config.getfloat('numerical_options', 'solve_s')
    solve_o = config.getfloat('numerical_options', 'solve_o')
    solve_c = config.getfloat('numerical_options', 'solve_c')
    only_channel_output = config.getboolean('numerical_options',
                                            'only_channel_output')

    ##~~~~~~~~~~~ input files ~~~~~~~~~~~##
    #Param
    file_global_param = config.get('input_files', 'file_global_param')
    file_cell_param = config.get('input_files', 'file_cell_param')
    #Rain
    file_rain = config.get('input_files', 'file_rain')
    #ETP
    file_ET = config.get('input_files', 'file_ET')

    #~~~~~~~~~~~ Group (simulated event) ~~~~~~~~~~~##
    group_name = config.get('groups', 'group_name')

    ##~~~~~~ Calibration ~~~~~~##
    fac_L = config.getfloat('calib_params', 'fac_L')
    fac_Ks = config.getfloat('calib_params', 'fac_Ks')
    fac_n_o = config.getfloat('calib_params', 'fac_n_o')
    fac_n_c = config.getfloat('calib_params', 'fac_n_c')

    ##~~~~~~ External flows ~~~~~~##
    external_flow = config.getboolean('external_flow', 'external_flow')
    if external_flow:
        file_Qexternal_flow = config.get('external_flow',
                                         'file_Qexternal_flow')
        Xexternal_flow = config.getfloat('external_flow', 'Xexternal_flow')
        Yexternal_flow = config.getfloat('external_flow', 'Yexternal_flow')

    ##~~~~~~~~~~~ output files ~~~~~~~~~~##
    file_out = config.get('output_files', 'file_out')
    ut.check_file_exist(file_out)  #create path_out if it doesn't exist
    if os.path.exists(file_out):
        first_run = False
    else:
        first_run = True

    append_output = config.getboolean('output_files', 'append_output')
    if append_output is True:
        fmode = 'a'
    else:
        fmode = 'w'

    ##============================##
    ##   Read the forcing data    ##
    ##============================##
    print 'Read the forcing data'

    #~~~~Rainfall
    h5file_in = h5.openFile(file_rain, mode='r')
    group = '/' + group_name + '/'
    node = h5file_in.getNode(group + 'rainfall')
    ndar_rain = node.read()
    h5file_in.close()

    #~~~~ETr - Reference crop ET
    h5file_in = h5.openFile(file_ET, mode='r')
    group = '/' + group_name + '/'
    node = h5file_in.getNode(group + 'ETr')
    ndar_ETr = node.read()
    h5file_in.close()

    #~~~~ETo - Open water potential evap.
    h5file_in = h5.openFile(file_ET, mode='r')
    group = '/' + group_name + '/'
    node = h5file_in.getNode(group + 'ETo')
    ndar_ETo = node.read()
    h5file_in.close()

    #~~~~external_flow flows
    if external_flow:
        ar_Qexternal_flow = np.loadtxt(file_Qexternal_flow)[:, 5]

    ##============================##
    ## Pretreatment of input data ##
    ##============================##
    print 'Pretreatment of input data'

    #~~~~Read Global parameters file
    X, Dt, alpha_s, \
    alpha_o, alpha_c, \
    A_thres, W_min, W_max = pm.read_global_parameters(file_global_param)

    #~~~~Read Cell parameters file
    ar_cell_label, ar_coorx, \
    ar_coory, ar_lambda, \
    ar_Xc, ar_dam, \
    ar_tan_beta, ar_tan_beta_channel, \
    ar_L0, ar_Ks0, \
    ar_theta_r, ar_theta_s, \
    ar_n_o0, ar_n_c0, \
    ar_cell_down, ar_pVs_t0, \
    ar_Vo_t0, ar_Qc_t0, \
    ar_kc, psi_b, lamda = pm.read_cell_parameters(file_cell_param)

    #~~~~Number of cell in the catchment
    nb_cell = len(ar_cell_label)

    #~~~~Computation of cell order
    ar_label_sort = pm.sort_cell(ar_cell_label, ar_cell_down)

    #~~~~Computation of upcells
    li_cell_up = pm.direct_up_cell(ar_cell_label, ar_cell_down, ar_label_sort)

    #~~~~Computation of drained area
    ar_A_drained = pm.drained_area(ar_label_sort, li_cell_up, X)

    #~~~~Apply calibration factors to the parameter values
    ar_L = ar_L0 * fac_L
    ar_Ks = ar_Ks0 * fac_Ks
    ar_n_o = ar_n_o0 * fac_n_o
    ar_n_c = ar_n_c0 * fac_n_c

    print 'Max L=', max(ar_L)
    print 'Max Ks=', max(ar_Ks)
    print 'Max n_o=', max(ar_n_o)
    print 'Max n_c=', max(ar_n_c)

    #~~~~Computation of model parameters from physical parameters
    ar_Vsm, ar_b_s, ar_b_o, \
    ar_W, ar_b_c = pm.compute_cell_param(X, ar_Xc, Dt, alpha_s,
                                         alpha_o, alpha_c, nb_cell,
                                         A_thres, W_max, W_min,
                                         ar_lambda, ar_tan_beta,
                                         ar_tan_beta_channel, ar_L,
                                         ar_Ks, ar_theta_r, ar_theta_s,
                                         ar_n_o, ar_n_c, ar_A_drained)

    #~~~~Look for the cell of external_flow tunnel
    if external_flow:
        cell_external_flow = ut.find_cell_coordinates(ar_cell_label,
                                                      Xexternal_flow,
                                                      Yexternal_flow, ar_coorx,
                                                      ar_coory, ar_lambda)

        print 'external flows will be taken into account for cell no',\
            cell_external_flow, ' coordinates ('\
            ,Xexternal_flow,',',Yexternal_flow,')'

    #~~~~Number of simulation time steps
    nb_time_step = len(ndar_rain[:, 0])

    ##=============================##
    ##  Variable array definition  ##
    ##=============================##

    ## Initialisation of the reservoirs
    #Matrix of soil,overland and channel store at the begining of the time step
    if append_output and not first_run:
        print 'Initialize from file'
        # read from file
        h5file_in = h5.openFile(file_out, mode='r')

        node = h5file_in.getNode('/Soil/V_s')
        ar_Vs0 = node.read()[-1, :]

        node = h5file_in.getNode('/Overland/V_o')
        ar_Vo0 = node.read()[-1, :]

        node = h5file_in.getNode('/Channel/V_c')
        ar_Vc0 = node.read()[-1, :]

        h5file_in.close()
    else:
        print 'Initialize from parms'
        ar_Vs0 = fl.initial_volume_soil(ar_pVs_t0, ar_Vsm)
        ar_Vo0 = ar_Vo_t0
        ar_Vc0 = fl.initial_volume_channel(ar_Qc_t0, ar_W, X, ar_n_c)

    ## Computed variables
    #Matrix of soil,overland and channel store at the end of the time step
    ar_Vs1 = np.ones(nb_cell) * -99.9
    ar_Vo1 = np.ones(nb_cell) * -99.9
    ar_Vc1 = np.ones(nb_cell) * -99.9

    #Matrix of outflows between two time steps
    ar_Qs_out = np.ones(nb_cell) * -99.9
    ar_Qo_out = np.ones(nb_cell) * -99.9
    ar_Qc_out = np.zeros(nb_cell)

    ## Intermediate variables
    ar_a_s = np.ones(nb_cell) * -99.9
    ar_a_o = np.ones(nb_cell) * -99.9
    ar_a_c = np.ones(nb_cell) * -99.9
    ar_Q_to_next_cell = np.ones(nb_cell) * -99.9
    ar_Q_to_channel = np.ones(nb_cell) * -99.9
    ar_Q_to_channel_sub = np.zeros(nb_cell)
    ar_Qc_cell_up = np.zeros(nb_cell)
    ar_ETa = np.zeros(nb_cell)
    ar_ET_channel = np.zeros(nb_cell)

    ##=============================##
    ## HDF5 output file definition ##
    ##=============================##
    h5file = h5.openFile(file_out, mode=fmode, title='TOPKAPI_out')

    root = h5file.getNode('/')
    root._v_attrs.pytopkapi_version = pytopkapi.__version__
    root._v_attrs.pytopkapi_git_revision = pytopkapi.__git_revision__

    atom = h5.Float32Atom()
    h5filter = h5.Filters(9)  # maximum compression

    # create file structure as necessary
    grp_name = '/Soil'
    if grp_name not in h5file:
        h5file.createGroup('/', 'Soil', 'Soil arrays')
    if grp_name + '/Qs_out' not in h5file:
        array_Qs_out = h5file.createEArray(grp_name,
                                           'Qs_out',
                                           atom,
                                           shape=(0, nb_cell),
                                           title='m3/s',
                                           filters=h5filter,
                                           expectedrows=nb_time_step)
    else:
        array_Qs_out = h5file.getNode(grp_name + '/Qs_out')
    if grp_name + '/V_s' not in h5file:
        array_Vs = h5file.createEArray(grp_name,
                                       'V_s',
                                       atom,
                                       shape=(0, nb_cell),
                                       title='m3',
                                       filters=h5filter,
                                       expectedrows=nb_time_step + 1)
    else:
        array_Vs = h5file.getNode(grp_name + '/V_s')

    grp_name = '/Overland'
    if grp_name not in h5file:
        h5file.createGroup('/', 'Overland', 'Overland arrays')
    if grp_name + '/Qo_out' not in h5file:
        array_Qo_out = h5file.createEArray(grp_name,
                                           'Qo_out',
                                           atom,
                                           shape=(0, nb_cell),
                                           title='m3/s',
                                           filters=h5filter,
                                           expectedrows=nb_time_step)
    else:
        array_Qo_out = h5file.getNode(grp_name + '/Qo_out')
    if grp_name + '/V_o' not in h5file:
        array_Vo = h5file.createEArray(grp_name,
                                       'V_o',
                                       atom,
                                       shape=(0, nb_cell),
                                       title='m3',
                                       filters=h5filter,
                                       expectedrows=nb_time_step + 1)
    else:
        array_Vo = h5file.getNode(grp_name + '/V_o')

    grp_name = '/Channel'
    if grp_name not in h5file:
        h5file.createGroup('/', 'Channel', 'Channel arrays')
    if grp_name + '/Qc_out' not in h5file:
        array_Qc_out = h5file.createEArray(grp_name,
                                           'Qc_out',
                                           atom,
                                           shape=(0, nb_cell),
                                           title='m3/s',
                                           filters=h5filter,
                                           expectedrows=nb_time_step)
    else:
        array_Qc_out = h5file.getNode(grp_name + '/Qc_out')
    if grp_name + '/V_c' not in h5file:
        array_Vc = h5file.createEArray(grp_name,
                                       'V_c',
                                       atom,
                                       shape=(0, nb_cell),
                                       title='m3',
                                       filters=h5filter,
                                       expectedrows=nb_time_step)
    else:
        array_Vc = h5file.getNode(grp_name + '/V_c')
    if grp_name + '/Ec_out' not in h5file:
        array_Ec_out = h5file.createEArray(grp_name,
                                           'Ec_out',
                                           atom,
                                           shape=(0, nb_cell),
                                           title='m3',
                                           filters=h5filter,
                                           expectedrows=nb_time_step)
    else:
        array_Ec_out = h5file.getNode(grp_name + '/Ec_out')

    if '/ET_out' not in h5file:
        array_ET_out = h5file.createEArray('/',
                                           'ET_out',
                                           atom,
                                           shape=(0, nb_cell),
                                           title='mm',
                                           filters=h5filter,
                                           expectedrows=nb_time_step)
    else:
        array_ET_out = h5file.getNode('/ET_out')

    if '/Q_down' not in h5file:
        array_Q_down = h5file.createEArray('/',
                                           'Q_down',
                                           atom,
                                           shape=(0, nb_cell),
                                           title='m3',
                                           filters=h5filter,
                                           expectedrows=nb_time_step)
    else:
        array_Q_down = h5file.getNode('/Q_down')

    if append_output is False or first_run is True:
        #Write the initial values into the output file
        array_Vs.append(ar_Vs0.reshape((1, nb_cell)))
        array_Vo.append(ar_Vo0.reshape((1, nb_cell)))
        array_Vc.append(ar_Vc0.reshape((1, nb_cell)))

        array_Qs_out.append(ar_Qs_out.reshape((1, nb_cell)))
        array_Qo_out.append(ar_Qo_out.reshape((1, nb_cell)))
        array_Qc_out.append(ar_Qc_out.reshape((1, nb_cell)))

        array_Q_down.append(ar_Q_to_next_cell.reshape((1, nb_cell)))

        array_ET_out.append(ar_ETa.reshape((1, nb_cell)))

        E_vol = ar_ET_channel * 1e-3 * ar_W * ar_Xc
        array_Ec_out.append(E_vol.reshape((1, nb_cell)))

    eff_theta = ar_theta_s - ar_theta_r

    ##===========================##
    ##     Core of the Model     ##
    ##===========================##
    print '** NB_CELL=', nb_cell
    print '** NB_TIME_STEP=', nb_time_step
    print '--> SIMULATIONS <--'

    ## Loop on time
    for t in range(nb_time_step):
        print t + 1, '/', nb_time_step

        eff_sat = ar_Vs0 / ar_Vsm

        # estimate soil suction head using Brookes and Corey (1964)
        psi = psi_b / np.power(eff_sat, 1.0 / lamda)

        ## Loop on cells
        n = -1
        for cell1 in ar_label_sort:
            cell = np.where(ar_cell_label == cell1)[0][0]
            n = n + 1

            ## ======================== ##
            ## ===== INTERCEPTION ===== ##
            ## ======================== ##
            ## No interception for the moment

            ## ======================== ##
            ## ===== INFILTRATION ===== ##
            ## ======================== ##
            rain_rate = ndar_rain[t, cell] / Dt

            infiltration_depth = green_ampt_cum_infiltration(
                rain_rate, psi[cell], eff_theta[cell], eff_sat[cell],
                ar_Ks[cell], Dt)

            ## ====================== ##
            ## ===== SOIL STORE ===== ##
            ## ====================== ##
            #~~~~ Computation of soil input
            ar_a_s[cell] = fl.input_soil(infiltration_depth, Dt, X,
                                         ar_Q_to_next_cell, li_cell_up[cell])

            #~~~~ Resolution of the equation dV/dt=a_s-b_s*V^alpha_s
            # Calculate the volume in the soil store at the end of the
            # current time-step.

            Vs_prim = om.solve_storage_eq(ar_a_s[cell], ar_b_s[cell], alpha_s,
                                          ar_Vs0[cell], Dt, solve_s)

            #~~~~ Computation of soil outflow and overland input
            ar_Qs_out[cell], ar_Vs1[cell] = fl.output_soil(
                ar_Vs0[cell], Vs_prim, ar_Vsm[cell], ar_a_s[cell],
                ar_b_s[cell], alpha_s, Dt)
            if ar_Qs_out[cell] < 0:
                print 'Problem Soil:output greater than input....'
                print 'n=', n, 'label=', cell
                stop

            ## ========================== ##
            ## ===== OVERLAND STORE ===== ##
            ## ========================== ##
            #~~~~ Computation of overland input
            rain_excess = ndar_rain[t, cell] - infiltration_depth
            # convert mm to m^3/s
            rain_excess = max(0, (rain_excess * (10**-3) / Dt) * X**2)

            ar_a_o[cell] = max(0,
                               ar_a_s[cell] \
                               - ((ar_Vs1[cell]-ar_Vs0[cell])/Dt \
                                  + ar_Qs_out[cell]) \
                               + rain_excess)

            #~~~~ Resolution of the equation dV/dt=a_o-b_o*V^alpha_o

            ar_Vo1[cell] = om.solve_storage_eq(ar_a_o[cell], ar_b_o[cell],
                                               alpha_o, ar_Vo0[cell], Dt,
                                               solve_o)

            #~~~~ Computation of overland outflows
            ar_Qo_out[cell] = fl.Qout_computing(ar_Vo0[cell], ar_Vo1[cell],
                                                ar_a_o[cell], Dt)

            if ar_Qo_out[cell] < 0:
                print 'Problem Overland:output greater than input....'
                print 'n=', n, 'label=', cell
                stop

            ## ============================= ##
            ## ===== FLOW PARTITIONING ===== ##
            ## ============================= ##
            # ar_Q_to_channel_sub doesn't get used for anything?

            ar_Q_to_next_cell[cell], \
            ar_Q_to_channel[cell], \
            ar_Q_to_channel_sub[cell] = fl.flow_partitioning(ar_lambda[cell],
                                                             ar_Qs_out[cell],
                                                             ar_Qo_out[cell],
                                                             ar_W[cell],
                                                             X, ar_Xc[cell])

            ## ======================== ##
            ## ===== CHANNEL STORE ==== ##
            ## ======================== ##
            if ar_lambda[cell] == 1:
                if ar_cell_down[cell] >= 0 \
                   and ar_lambda[ar_cell_down[cell]] == 0:

                    print 'Problem: the present cell has a channel but not the cell down...'
                    Stop

                #~~~~ Computation of channel input
                ar_a_c[cell], \
                ar_Qc_cell_up[cell] = fl.input_channel(ar_Qc_out,
                                                       ar_Q_to_channel[cell],
                                                       li_cell_up[cell])

                if external_flow \
                and cell == np.where(ar_cell_label==cell_external_flow)[0][0]:
                    ar_a_c[cell] = ar_a_c[cell] + ar_Qexternal_flow[t]

                #~~~~ Resolution of the equation dV/dt=a_c-b_c*V^alpha_c

                ar_Vc1[cell] = om.solve_storage_eq(ar_a_c[cell], ar_b_c[cell],
                                                   alpha_c, ar_Vc0[cell], Dt,
                                                   solve_c)

                #~~~~ Computation of channel outflows
                ar_Qc_out[cell] = fl.Qout_computing(ar_Vc0[cell], ar_Vc1[cell],
                                                    ar_a_c[cell], Dt)

                if ar_Qc_out[cell] < 0:
                    print 'Problem Channel: output greater than input....'
                    stop
                if str(ar_Qc_out[cell]).count('N') > 0:
                    print ar_Qc_out[cell]
                    print 'Problem Channel: Non authorized operand....'
                    stop

            else:
                ar_a_c[cell] = 0.
                ar_Vc1[cell] = 0.
                ar_Qc_out[cell] = 0.

            ## ============================== ##
            ## ===== EVAPOTRANSPIRATION ===== ##
            ## ============================== ##
            #~~~~~ From soil
            ar_ETa[cell], \
            ar_Vs1[cell], \
            ar_Vo1[cell] = em.evapot_soil_overland(ar_Vo1[cell],
                                                   ar_Vs1[cell],
                                                   ar_Vsm[cell],
                                                   ar_kc[cell],
                                                   ndar_ETr[t, cell], X)

            #~~~~~ Evaporation from channel
            if ar_lambda[cell] == 1:
                ar_ET_channel[cell], \
                ar_Vc1[cell] = em.evapor_channel(ar_Vc1[cell],
                                                 ndar_ETo[t, cell],
                                                 ar_W[cell], ar_Xc[cell])

        ####===================================####
        #### Affectation of new vector values  ####
        ####===================================####
        ar_Vs0 = np.array(ar_Vs1)
        ar_Vo0 = np.array(ar_Vo1)
        ar_Vc0 = np.array(ar_Vc1)

        ####===================================####
        #### Results writing at each time step ####
        ####===================================####
        array_Vs.append(ar_Vs1.reshape((1, nb_cell)))
        array_Vo.append(ar_Vo1.reshape((1, nb_cell)))
        array_Vc.append(ar_Vc1.reshape((1, nb_cell)))

        array_Qs_out.append(ar_Qs_out.reshape((1, nb_cell)))
        array_Qo_out.append(ar_Qo_out.reshape((1, nb_cell)))
        array_Qc_out.append(ar_Qc_out.reshape((1, nb_cell)))

        array_Q_down.append(ar_Q_to_next_cell.reshape((1, nb_cell)))

        array_ET_out.append(ar_ETa.reshape((1, nb_cell)))

        E_vol = ar_ET_channel * 1e-3 * ar_W * ar_Xc
        array_Ec_out.append(E_vol.reshape((1, nb_cell)))

    h5file.close()

    print ' '
    print '***** THE END *****'