Example no. 1
import logging
import sys
import xml.etree.ElementTree as ET


def get_bch(mms_id, holding_id):
    # 'session' (an authenticated requests.Session) and 'holdings_api' (the
    # holdings URL template) are defined elsewhere in the original program.
    hol = session.get(holdings_api.format(mms_id=mms_id) + "/" + holding_id,
                      headers={"accept": "application/xml"})
    try:
        holxml = ET.fromstring(hol.text)
        # Read subfields $b, $c and $h of the MARC 852 field of the holding.
        b = holxml.find('.//*[@tag="852"]/*[@code="b"]').text
        c = holxml.find('.//*[@tag="852"]/*[@code="c"]').text
        h = holxml.find('.//*[@tag="852"]/*[@code="h"]').text
    except Exception:
        logging.exception("Error while reading the target holding (XML).")
        logging.error(hol.text)
        print("An error occurred. Please check the log file.")
        input("Press ENTER to exit the program.")
        sys.exit(1)

    return b, c, h
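
A minimal usage sketch for the snippet above, assuming an Alma-style holdings endpoint; the URL template, API key and the two IDs are placeholders, not values from the original program.

import requests

session = requests.Session()
session.headers.update({"Authorization": "apikey YOUR_API_KEY"})  # hypothetical key
holdings_api = "https://api-eu.hosted.exlibrisgroup.com/almaws/v1/bibs/{mms_id}/holdings"

library, location, call_number = get_bch("9900000000001234", "2200000000001234")
print(library, location, call_number)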
Example no. 2
import json
import logging

import pandas as pd


def create_categorical_mapping(df):
    try:
        # Build a CategoricalDtype for every object (string) column.
        cat_columns = df.select_dtypes(['object']).columns
        _CATEGORICAL_TYPES = {}
        for col in cat_columns:
            _CATEGORICAL_TYPES[col] = pd.api.types.CategoricalDtype(
                categories=df[col].unique())

        # Map each category value of each column to an integer code.
        cat_data = {}
        for col in cat_columns:
            opt = list(_CATEGORICAL_TYPES[col].categories)
            cat_data[col] = dict(zip(opt, range(len(opt))))

        # Persist the mapping so it can be reused later (e.g. at prediction time).
        with open("categorical_data.json", 'w') as cd:
            json.dump(cat_data, cd)

        logging.debug("Converted Categorical Data")
        return df, cat_data

    except Exception as e:
        logging.error(e)
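
A short hypothetical usage example for the function above; the column names and values are made up only to show the shape of the returned mapping.

df = pd.DataFrame({"color": ["red", "blue", "red"], "size": [1, 2, 3]})
df, cat_data = create_categorical_mapping(df)
print(cat_data)  # {'color': {'red': 0, 'blue': 1}} -- 'size' is numeric, so it is skipped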
Example no. 3
    #           'BODY':'hit rate limit at {}, retry in {} seconds.'.format(
    #               curtime.strftime("%Y-%m-%d %H:%M:%S"), backoff_counter)}

    #  thisHost = socket.gethostname()
    #  thisIP = socket.gethostbyname(thisHost)


    #  Email['ALL'] = 'From: %s\nTo: %s\nSubject: %s\n\n%s' % (Email['FROM'], Email['TO'],
    #          Email['SUBJECT'], Email['BODY'])
    #  server = smtplib.SMTP_SSL(email['server'])
    #  #server.starttls()
    #  #server.connect(email['server'], 465)
    #  #server.ehlo()
    #  server.login(email['user'], email['password'])
    #  server.sendmail(Email['FROM'], Email['TO'], Email['ALL'])
    #  server.quit()

    #  #time.sleep(backoff_counter)

    #  #auth = tweepy.OAuthHandler(config['consumer_key'], config['consumer_secret'])
    #  #auth.set_access_token(config['access_token'], config['access_token_secret'])
    #  #api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)

    #  #stream_listener = StreamListener()
    #  #stream = tweepy.Stream(auth=api.auth, listener=stream_listener)

    #  logging.info("Reconnect")
    #  break
  logging.error('error at {}.'.format(curtime.strftime("%Y-%m-%d %H:%M:%S")))
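
The commented-out block above outlines an e-mail notification for rate-limit hits followed by a Tweepy reconnect; a minimal, self-contained sketch of just the notification part might look like the following (server address, credentials, addresses and the helper name are placeholders, not values from the original program).

import smtplib
from datetime import datetime

def send_rate_limit_mail(cfg, backoff_counter):
    # Compose and send a plain-text notification over implicit TLS (port 465).
    curtime = datetime.now()
    body = 'hit rate limit at {}, retry in {} seconds.'.format(
        curtime.strftime("%Y-%m-%d %H:%M:%S"), backoff_counter)
    message = 'From: %s\nTo: %s\nSubject: %s\n\n%s' % (
        cfg['from'], cfg['to'], 'Rate limit hit', body)
    server = smtplib.SMTP_SSL(cfg['server'])
    server.login(cfg['user'], cfg['password'])
    server.sendmail(cfg['from'], cfg['to'], message)
    server.quit()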

Example no. 4
	def get(self, throttle=False, new_dt_exchange=None):
		"""
		Parameters
		----------
		throttle : bool, optional
			Option to modify nTS_exchange based on the precipitation intensity. The default is False.
		new_dt_exchange : int, optional
			Explicitly set a new length of the local simulation time period, i.e. manually change dt_exchange for the current local simulation period. The default is None.
		"""
		strt_time = time.time()
		start = datetime(self.times.stmp["local_start"].year, self.times.stmp["local_start"].month, self.times.stmp["local_start"].day,\
		self.times.stmp["local_start"].hour)
		end = datetime(self.times.stmp["local_end"].year, self.times.stmp["local_end"].month, self.times.stmp["local_end"].day,\
		self.times.stmp["local_end"].hour)
		p_mask = (self.pDF.index >= start) & (self.pDF.index <= end)
		pDF_sel = self.pDF.loc[p_mask]
		p = pDF_sel[pDF_sel.columns[0]].to_numpy()[0]
		if throttle:
			if not new_dt_exchange:
				new_dt_exchange = self.times.dt["exchange"]
			# The number of exchange sub-steps grows with the peak precipitation intensity.
			if p.max() == 0:
				self.times.nTS["exchange"] = 1
			elif p.max() <= 0.1:
				self.times.nTS["exchange"] = 2
			elif p.max() <= 0.2:
				self.times.nTS["exchange"] = 4
			elif p.max() <= 0.3:
				self.times.nTS["exchange"] = 6
			elif p.max() <= 0.5:
				self.times.nTS["exchange"] = 8
			elif p.max() <= 1:
				self.times.nTS["exchange"] = 10
			else:
				self.times.nTS["exchange"] = 15
			old_dt_exch = self.times.dt["exchange"]
			self.times.dt["exchange"] = new_dt_exchange
			self.times.dt["FortRun"] = self.times.dt["exchange"]/self.times.nTS["exchange"]
			self.times.update(update_by=self.times.dt["exchange"]-old_dt_exch, internal=True)
			logging.warning("PET: !Throttling! nTS: {0}. ts_exch: {1}s".format(self.times.nTS["exchange"], self.times.dt["exchange"]))
		p = np.repeat(p, (self.times.nTS["exchange"])+1)
		p = p/3600
		logging.info("PET: max P for this ts is {} mm/s".format(p.max()))
		et_mask = (self.etDF.index >= start) & (self.etDF.index <= end)
		etDF_sel = self.etDF.loc[et_mask]
		et = etDF_sel[etDF_sel.columns[0]].to_numpy()[0]
		et = np.repeat(et, int(self.times.nTS["exchange"]/et.size)+1)  # integer repeat count for np.repeat
		et = et/3600
		logging.info("PET: max ET for this ts is {} mm/s".format(et.max()))
		if pDF_sel.index.equals(etDF_sel.index):
			petDF_sel = pDF_sel
			petDF_sel[etDF_sel.columns[0]] = etDF_sel[etDF_sel.columns[0]]
		else:
			logging.errorr("WARNING: P and ET indexes do not match")
			sys.exit("WARNING: P and ET indexes do not match")
		self.Fort.Ini.p = p
		self.Fort.Ini.et = et
		self.Fort.Ini.ts = p.size
		self.Fort.Ini.ts_size = self.times.dt["FortRun"]
		self.p = p
		self.et = et
		self.petDF_sel = petDF_sel
		self.times.run["PET.get"].append(time.time() - strt_time)
		logging.info("PET: Fetched and Passed P and ET values to FORT")