def import_file(self, sess):
    """Run a single import pass for this data-collection contract.

    Looks up the contract's properties, and if the importer is enabled,
    delegates to the handler for the configured protocol.  Sets
    ``self.is_error`` and returns True if a new file was found and loaded.
    """
    found_new = False
    try:
        contract = Contract.get_dc_by_id(sess, self.contract_id)
        properties = contract.make_properties()
        if properties["enabled"]:
            protocol = properties["protocol"]
            self.log("Protocol is " + protocol)
            # Dispatch on protocol name rather than an if/elif chain.
            handlers = {
                'ftp': self.ftp_handler,
                'sftp': self.sftp_handler,
                'https': self.https_handler,
            }
            handler = handlers.get(protocol)
            if handler is None:
                self.log("Protocol '" + protocol + "' not recognized.")
            else:
                found_new = handler(sess, properties, contract)
        else:
            self.log(
                "Importer is disabled. To enable it, set "
                "the 'enabled' property to 'True'.")
        self.is_error = False
    except BadRequest as e:
        self.log("Problem " + str(e))
        sess.rollback()
        self.is_error = True
    except Exception:
        self.log("Unknown Exception " + traceback.format_exc())
        sess.rollback()
        self.is_error = True
    return found_new
def import_file(self, sess):
    """Run a single import pass for this data-collection contract.

    Delegates to the handler for the configured protocol when the importer
    is enabled, refreshes ``self.wait_seconds`` from the ``check_minutes``
    property, and records success/failure in ``self.is_error``.  Returns
    True if a new file was found and loaded.
    """
    found_new = False
    # Default polling interval; overridden below when the importer is enabled.
    self.wait_seconds = 30 * 60
    try:
        contract = Contract.get_dc_by_id(sess, self.contract_id)
        properties = contract.make_properties()
        if not properties["enabled"]:
            self.log(
                "Importer is disabled. To enable it, set the 'enabled' property "
                "to 'True'.")
        else:
            protocol = properties["protocol"]
            self.log("Protocol is " + protocol)
            # The https handler is a module-level function taking the logger,
            # unlike the ftp/sftp methods, so dispatch via thunks.
            handlers = {
                "ftp": lambda: self.ftp_handler(sess, properties, contract),
                "sftp": lambda: self.sftp_handler(sess, properties, contract),
                "https": lambda: https_handler(
                    sess, self.log, properties, contract),
            }
            if protocol in handlers:
                found_new = handlers[protocol]()
            else:
                self.log(f"Protocol '{protocol}' not recognized.")
            self.wait_seconds = properties.get("check_minutes", 30) * 60
        self.is_error = False
    except BadRequest as e:
        self.log(f"Problem {e}")
        sess.rollback()
        self.is_error = True
    except Exception:
        self.log(f"Unknown Exception {traceback.format_exc()}")
        sess.rollback()
        self.is_error = True
    return found_new
def run(self):
    """Parse ``self.istream`` with the parser matching the file extension
    and insert the resulting half-hourly data for the DC contract.

    Problems are appended to ``self.messages`` rather than raised; the
    session is always closed.
    """
    session = None
    try:
        session = Session()
        contract = Contract.get_dc_by_id(session, self.dc_contract_id)
        session.rollback()
        props = contract.make_properties()
        mpan_map = props.get("mpan_map", {})
        # Parser module is chosen from the extension, e.g. ".df2" ->
        # chellow.hh_parser_df2 — presumably conv_ext[0] is the dotted
        # extension string; TODO confirm against callers.
        module_name = "chellow.hh_parser_" + self.conv_ext[0][1:].replace(
            ".", "_")
        parser_module = importlib.import_module(module_name)
        self.converter = parser_module.create_parser(self.istream, mpan_map)
        session.rollback()
        HhDatum.insert(session, self.converter, contract)
        session.commit()
    except BadRequest as e:
        self.messages.append(e.description)
    except BaseException:
        self.messages.append("Outer problem " + traceback.format_exc())
    finally:
        if session is not None:
            session.close()
def sftp_handler(self, sess, properties, contract):
    """Poll an SFTP server for at most one new file and load it.

    Scans each configured directory in order, tracking per-directory
    progress in the contract state under ``last_import_keys`` (keys are
    "<mtime>_<filename>" strings, compared lexically).  Downloads the
    earliest new file found, feeds it to HhDataImportProcess, and on
    success persists the updated state.  Returns True if a file was
    loaded, False if nothing new was found.  Raises BadRequest if the
    import produced any messages.
    """
    host_name = properties["hostname"]
    user_name = properties["username"]
    password = properties["password"]
    try:
        port = properties["port"]
    except KeyError:
        port = None  # port is optional; pysftp will use its default
    file_type = properties["file_type"]
    directories = properties["directories"]
    state = contract.make_state()
    try:
        last_import_keys = state["last_import_keys"]
    except KeyError:
        last_import_keys = state["last_import_keys"] = {}
    sess.rollback()
    self.log(
        "Connecting to sftp server at " + host_name + ":" + str(port) + ".")
    cnopts = pysftp.CnOpts()
    # NOTE(review): host key checking is disabled — the server's identity
    # is not verified.
    cnopts.hostkeys = None
    ftp = pysftp.Connection(
        host_name, username=user_name, password=password, cnopts=cnopts)
    ftp.timeout = 120
    home_path = ftp.pwd
    f = None
    for directory in directories:
        self.log("Checking the directory '" + directory + "'.")
        try:
            last_import_key = last_import_keys[directory]
        except KeyError:
            last_import_key = last_import_keys[directory] = ""
        dir_path = home_path + "/" + directory
        ftp.cwd(dir_path)
        files = []
        for attr in ftp.listdir_attr():
            fpath = dir_path + "/" + attr.filename
            # If we can cwd into the entry it's a directory, so skip it;
            # SFTPError means it's a regular file.
            try:
                ftp.cwd(fpath)
                continue  # directory
            except paramiko.SFTPError:
                pass
            key = str(attr.st_mtime) + "_" + attr.filename
            if key > last_import_key:
                files.append((key, fpath))
        if len(files) > 0:
            # Take the oldest new file only; later files are picked up on
            # subsequent polls.
            f = sorted(files)[0]
            last_import_keys[directory] = f[0]
            break
    if f is None:
        self.log("No new files found.")
        ftp.close()
        self.log("Logged out.")
        return False
    else:
        key, fpath = f
        self.log(
            "Attempting to download " + fpath + " with key " + key + ".")
        # Rebind f from the (key, path) tuple to the temp file holding the
        # downloaded bytes.
        f = tempfile.TemporaryFile()
        ftp.getfo(fpath, f)
        self.log("File downloaded successfully.")
        ftp.close()
        self.log("Logged out.")
        f.seek(0, os.SEEK_END)
        fsize = f.tell()
        f.seek(0)
        self.log("File size is " + str(fsize) + " bytes.")
        self.log("Treating files as type " + file_type)
        self.importer = HhDataImportProcess(
            self.contract_id, 0, TextIOWrapper(f, "utf8"),
            fpath + file_type, fsize)
        self.importer.run()
        messages = self.importer.messages
        self.importer = None
        for message in messages:
            self.log(message)
        # Any message from the importer is treated as a failure, so the
        # state (and hence last_import_keys) is not persisted.
        if len(messages) > 0:
            raise BadRequest("Problem loading file.")
        contract = Contract.get_dc_by_id(sess, self.contract_id)
        contract.update_state(state)
        sess.commit()
        self.log("Finished loading '" + fpath)
        return True
def ftp_handler(self, sess, properties, contract):
    """Poll an FTP server for at most one new file and load it.

    Mirrors sftp_handler but over ftplib: scans each configured directory,
    tracking per-directory progress in the contract state under
    ``last_import_keys`` (keys are "<MDTM timestamp>_<filename>" strings,
    compared lexically).  Downloads the earliest new file, feeds it to
    HhDataImportProcess, and on success persists the updated state.
    Returns True if a file was loaded, False otherwise.  Raises
    BadRequest if the import produced any messages.
    """
    host_name = properties["hostname"]
    user_name = properties["username"]
    password = properties["password"]
    try:
        port = properties["port"]
    except KeyError:
        port = None  # port is optional; fall back to the FTP default
    file_type = properties["file_type"]
    directories = properties["directories"]
    state = contract.make_state()
    try:
        last_import_keys = state["last_import_keys"]
    except KeyError:
        last_import_keys = {}
        state["last_import_keys"] = last_import_keys
    sess.rollback()
    self.log(
        "Connecting to ftp server at " + host_name + ":" + str(port) + ".")
    ftp = ftplib.FTP()
    if port is None:
        ftp.connect(host=host_name, timeout=120)
    else:
        ftp.connect(host=host_name, port=port, timeout=120)
    ftp.login(user_name, password)
    home_path = ftp.pwd()
    file = None
    for directory in directories:
        self.log("Checking the directory '" + directory + "'.")
        try:
            last_import_key = last_import_keys[directory]
        except KeyError:
            last_import_key = last_import_keys[directory] = ""
        dir_path = home_path + "/" + directory
        ftp.cwd(dir_path)
        files = []
        for fname in ftp.nlst():
            fpath = dir_path + "/" + fname
            # If we can cwd into the entry it's a directory, so skip it;
            # error_perm means it's a regular file.
            try:
                ftp.cwd(fpath)
                continue  # directory
            except ftplib.error_perm:
                pass
            # MDTM replies "213 YYYYMMDDHHMMSS"; take the timestamp part.
            key = ftp.sendcmd("MDTM " + fpath).split()[1] + "_" + fname
            if key > last_import_key:
                files.append((key, fpath))
        if len(files) > 0:
            # Take the oldest new file only; later files are picked up on
            # subsequent polls.
            file = sorted(files)[0]
            last_import_keys[directory] = file[0]
            break
    if file is None:
        self.log("No new files found.")
        ftp.quit()
        self.log("Logged out.")
        return False
    else:
        key, fpath = file
        self.log(
            "Attempting to download " + fpath + " with key " + key + ".")
        f = tempfile.TemporaryFile()
        ftp.retrbinary("RETR " + fpath, f.write)
        self.log("File downloaded successfully.")
        ftp.quit()
        self.log("Logged out.")
        f.seek(0, os.SEEK_END)
        fsize = f.tell()
        f.seek(0)
        self.log("File size is " + str(fsize) + " bytes.")
        self.log("Treating files as type " + file_type)
        self.importer = HhDataImportProcess(
            self.contract_id, 0, TextIOWrapper(f, "utf8"),
            fpath + file_type, fsize)
        self.importer.run()
        for message in self.importer.messages:
            self.log(message)
        # Any message from the importer is treated as a failure, so the
        # state (and hence last_import_keys) is not persisted.
        if len(self.importer.messages) > 0:
            raise BadRequest("Problem loading file.")
        contract = Contract.get_dc_by_id(sess, self.contract_id)
        contract.update_state(state)
        sess.commit()
        self.log("Finished loading '" + fpath)
        return True
def content(contract_id, end_year, end_month, months, user):
    """Write a CSV of DC virtual bills for every era of the given contract
    over the requested month window, into a chellow download file.

    Fix: pre-bind ``writer``/``running_name``/``finished_name`` and guard
    the except/finally clauses — previously a failure before ``open()``
    (e.g. in Session() or make_names) raised NameError/AttributeError in
    the handlers, masking the original error.
    """
    caches = {}
    # Pre-bind everything the except/finally clauses touch so an early
    # failure can't raise a secondary NameError/AttributeError there.
    sess = f = writer = supply_source = None
    running_name = finished_name = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)
        month_list = list(
            c_months_u(
                finish_year=end_year, finish_month=end_month, months=months))
        start_date, finish_date = month_list[0][0], month_list[-1][-1]
        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            "dc_virtual_bills.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        bill_titles = chellow.computer.contract_func(
            caches, contract, "virtual_bill_titles")()
        header_titles = [
            "Import MPAN Core",
            "Export MPAN Core",
            "Start Date",
            "Finish Date",
        ]
        vb_func = chellow.computer.contract_func(
            caches, contract, "virtual_bill")
        writer.writerow(header_titles + bill_titles)
        for era in (sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract,
                ).options(joinedload(Era.channels)).order_by(Era.supply_id)):
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ""
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = "" if exp_mpan_core is None else exp_mpan_core
            # Clamp the era to the requested window.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            vals = [
                imp_mpan_core_str,
                exp_mpan_core_str,
                hh_format(chunk_start),
                hh_format(chunk_finish),
            ]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                is_import, caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill
            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]
            # Any keys not covered by bill_titles are appended as
            # name/value pairs.
            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))
            writer.writerow(vals)
            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = "Problem "
        if supply_source is not None:
            msg += (
                "with supply " + supply_source.mpan_core + " starting at " +
                hh_format(supply_source.start_date) + " ")
        msg += str(e)
        if writer is not None:
            writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc() + "\n"
        if f is not None:
            f.write(msg)
    finally:
        if f is not None:
            f.close()
        if running_name is not None:
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
def check_permissions(*args, **kwargs):
    """Flask before-request hook: authenticate the user and authorize the
    request path/method.

    Authentication is either via an AD proxy header (when the
    'ad_authentication' configuration property is on), via an IP-address
    map, or via HTTP Basic auth.  Returning None allows the request;
    otherwise a 401 (not authenticated) or 403 (not authorized) response
    is returned.

    Fix: ``party.id`` is converted with str() before concatenation —
    presumably it's an integer primary key, in which case the old
    ``"/supplier_contracts/" + party.id`` raised TypeError.
    """
    g.user = None
    config_contract = Contract.get_non_core_by_name(g.sess, "configuration")
    props = config_contract.make_properties()
    ad_props = props.get("ad_authentication", {})
    ad_auth_on = ad_props.get("on", False)
    if ad_auth_on:
        # Trust the reverse proxy's logon header; fall back to a configured
        # default user if that address isn't registered.
        username = request.headers["X-Isrw-Proxy-Logon-User"].upper()
        user = g.sess.query(User).filter(
            User.email_address == username).first()
        if user is None:
            try:
                username = ad_props["default_user"]
                user = (g.sess.query(User).filter(
                    User.email_address == username).first())
            except KeyError:
                user = None
        if user is not None:
            g.user = user
    else:
        auth = request.authorization
        if auth is None:
            # No credentials supplied: try the IP-address -> email map.
            try:
                ips = props["ips"]
                if request.remote_addr in ips:
                    key = request.remote_addr
                elif "*.*.*.*" in ips:
                    key = "*.*.*.*"
                else:
                    key = None
                email = ips[key]
                g.user = (g.sess.query(User).filter(
                    User.email_address == email).first())
            except KeyError:
                pass
        else:
            user = (g.sess.query(User).filter(
                User.email_address == auth.username).first())
            if user is not None and user.password_matches(auth.password):
                g.user = user
    # Got our user
    path = request.path
    method = request.method
    # These endpoints are open to everyone.
    if path in (
            "/health",
            "/nationalgrid/sf_bsuos.xls",
            "/nationalgrid/cv.csv",
            "/elexonportal/file/download/BESTVIEWPRICES_FILE",
            "/ecoes",
            "/elexonportal/file/download/TLM_FILE",
            "/elexonportal/file/download/RCRC_FILE",
            "/ecoes/NonDomesticCustomer/ExportPortfolioMPANs",
            "/hh_api",
    ):
        return
    if g.user is not None:
        if "X-Isrw-Proxy-Logon-User" in request.headers:
            g.user.proxy_username = request.headers[
                "X-Isrw-Proxy-Logon-User"].upper()
        role = g.user.user_role
        role_code = role.code
        if (role_code == "viewer" and (
                method in ("GET", "HEAD") or path in (
                    "/reports/169", "/reports/187", "/reports/247",
                    "/reports/111")) and path not in ("/system", )):
            return
        elif role_code == "editor":
            return
        elif role_code == "party-viewer":
            if method in ("GET", "HEAD"):
                party = g.user.party
                market_role_code = party.market_role.code
                if market_role_code in ("C", "D"):
                    dc_contract_id = request.args["dc_contract_id"]
                    dc_contract = Contract.get_dc_by_id(
                        g.sess, dc_contract_id)
                    if dc_contract.party == party and \
                            request.full_path.startswith("/channel_snags?"):
                        return
                elif market_role_code == "X":
                    # str() guards against party.id being a non-string id.
                    if path.startswith("/supplier_contracts/" + str(party.id)):
                        return
    # Bootstrap: an empty user table gets a default admin user and the
    # request is allowed through.
    if g.user is None and g.sess.query(User).count() == 0:
        g.sess.rollback()
        user_role = g.sess.query(UserRole).filter(
            UserRole.code == "editor").one()
        User.insert(g.sess, "*****@*****.**", "admin", user_role, None)
        g.sess.commit()
        return
    # Unauthenticated (auth is only evaluated when ad_auth_on is False, in
    # which case it was bound above): challenge with Basic auth.
    if g.user is None or (not ad_auth_on and auth is None):
        return Response(
            "Could not verify your access level for that URL.\n"
            "You have to login with proper credentials",
            401,
            {"WWW-Authenticate": 'Basic realm="Chellow"'},
        )
    # Authenticated but not authorized.
    config = Contract.get_non_core_by_name(g.sess, "configuration")
    return make_response(
        render_template("403.html", properties=config.make_properties()), 403)
def content(contract_id, days_hidden, user):
    """Write a CSV of channel snags for the given DC contract, omitting
    snags newer than ``days_hidden`` days, into a chellow download file.

    Fix: the BaseException handler now checks that ``writer`` exists
    before writing the traceback row — previously a failure before the
    csv writer was created (e.g. in Session() or make_names) raised
    AttributeError inside the handler, losing the traceback row.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "channel_snags.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "Hidden Days",
            "Chellow Id",
            "Imp MPAN Core",
            "Exp MPAN Core",
            "Site Code",
            "Site Name",
            "Snag Description",
            "Import Related?",
            "Channel Type",
            "Start Date",
            "Finish Date",
            "Is Ignored?",
            "Days Since Snag Finished",
            "Duration Of Snag (Days)",
        )
        writer.writerow(titles)
        contract = Contract.get_dc_by_id(sess, contract_id)
        now = utc_datetime_now()
        cutoff_date = now - relativedelta(days=days_hidden)
        for snag, channel, era, supply, site_era, site in (sess.query(
                Snag, Channel, Era, Supply, SiteEra, Site).join(
                    Channel, Snag.channel_id == Channel.id).join(
                    Era, Channel.era_id == Era.id).join(
                    Supply, Era.supply_id == Supply.id).join(
                    SiteEra, Era.site_eras).join(
                    Site, SiteEra.site_id == Site.id).filter(
                        SiteEra.is_physical == true(),
                        Era.dc_contract == contract,
                        Snag.start_date < cutoff_date,
                    ).order_by(
                        Site.code,
                        Supply.id,
                        Channel.imp_related,
                        Channel.channel_type,
                        Snag.description,
                        Snag.start_date,
                        Snag.id,
                    )):
            snag_start = snag.start_date
            snag_finish = snag.finish_date
            imp_mc = "" if era.imp_mpan_core is None else era.imp_mpan_core
            exp_mc = "" if era.exp_mpan_core is None else era.exp_mpan_core
            if snag_finish is None:
                # Still open: duration runs up to now, age is undefined.
                duration = now - snag_start
                age_of_snag = None
            else:
                duration = snag_finish - snag_start
                if hh_before(cutoff_date, snag_finish):
                    age_of_snag = None
                else:
                    delta = now - snag_finish
                    age_of_snag = delta.days
            vals = {
                "Hidden Days": days_hidden,
                "Chellow Id": snag.id,
                "Imp MPAN Core": imp_mc,
                "Exp MPAN Core": exp_mc,
                "Site Code": site.code,
                "Site Name": site.name,
                "Snag Description": snag.description,
                "Import Related?": channel.imp_related,
                "Channel Type": channel.channel_type,
                "Start Date": snag_start,
                "Finish Date": snag_finish,
                "Is Ignored?": snag.is_ignored,
                "Days Since Snag Finished": age_of_snag,
                "Duration Of Snag (Days)": duration.days,
            }
            writer.writerow(csv_make_val(vals[t]) for t in titles)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # writer may still be None if the failure happened before the
        # output file was opened.
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(contract_id, end_year, end_month, months, user):
    """Write a CSV of DC virtual bills for every era of the given contract
    over the requested month window, into a chellow download file.

    Fix: pre-bind ``writer``/``running_name``/``finished_name`` and guard
    the except/finally clauses — previously a failure before ``open()``
    (e.g. in Session() or make_names) raised NameError/AttributeError in
    the handlers, masking the original error.
    """
    caches = {}
    # Pre-bind everything the except/finally clauses touch so an early
    # failure can't raise a secondary NameError/AttributeError there.
    sess = f = writer = supply_source = None
    running_name = finished_name = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)
        finish_date = utc_datetime(end_year, end_month, 1) + MONTH - HH
        start_date = utc_datetime(end_year, end_month, 1) - \
            relativedelta(months=months - 1)
        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            'dc_virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        bill_titles = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')()
        header_titles = [
            'Import MPAN Core', 'Export MPAN Core', 'Start Date',
            'Finish Date'
        ]
        vb_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill')
        writer.writerow(header_titles + bill_titles)
        for era in sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract).options(joinedload(
                    Era.channels)).order_by(Era.supply_id):
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ''
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = '' if exp_mpan_core is None else exp_mpan_core
            # Clamp the era to the requested window.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            vals = [
                imp_mpan_core_str, exp_mpan_core_str, hh_format(chunk_start),
                hh_format(chunk_finish)
            ]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                is_import, caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill
            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]
            # Any keys not covered by bill_titles are appended as
            # name/value pairs.
            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))
            writer.writerow(vals)
            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = 'Problem '
        if supply_source is not None:
            msg += "with supply " + supply_source.mpan_core + \
                " starting at " + hh_format(supply_source.start_date) + " "
        msg += str(e)
        if writer is not None:
            writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc() + '\n'
        if f is not None:
            f.write(msg)
    finally:
        if f is not None:
            f.close()
        if running_name is not None:
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
def content(contract_id, days_hidden, user):
    """Write a CSV of channel snags for the given DC contract, omitting
    snags newer than ``days_hidden`` days, into a chellow download file.

    Fix: the BaseException handler now checks that ``writer`` exists
    before writing the traceback row — previously a failure before the
    csv writer was created (e.g. in Session() or make_names) raised
    AttributeError inside the handler, losing the traceback row.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'channel_snags.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(
            ('Hidden Days', 'Chellow Id', 'Imp MPAN Core', 'Exp MPAN Core',
             'Site Code', 'Site Name', 'Snag Description', 'Import Related?',
             'Channel Type', 'Start Date', 'Finish Date',
             'Days Since Snag Finished', 'Duration Of Snag (Days)',
             'Is Ignored?'))
        contract = Contract.get_dc_by_id(sess, contract_id)
        now = Datetime.now(pytz.utc)
        cutoff_date = now - relativedelta(days=days_hidden)
        for snag, channel, era, supply, site_era, site in sess.query(
                Snag, Channel, Era, Supply, SiteEra, Site).join(
                    Channel, Snag.channel_id == Channel.id).join(
                    Era, Channel.era_id == Era.id).join(
                    Supply, Era.supply_id == Supply.id).join(
                    SiteEra, Era.site_eras).join(
                    Site, SiteEra.site_id == Site.id).filter(
                        SiteEra.is_physical == true(),
                        Era.dc_contract == contract,
                        Snag.start_date < cutoff_date).order_by(
                            Site.code, Supply.id, Channel.imp_related,
                            Channel.channel_type, Snag.description,
                            Snag.start_date, Snag.id):
            snag_start = snag.start_date
            snag_finish = snag.finish_date
            if snag_finish is None:
                # Still open: duration runs up to now, age is zero.
                snag_finish_str = ''
                duration = now - snag_start
                age_of_snag = datetime.timedelta(0)
            else:
                snag_finish_str = snag_finish.strftime("%Y-%m-%d %H:%M")
                duration = snag_finish - snag_start
                age_of_snag = now - snag_finish
            writer.writerow(
                (str(days_hidden), str(snag.id),
                 '' if era.imp_mpan_core is None else era.imp_mpan_core,
                 '' if era.exp_mpan_core is None else era.exp_mpan_core,
                 site.code, site.name, snag.description,
                 str(channel.imp_related), channel.channel_type,
                 snag_start.strftime("%Y-%m-%d %H:%M"), snag_finish_str,
                 str(age_of_snag.days + age_of_snag.seconds / (3600 * 24)),
                 str(duration.days + duration.seconds / (3600 * 24)),
                 str(snag.is_ignored)))
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # writer may still be None if the failure happened before the
        # output file was opened.
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)