def do_load_programme_data(progid):
    '''
    Load programme data.

    This populates the canonical model from the exposure files: the
    source location and account files are transformed into canonical
    CSVs, the output file records are registered, a Schema.ini is
    written from the profile details, and the canonical model is
    generated. On any failure the programme status is set to "Failed".

    :param progid: identifier of the programme to load.
    '''
    try:
        # row[0:3] drive the location transform, row[3:6] the account
        # transform (source file, transform name, validation file —
        # TODO confirm field order against get_transform_file_names_prog).
        row = flamingo_db_utils.get_transform_file_names_prog(progid)

        ts = values.get_timestamp()
        dest_file_path = str(FILES_DIRECTORY) + "/Exposures/"
        canonicallocfilename = "CanLocProg" + str(progid) + "_" + str(
            ts) + ".csv"
        transform_source_to_canonical(progid, row[0], row[1], row[2],
                                      dest_file_path, canonicallocfilename)

        # Fresh timestamp so the two canonical files are named independently.
        ts = values.get_timestamp()
        canonicalaccfilename = "CanAccProg" + str(progid) + "_" + str(
            ts) + ".csv"
        transform_source_to_canonical(progid, row[3], row[4], row[5],
                                      dest_file_path, canonicalaccfilename)

        flamingo_db_utils.generate_output_transform_file_records_for_prog(
            progid, canonicallocfilename, canonicalaccfilename)

        rows = flamingo_db_utils.get_profile_details(progid)
        schema_filepath = FILES_DIRECTORY + "/Exposures/Schema.ini"
        with open(schema_filepath, "w") as schema_file:
            for line in rows:
                # Tab-separate the row's fields, then strip the tuple
                # punctuation left over from the DB row's repr.
                # (Computed once instead of twice as in the original.)
                tabbed = re.sub(",", "\t", str(line))
                cleaned = re.sub("[()']", "", tabbed)
                logging.getLogger().debug(cleaned)
                schema_file.write(cleaned + "\n")

        flamingo_db_utils.generate_canonical_model(progid)
    except Exception:
        # A bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
        # Exception is broad enough to record any real failure.
        logging.getLogger().exception("Error in do_load_programme_data")
        flamingo_db_utils.update_prog_status(progid, "Failed")
def test_fmt_is_supplied___timestamp_of_supplied_time_is_returned(
        self, dt):
    """get_timestamp honours an explicitly supplied format string."""
    custom_fmt = 'foo %Y-%m-%d %H:%M:%S bar'
    expected = dt.strftime(custom_fmt)
    actual = get_timestamp(dt, fmt=custom_fmt)
    self.assertEqual(expected, actual)
def test_time_is_not_supplied___timestamp_of_now_is_returned(self):
    """With no argument, get_timestamp formats the current time."""
    frozen_point = datetime.now()
    with freeze_time(frozen_point):
        actual = get_timestamp()
    expected = frozen_point.strftime('%Y%m%d%H%M%S')
    self.assertEqual(expected, actual)
def generate_summary_files(processrunid):
    """Build the Oasis input directory for a process run and produce the
    GUL/FM summary xref files.

    Creates ``ProcessRun_<id>_<timestamp>`` under the programme's Oasis
    location, links (or copies, on Windows) the Oasis input CSVs into it,
    prepends header rows to the bcp-extracted summary xref files and
    registers the generated outputs in the database.

    :param processrunid: identifier of the process run.
    :return: path of the newly created input directory.
    """
    flamingo_db_utils.generate_oasis_files_outputs(processrunid)
    prog_oasis_location = \
        flamingo_db_utils.get_prog_oasis_location(processrunid)

    ts = values.get_timestamp()
    process_dir = "ProcessRun_" + str(processrunid) + "_" + ts
    input_location = str(prog_oasis_location) + "/" + str(process_dir)
    if not os.path.isdir(input_location):
        os.mkdir(input_location)

    for name in ("items", "coverages", "fm_programme", "fm_policytc",
                 "fm_xref", "fm_profile"):
        source_file = "{}/{}.csv".format(prog_oasis_location, name)
        target_file = "{}/{}.csv".format(input_location, name)
        # Symlinks are cheap but not generally available on Windows.
        if not IS_WINDOWS_HOST:
            os.symlink(source_file, target_file)
        else:
            shutil.copy(source_file, target_file)

    db.bcp("OasisGULSUMMARYXREF",
           input_location + "/gulsummaryxref_temp.csv")
    db.bcp("OasisFMSUMMARYXREF",
           input_location + "/fmsummaryxref_temp.csv")

    # Prepend the header row to each bcp extract. Headers are written as
    # bytes: the original passed a str to a file opened in 'wb', which
    # raises TypeError on Python 3 (bytes literals behave identically on
    # Python 2). ``with`` guarantees the handles are closed even if the
    # copy fails; the temp file is removed once consumed.
    for temp_name, final_name, header in (
            ("gulsummaryxref_temp.csv", "gulsummaryxref.csv",
             b"coverage_id,summary_id,summaryset_id\n"),
            ("fmsummaryxref_temp.csv", "fmsummaryxref.csv",
             b"output_id,summary_id,summaryset_id\n")):
        temp_path = input_location + "/" + temp_name
        final_path = input_location + "/" + final_name
        with open(final_path, 'wb') as destination:
            destination.write(header)
            with open(temp_path, 'rb') as source:
                shutil.copyfileobj(source, destination)
        os.remove(temp_path)

    process_run_locationid = flamingo_db_utils.get_process_run_locationid(
        prog_oasis_location, process_dir, processrunid)
    flamingo_db_utils.generate_oasis_files_records_outputs(
        processrunid, process_run_locationid)
    return input_location
def do_load_programme_model(progoasisid):
    '''
    Load programme model.

    Transforms the canonical location data into the model's own format,
    records the output transform file, then runs the keys service and
    Oasis file generation for this ProgOasis. On any failure the
    ProgOasis status is set to "Failed".

    :param progoasisid: identifier of the ProgOasis (programme/model
        pairing) to load.
    '''
    try:
        # row[0:3] hold the transform source/definition names for this
        # ProgOasis; the model determines the output file extension.
        row = flamingo_db_utils.get_transform_file_names_progoasis(progoasisid)
        extension = flamingo_db_utils.get_model_file_extension(progoasisid)
        ts = values.get_timestamp()
        dest_file_path = str(FILES_DIRECTORY) + "/APIInput/"
        destinationfile = "ModelLocProgOasis" + str(progoasisid) + "_" + str(
            ts) + "." + str(extension[0])
        transform_canonical_to_model(progoasisid, row[0], row[1], row[2],
                                     dest_file_path, destinationfile)
        flamingo_db_utils.generate_output_transform_file_records_for_progoasis(
            progoasisid, destinationfile)
        do_call_keys_service(progoasisid)
        do_generate_oasis_files(progoasisid)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt still propagate.
        logging.getLogger().exception("Error in do_load_programme_model")
        flamingo_db_utils.update_progoasis_status(progoasisid, "Failed")
def process_keys_response(progoasisid, modelid, apiJSON, sessionid):
    """Split a keys-service response into mapped and error CSV files and
    record them against the ProgOasis.

    Locations with status ``'success'`` are written to
    ``ExposureKeys_<ts>.csv``; ``'nomatch'`` and ``'fail'`` locations go
    to ``ExposureKeysError_<ts>.csv``. Per-status percentages are logged
    and both files are registered in the database.

    :param progoasisid: ProgOasis the response belongs to.
    :param modelid: model identifier (accepted for interface
        compatibility; not used in this function).
    :param apiJSON: iterable of per-location dicts from the keys service.
    :param sessionid: session identifier passed through when recording
        the mapped exposure file.
    """
    all_location_count = 0
    success_location_count = 0
    nomatch_location_count = 0
    fail_location_count = 0

    ts = values.get_timestamp()
    mapped_exposure_file = FILES_DIRECTORY + "/APIOutput/ExposureKeys_" + str(
        ts) + ".csv"
    logging.getLogger().info(
        "Writing mapped exposure to {}".format(mapped_exposure_file))
    # The original assigned this path twice; once is enough.
    error_file = FILES_DIRECTORY + "/APIOutput/ExposureKeysError_" + str(
        ts) + ".csv"
    logging.getLogger().info(
        "Writing non-mapped and failed exposure to {}".format(error_file))

    # Distinct name for the handle: the original shadowed the path
    # variable ``error_file`` with the open file object.
    with open(mapped_exposure_file, "w") as out_file, \
            open(error_file, "w") as err_file:
        out_writer = csv.writer(out_file)
        error_writer = csv.writer(err_file)
        out_writer.writerow([
            "LocID", "PerilID", "CoverageID", "AreaPerilID",
            "VulnerabilityID"
        ])
        error_writer.writerow(["LocID", "PerilID", "CoverageID", "Message"])
        for location in apiJSON:
            all_location_count += 1
            if location['status'] == 'success':
                success_location_count += 1
                out_writer.writerow([
                    location['id'], location['peril_id'],
                    location["coverage"], location['area_peril_id'],
                    location['vulnerability_id']
                ])
            elif location['status'] in ('nomatch', 'fail'):
                # Both statuses produce an identical error row; only the
                # counters differ.
                if location['status'] == 'nomatch':
                    nomatch_location_count += 1
                else:
                    fail_location_count += 1
                error_writer.writerow([
                    location['id'], location['peril_id'],
                    location["coverage"], location['message']
                ])

    logging.getLogger().info('{:,} locations'.format(all_location_count))
    if all_location_count:
        # Guard against ZeroDivisionError on an empty response.
        logging.getLogger().info('{0:.2f}% success'.format(
            100.0 * success_location_count / all_location_count))
        logging.getLogger().info('{0:.2f}% fail'.format(
            100.0 * fail_location_count / all_location_count))
        logging.getLogger().info('{0:.2f}% no match'.format(
            100.0 * nomatch_location_count / all_location_count))

    flamingo_db_utils.get_api_return_data(progoasisid,
                                          "ExposureKeys_" + str(ts) + ".csv",
                                          sessionid)
    flamingo_db_utils.create_api_error_file_record(
        "ExposureKeysError_" + str(ts) + ".csv", progoasisid)
def generate_summary_files(processrunid):
    """Build the Oasis input directory for a process run, including
    reinsurance (RI) inputs, and produce the GUL/FM/RI summary xref files.

    Creates ``ProcessRun_<id>_<timestamp>`` under the programme's Oasis
    location, links (or copies, on Windows) the Oasis input CSVs and any
    ``RI_*`` subdirectories into it, prepends header rows to the
    bcp-extracted summary xref files, distributes the RI summary xref to
    each ``RI_*`` directory as its ``fmsummaryxref.csv``, and registers
    the generated outputs in the database.

    :param processrunid: identifier of the process run.
    :return: path of the newly created input directory.
    """
    flamingo_db_utils.generate_oasis_files_outputs(processrunid)
    prog_oasis_location = \
        flamingo_db_utils.get_prog_oasis_location(processrunid)

    ts = values.get_timestamp()
    process_dir = "ProcessRun_" + str(processrunid) + "_" + ts
    input_location = str(prog_oasis_location) + "/" + str(process_dir)
    if not os.path.isdir(input_location):
        os.mkdir(input_location)

    for name in ("items", "coverages", "fm_programme", "fm_policytc",
                 "fm_xref", "fm_profile"):
        source_file = "{}/{}.csv".format(prog_oasis_location, name)
        target_file = "{}/{}.csv".format(input_location, name)
        # Symlinks are cheap but not generally available on Windows.
        if not IS_WINDOWS_HOST:
            os.symlink(source_file, target_file)
        else:
            shutil.copy(source_file, target_file)

    # Mirror every reinsurance (RI_*) subdirectory into the run directory.
    for ri_name in os.listdir(prog_oasis_location):
        if not ri_name.startswith('RI_'):
            continue
        ri_full_path = prog_oasis_location + '/' + ri_name
        ri_target_path = input_location + '/' + ri_name
        if not IS_WINDOWS_HOST:
            os.symlink(ri_full_path, ri_target_path)
        else:
            shutil.copytree(ri_full_path, ri_target_path)

    db.bcp("OasisGULSUMMARYXREF",
           input_location + "/gulsummaryxref_temp.csv")
    db.bcp("OasisFMSUMMARYXREF",
           input_location + "/fmsummaryxref_temp.csv")
    db.bcp("OasisRISUMMARYXREF",
           input_location + "/risummaryxref_temp.csv")

    # Prepend the header row to each bcp extract. Headers are written as
    # bytes: the original passed a str to a file opened in 'wb', which
    # raises TypeError on Python 3 (bytes literals behave identically on
    # Python 2). ``with`` guarantees the handles are closed even if the
    # copy fails; the temp file is removed once consumed.
    for temp_name, final_name, header in (
            ("gulsummaryxref_temp.csv", "gulsummaryxref.csv",
             b"coverage_id,summary_id,summaryset_id\n"),
            ("fmsummaryxref_temp.csv", "fmsummaryxref.csv",
             b"output_id,summary_id,summaryset_id\n"),
            ("risummaryxref_temp.csv", "risummaryxref.csv",
             b"output_id,summary_id,summaryset_id\n")):
        temp_path = input_location + "/" + temp_name
        final_path = input_location + "/" + final_name
        with open(final_path, 'wb') as destination:
            destination.write(header)
            with open(temp_path, 'rb') as source:
                shutil.copyfileobj(source, destination)
        os.remove(temp_path)

    # Each RI_* directory receives the RI summary xref under the name the
    # calculation expects ('dir' renamed: it shadowed the builtin).
    risummary_src = os.path.join(input_location, "risummaryxref.csv")
    for ri_name in os.listdir(input_location):
        if ri_name.startswith('RI_'):
            shutil.copy(risummary_src,
                        os.path.join(input_location, ri_name,
                                     "fmsummaryxref.csv"))

    process_run_locationid = flamingo_db_utils.get_process_run_locationid(
        prog_oasis_location, process_dir, processrunid)
    flamingo_db_utils.generate_oasis_files_records_outputs(
        processrunid, process_run_locationid)
    return input_location
def test_time_is_supplied___timestamp_of_supplied_time_is_returned(
        self, dt):
    """A supplied datetime is rendered with the default timestamp format."""
    expected = dt.strftime('%Y%m%d%H%M%S')
    self.assertEqual(expected, get_timestamp(dt))