def main(arguments: argparse.Namespace):
    # Set Logging Level
    logging.Logger.setLevel(system_helpers.logger, arguments.logLevel)  # DoltPy's Log Level
    logger.setLevel(arguments.logLevel)  # This Script's Log Level

    rover: Rover = Rover(threadID=1,
                         name="Rover",
                         requested_wait_time=arguments.wait * 60,
                         reply=arguments.reply,
                         threadLock=threadLock)

    archiver: Archiver = Archiver(threadID=2,
                                  name="Archiver",
                                  requested_wait_time=arguments.wait * 60,
                                  commit=arguments.commit,
                                  threadLock=threadLock)

    server: WebServer = WebServer(threadID=3, name="Analysis Server")

    # https://www.tutorialspoint.com/python3/python_multithreading.htm
    # Start Archiver
    if arguments.archive:
        archiver.start()

    # Start Rover
    if arguments.rover:
        rover.start()

    # Start Webserver
    if arguments.server:
        server.start()

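# A minimal sketch of how this entry point might be driven. The flag names are
# inferred from the attribute accesses above (wait, reply, commit, archive,
# rover, server, logLevel) and are assumptions, not the project's actual CLI.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run the rover/archiver/server threads")
    parser.add_argument("--wait", type=int, default=5, help="wait time in minutes")
    parser.add_argument("--reply", action="store_true")
    parser.add_argument("--commit", action="store_true")
    parser.add_argument("--archive", action="store_true")
    parser.add_argument("--rover", action="store_true")
    parser.add_argument("--server", action="store_true")
    parser.add_argument("--logLevel", default="INFO")
    main(parser.parse_args())
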
def _archive_and_delete_notes(self,
                              notepaths: Union[NotePath, List[NotePath]],
                              cursor) -> None:
    if isinstance(notepaths, NotePath):
        notepaths = [notepaths]

    # Archive the notes before deleting them from the database.
    note_ids = self._get_note_ids(notepaths, cursor)
    archiver = Archiver(self._archive_path)
    old_notes = self._get_notes_by_id(note_ids, cursor)
    if old_notes:
        archiver.append_notes(old_notes)

    self._delete_notes_by_path(notepaths, cursor)

def getArchiverData(startTime, numPoints, signals, timeInt=ARCHIVER_TIME_INTERVAL):
    # type: (datetime, int, List[str], int) -> Optional[ArchiverData]
    archiver = Archiver("lcls")
    try:
        data = archiver.getDataWithTimeInterval(
            pvList=signals,
            startTime=startTime,
            endTime=(startTime + timedelta(seconds=(numPoints * timeInt))),
            timeDelta=timedelta(seconds=timeInt))
        return data
    except ConnectTimeout:
        writeAndFlushStdErr("Archiver timed out - are you VPNed in?")
        return None

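# Hypothetical usage sketch: only the call shape is taken from the signature
# above; the PV names below are placeholders, not real channel names.
from datetime import datetime

data = getArchiverData(startTime=datetime(2023, 1, 1, 12, 0),
                       numPoints=100,
                       signals=["EXAMPLE:PV:ONE", "EXAMPLE:PV:TWO"])
if data is None:
    print("No archiver data returned")
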
def __init__(self, config_file_or_database, db_engine=None, user=None,
             pw=None, host=None, port=5432):
    if not db_engine:
        config = read_config_file(config_file_or_database)
        db_engine = config.get('db_engine', db_engine)
    else:
        config = {
            'database': config_file_or_database,
            'user': user,
            'password': pw,
            'host': host,
            'port': port,
        }
    self.archiver = Archiver(db_engine, config)
    self.rpa = False
    self.dry_run = False
    self.generator = None

def destalinate_job():
    print("Destalinating")
    if "SB_TOKEN" not in os.environ or "API_TOKEN" not in os.environ:
        print("ERR: Missing at least one Slack environment variable.")
    else:
        warner = Warner()
        archiver = Archiver()
        announcer = Announcer()
        flagger = Flagger()
        print("Warning")
        warner.warn()
        print("Archiving")
        archiver.archive()
        print("Announcing")
        announcer.announce()
        print("Flagging")
        flagger.flag()
        print("OK: destalinated")
    print("END: destalinate_job")

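# A minimal sketch of wiring the job up; the token values are placeholders
# (assumptions), and only the variable names come from the check above.
import os

os.environ.setdefault("SB_TOKEN", "<slackbot-token>")
os.environ.setdefault("API_TOKEN", "<api-token>")
destalinate_job()
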
def parse_xml(xml_file, output_format, connection, config):
    output_format = output_format.lower()
    if not os.path.exists(xml_file):
        sys.exit('Could not find input file: ' + xml_file)
    BUFFER_SIZE = 65536
    archiver = Archiver(connection, config)
    if output_format in SUPPORTED_OUTPUT_FORMATS:
        handler = SUPPORTED_OUTPUT_FORMATS[output_format](archiver)
    else:
        raise Exception("Unsupported report format '{}'".format(output_format))
    parser = xml.sax.make_parser()
    parser.setContentHandler(handler)
    with open(xml_file) as file:
        buffer = file.read(BUFFER_SIZE)
        while buffer:
            parser.feed(buffer)
            buffer = file.read(BUFFER_SIZE)
    if len(archiver.stack) != 1:
        raise Exception('File parse error. Please check that you used the proper output format (default: robotframework).')
    else:
        archiver.end_test_run()

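# Usage sketch: 'robotframework' is the default format named in the error
# message above; the connection and config objects are placeholders here,
# standing in for whatever the Archiver normally receives.
connection = None   # placeholder for a real database connection
config = {}         # placeholder for the archiver configuration
parse_xml("output.xml", "robotframework", connection, config)
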
def __init__(self, user_nickname='alex', **kwargs):
    config = configparser.ConfigParser()
    self.dev_mode = kwargs.get('dev', False)
    self.archive_mode = kwargs.get('archive', True)
    self.prevent_shutdown = kwargs.get('prevent_shutdown', False)
    use_gui = kwargs.get('use_gui', False)
    config.read('test_config.ini' if self.dev_mode else 'live_config.ini')
    self.user = User(user_nickname)
    self.webdriver = get_webdriver(self.user.id, use_gui)
    self.archiver = Archiver(self.webdriver) if self.archive_mode else None
    self.fb = FacebookHandler(self.webdriver)
    try:
        self.init_selenium()
    except Exception as err:
        print(f'Encountered exception in selenium init: {err!r}')
        self.webdriver.quit()
        raise err
    self.pending_auctions = get_unexpired_auctions(dev=self.dev_mode)

def __init__(self, config_file_or_database, db_engine=None, user=None,
             pw=None, host=None, port=5432):
    if not db_engine:
        config = read_config_file(config_file_or_database)
    else:
        config = {
            'database': config_file_or_database,
            'db_engine': db_engine,
            'user': user,
            'password': pw,
            'host': host,
            'port': port,
        }
    database = database_connection(config)
    self.archiver = Archiver(database, config)
    self.archiver.test_type = "Robot Framework"
    self.rpa = False
    self.dry_run = False
    self.generator = None

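# The signature above allows two construction paths; a sketch, assuming the
# enclosing class is named ResultsArchiver (a hypothetical name) and using
# placeholder credentials:
reader = ResultsArchiver("archiver_config.ini")   # settings come from a config file
reader = ResultsArchiver("test_results",          # or pass database settings directly
                         db_engine="postgresql",
                         user="robot",
                         pw="secret",
                         host="localhost")
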
def main():
    # Set GPU device
    rtx.set_device(args.gpu_device)

    # Initialize colors
    color_array = []
    for n in range(args.num_colors):
        hue = n / (args.num_colors - 1)
        saturation = 0.9
        lightness = 1
        red, green, blue = colorsys.hsv_to_rgb(hue, saturation, lightness)
        color_array.append((red, green, blue, 1))

    screen_width = args.image_size
    screen_height = args.image_size

    # Setting up a raytracer
    rt_args = rtx.RayTracingArguments()
    rt_args.num_rays_per_pixel = 512
    rt_args.max_bounce = 2
    rt_args.supersampling_enabled = args.anti_aliasing
    rt_args.ambient_light_intensity = 0.05

    cuda_args = rtx.CUDAKernelLaunchArguments()
    cuda_args.num_threads = 64
    cuda_args.num_rays_per_thread = 32

    renderer = rtx.Renderer()
    render_buffer = np.zeros((screen_height, screen_width, 3), dtype=np.float32)

    archiver = Archiver(
        directory=args.output_directory,
        total_scenes=args.total_scenes,
        num_scenes_per_file=min(args.num_scenes_per_file, args.total_scenes),
        image_size=(args.image_size, args.image_size),
        num_observations_per_scene=args.num_observations_per_scene,
        initial_file_number=args.initial_file_number)

    camera = rtx.OrthographicCamera()

    for _ in tqdm(range(args.total_scenes)):
        scene = build_scene(args.num_cubes, color_array)
        scene_data = SceneData((args.image_size, args.image_size),
                               args.num_observations_per_scene)
        camera_distance = 1

        for _ in range(args.num_observations_per_scene):
            # Generate random point on a sphere
            camera_position = np.random.normal(size=3)
            camera_position = camera_distance * camera_position / np.linalg.norm(camera_position)

            # Compute yaw and pitch
            yaw, pitch = compute_yaw_and_pitch(camera_position)

            center = (0, 0, 0)
            camera.look_at(tuple(camera_position), center, up=(0, 1, 0))
            renderer.render(scene, camera, rt_args, cuda_args, render_buffer)

            # Convert to sRGB
            image = np.power(np.clip(render_buffer, 0, 1), 1.0 / 2.2)
            image = np.uint8(image * 255)
            image = cv2.bilateralFilter(image, 3, 25, 25)

            if args.visualize:
                plt.clf()
                plt.imshow(image)
                plt.pause(1e-10)

            scene_data.add(image, camera_position, math.cos(yaw),
                           math.sin(yaw), math.cos(pitch), math.sin(pitch))

        archiver.add(scene_data)

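# compute_yaw_and_pitch is called throughout these snippets but never shown.
# The sketch below is a plausible reading of what such a helper could do for
# a camera looking at the origin; the angle conventions are an assumption,
# not the project's actual code, hence the _sketch suffix.
import math
import numpy as np

def compute_yaw_and_pitch_sketch(position):
    # Yaw: rotation around the vertical (y) axis, measured in the xz-plane.
    # Pitch: elevation of the position above the xz-plane.
    x, y, z = position
    yaw = math.atan2(x, z)
    if yaw < 0:
        yaw += 2 * math.pi  # keep yaw in [0, 2*pi)
    pitch = math.asin(y / np.linalg.norm(position))
    return yaw, pitch
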
def main():
    try:
        os.makedirs(args.output_directory)
    except:
        pass

    last_file_number = (args.initial_file_number
                        + args.total_scenes // args.num_scenes_per_file - 1)
    initial_file_number = args.initial_file_number
    if os.path.isdir(args.output_directory):
        files = os.listdir(args.output_directory)
        for name in files:
            number = int(name.replace(".h5", ""))
            if number > last_file_number:
                continue
            if number < args.initial_file_number:
                continue
            if number < initial_file_number:
                continue
            initial_file_number = number + 1
    total_scenes_to_render = args.total_scenes - args.num_scenes_per_file * (
        initial_file_number - args.initial_file_number)

    assert args.num_scenes_per_file <= total_scenes_to_render

    # Colors
    colors = []
    for n in range(args.num_colors):
        hue = n / args.num_colors
        saturation = 1
        lightness = 1
        red, green, blue = colorsys.hsv_to_rgb(hue, saturation, lightness)
        colors.append(np.array((red, green, blue, 1)))

    renderer = OffscreenRenderer(viewport_width=args.image_size,
                                 viewport_height=args.image_size)

    archiver = Archiver(
        directory=args.output_directory,
        num_scenes_per_file=args.num_scenes_per_file,
        image_size=(args.image_size, args.image_size),
        num_observations_per_scene=args.num_observations_per_scene,
        initial_file_number=initial_file_number)

    for scene_index in tqdm(range(total_scenes_to_render)):
        scene = build_scene(floor_textures,
                            wall_textures,
                            fix_light_position=args.fix_light_position)
        place_objects(scene,
                      colors,
                      objects,
                      max_num_objects=args.max_num_objects,
                      discrete_position=args.discrete_position,
                      rotate_object=args.rotate_object)
        camera_distance = 4.5
        camera = PerspectiveCamera(yfov=math.pi / 4)
        camera_node = Node(camera=camera, translation=np.array([0, 1, 1]))
        scene.add_node(camera_node)
        scene_data = SceneData((args.image_size, args.image_size),
                               args.num_observations_per_scene)

        for observation_index in range(args.num_observations_per_scene):
            rand_position_xz = np.random.normal(size=2)
            rand_position_xz = camera_distance * rand_position_xz / np.linalg.norm(rand_position_xz)

            # Compute yaw and pitch
            camera_direction = np.array([rand_position_xz[0], 0, rand_position_xz[1]])
            yaw, pitch = compute_yaw_and_pitch(camera_direction)

            camera_node.rotation = genearte_camera_quaternion(yaw, pitch)
            camera_position = np.array([rand_position_xz[0], 1, rand_position_xz[1]])
            camera_node.translation = camera_position

            # Rendering
            flags = RenderFlags.SHADOWS_DIRECTIONAL
            if args.anti_aliasing:
                flags |= RenderFlags.ANTI_ALIASING
            image = renderer.render(scene, flags=flags)[0]

            scene_data.add(image, camera_position, math.cos(yaw),
                           math.sin(yaw), math.cos(pitch), math.sin(pitch))

            if args.visualize:
                plt.clf()
                plt.imshow(image)
                plt.pause(1e-10)

        archiver.add(scene_data)

    renderer.delete()

def parse(self): data_type = "contacts" xero_url = "https://go.xero.com/Contacts/View/" proc_dir = "processing/default" json_file_name = "{}/{}/{}.json".format(self.data_dir, proc_dir, data_type) csv_file_name = "{}/{}/{}.csv".format(self.data_dir, proc_dir, data_type) if not os.path.isfile(json_file_name): self.log.write( "ERROR {} file does not exist, did you forget to extract this?" .format(json_file_name)) return False with open(json_file_name, encoding='utf-8') as f: data = json.load(f) collection = 'Contacts' if collection not in data: self.log.write( "ERROR '{}' collection not found in JSON file".format( collection)) return # zero ts in json header #zero_created_datetime = clean_date(data['DateTimeUTC']) col_header = """ ContactID,AccountNumber,ContactStatus,Name,FirstName,LastName,EmailAddress,SkypeUserName,Segment1,Segment2,Segment3,BankAccountDetails,TaxNumber, Street_City,Street_Region,Street_PostalCode,Street_Country, POBOX_AddressLine1,POBOX_AddressLine2,POBOX_AddressLine3,POBOX_AddressLine4,POBOX_City,POBOX_Region,POBOX_PostalCode,POBOX_Country,POBOX_AttentionTo, DEFAULT_PhoneNumber,DEFAULT_PhoneAreaCode,DEFAULT_PhoneCountryCode, MOBILE_PhoneNumber,MOBILE_PhoneAreaCode,MOBILE_PhoneCountryCode, FAX_PhoneNumber,FAX_PhoneAreaCode,FAX_PhoneCountryCode, DDI_PhoneNumber,DDI_PhoneAreaCode,DDI_PhoneCountryCode, UpdatedDateUTC,IsSupplier,IsCustomer, ProcessingNotes,URL """ csv_file = open(csv_file_name, 'w', encoding='utf-8') csv_file.write(re.sub(r"[\n\t\s]", "", col_header) + "\n") i = 0 for contact in data[collection]: i = i + 1 contact_id = contact['ContactID'] account_number = contact[ 'AccountNumber'] if 'AccountNumber' in contact else '' contact_status = contact[ 'ContactStatus'] if 'ContactStatus' in contact else '' name = contact['Name'] if 'Name' in contact else '' first_name = contact['FirstName'] if 'FirstName' in contact else '' last_name = contact['LastName'] if 'LastName' in contact else '' email_address = contact[ 'EmailAddress'] if 'EmailAddress' in contact else '' # parse segments skype_user_name = contact[ 'SkypeUserName'] if 'SkypeUserName' in contact else '' parts = skype_user_name.split("|") if len(parts) == 3: segment_1 = parts[0].strip() segment_2 = parts[1].strip() segment_3 = parts[2].strip() processing_notes = "" else: segment_1 = "" segment_2 = "" segment_3 = "" processing_notes = "malformed [SkypeUserName] field" bank_account_details = contact[ 'BankAccountDetails'] if 'BankAccountDetails' in contact else '' tax_number = zero_to_empty( contact['TaxNumber']) if 'TaxNumber' in contact else '' # (ugly) initializer street_city, street_region, street_postalcode, street_country = "", "", "", "" pobox_addressline1, pobox_addressline2, pobox_address_line3, pobox_address_line4, pobox_city, pobox_region = "", "", "", "", "", "" pobox_postal_code, pobox_country, pobox_attention_to = "", "", "" if 'Addresses' in contact and contact['Addresses']: addresses = contact['Addresses'] for address in addresses: if address['AddressType'] == 'STREET': street_city = zero_to_empty( address['City']) if 'City' in address else '' street_region = zero_to_empty( address['Region']) if 'Region' in address else '' street_postalcode = zero_to_empty( address['PostalCode'] ) if 'PostalCode' in address else '' street_country = zero_to_empty( address['Country']) if 'Country' in address else '' elif address['AddressType'] == 'POBOX': pobox_addressline1 = zero_to_empty( address['AddressLine1'] ) if 'AddressLine1' in address else '' pobox_addressline2 = zero_to_empty( address['AddressLine2'] ) 
if 'AddressLine2' in address else '' pobox_address_line3 = zero_to_empty( address['AddressLine3'] ) if 'AddressLine3' in address else '' pobox_address_line4 = zero_to_empty( address['AddressLine4'] ) if 'AddressLine4' in address else '' pobox_city = zero_to_empty( address['City']) if 'City' in address else '' pobox_region = zero_to_empty( address['Region']) if 'Region' in address else '' pobox_postal_code = zero_to_empty( address['PostalCode'] ) if 'PostalCode' in address else '' pobox_country = zero_to_empty( address['Country']) if 'Country' in address else '' pobox_attention_to = zero_to_empty( address['AttentionTo'] ) if 'AttentionTo' in address else '' else: # TODO : other type of address (write note to log) pass # (ugly) initializer ddi_phone_number, ddi_phone_area_code, ddi_phone_country_code = "", "", "" default_phone_number, default_phone_area_code, default_phone_country_code = "", "", "" fax_phone_number, fax_phone_area_code, fax_phone_country_code = "", "", "" mobile_phone_number, mobile_phone_area_code, mobile_phone_country_code = "", "", "" if 'Phones' in contact and contact['Phones']: phones = contact['Phones'] for phone in phones: if phone['PhoneType'] == 'DDI': ddi_phone_number = zero_to_empty( phone['PhoneNumber'] ) if 'PhoneNumber' in phone else '' ddi_phone_area_code = zero_to_empty( phone['PhoneAreaCode'] ) if 'PhoneAreaCode' in phone else '' ddi_phone_country_code = zero_to_empty( phone['PhoneCountryCode'] ) if 'PhoneCountryCode' in phone else '' elif phone['PhoneType'] == 'DEFAULT': default_phone_number = zero_to_empty( phone['PhoneNumber'] ) if 'PhoneNumber' in phone else '' default_phone_area_code = zero_to_empty( phone['PhoneAreaCode'] ) if 'PhoneAreaCode' in phone else '' default_phone_country_code = zero_to_empty( phone['PhoneCountryCode'] ) if 'PhoneCountryCode' in phone else '' elif phone['PhoneType'] == 'FAX': fax_phone_number = zero_to_empty( phone['PhoneNumber'] ) if 'PhoneNumber' in phone else '' fax_phone_area_code = zero_to_empty( phone['PhoneAreaCode'] ) if 'PhoneAreaCode' in phone else '' fax_phone_country_code = zero_to_empty( phone['PhoneCountryCode'] ) if 'PhoneCountryCode' in phone else '' elif phone['PhoneType'] == 'MOBILE': mobile_phone_number = zero_to_empty( phone['PhoneNumber'] ) if 'PhoneNumber' in phone else '' mobile_phone_area_code = zero_to_empty( phone['PhoneAreaCode'] ) if 'PhoneAreaCode' in phone else '' mobile_phone_country_code = zero_to_empty( phone['PhoneCountryCode'] ) if 'PhoneCountryCode' in phone else '' else: # TODO : other type of phone (write note to log) pass updated_date_utc = clean_date( contact['UpdatedDateUTC'] ) if 'UpdatedDateUTC' in contact else '' is_supplier = contact[ 'IsSupplier'] if 'IsSupplier' in contact else '' is_customer = contact[ 'IsCustomer'] if 'IsCustomer' in contact else '' url = xero_url + contact_id columns = [ contact_id, account_number, contact_status, name, first_name, last_name, email_address, skype_user_name, segment_1, segment_2, segment_3, bank_account_details, tax_number, street_city, street_region, street_postalcode, street_country, pobox_addressline1, pobox_addressline2, pobox_address_line3, pobox_address_line4, pobox_city, pobox_region, pobox_postal_code, pobox_country, pobox_attention_to, default_phone_number, default_phone_area_code, default_phone_country_code, mobile_phone_number, mobile_phone_area_code, mobile_phone_country_code, fax_phone_number, fax_phone_area_code, fax_phone_country_code, ddi_phone_number, ddi_phone_area_code, ddi_phone_country_code, updated_date_utc, is_supplier, 
is_customer, processing_notes, url ] prep_columns = list( map(lambda col: "\"" + str(col) + "\"", columns)) line = ",".join(prep_columns) + "\n" csv_file.write(line) csv_file.close() self.log.write("INFO [{}] CSV file created {} ({:,} records)".format( data_type, csv_file_name, i)) formats = [ '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'long', '', '', '', '' ] ark = Archiver(self.log) ark.archive(data_type, json_file_name) ark.archive(data_type, csv_file_name) ark.copy(data_type, csv_file_name, 'master') ark.copy(data_type, csv_file_name, 'current', excelize=True, xlsx_formats=formats)
def main():
    try:
        os.makedirs(args.output_directory)
    except:
        pass

    last_file_number = (args.initial_file_number
                        + args.total_scenes // args.num_scenes_per_file - 1)
    initial_file_number = args.initial_file_number
    if os.path.isdir(args.output_directory):
        files = os.listdir(args.output_directory)
        for name in files:
            number = int(name.replace(".h5", ""))
            if number > last_file_number:
                continue
            if number < args.initial_file_number:
                continue
            if number < initial_file_number:
                continue
            initial_file_number = number + 1
    total_scenes_to_render = args.total_scenes - args.num_scenes_per_file * (
        initial_file_number - args.initial_file_number)

    assert args.num_scenes_per_file <= total_scenes_to_render

    # Initialize colors
    color_candidates = []
    for n in range(args.num_colors):
        hue = n / args.num_colors
        saturation = 1
        lightness = 1
        red, green, blue = colorsys.hsv_to_rgb(hue, saturation, lightness)
        color_candidates.append((red, green, blue))

    scene, cube_nodes = build_scene(args.num_cubes, color_candidates)
    camera = OrthographicCamera(xmag=0.9, ymag=0.9)
    camera_node = Node(camera=camera)
    scene.add_node(camera_node)
    renderer = OffscreenRenderer(viewport_width=args.image_size,
                                 viewport_height=args.image_size)

    archiver = Archiver(
        directory=args.output_directory,
        num_scenes_per_file=args.num_scenes_per_file,
        image_size=(args.image_size, args.image_size),
        num_observations_per_scene=args.num_observations_per_scene,
        initial_file_number=initial_file_number)

    for scene_index in tqdm(range(total_scenes_to_render)):
        camera_distance = 2
        scene_data = SceneData((args.image_size, args.image_size),
                               args.num_observations_per_scene)

        for observation_index in range(args.num_observations_per_scene):
            # Generate random point on a sphere
            camera_position = np.random.normal(size=3)
            camera_position = camera_distance * camera_position / np.linalg.norm(camera_position)

            # Compute yaw and pitch
            yaw, pitch = compute_yaw_and_pitch(camera_position)

            camera_node.rotation = genearte_camera_quaternion(yaw, pitch)
            camera_node.translation = camera_position

            # Rendering
            flags = RenderFlags.SHADOWS_DIRECTIONAL
            if args.anti_aliasing:
                flags |= RenderFlags.ANTI_ALIASING
            image = renderer.render(scene, flags=flags)[0]

            scene_data.add(image, camera_position, math.cos(yaw),
                           math.sin(yaw), math.cos(pitch), math.sin(pitch))

            if args.visualize:
                plt.clf()
                plt.imshow(image)
                plt.pause(1e-10)

        archiver.add(scene_data)

        # Change cube color and position
        update_cube_color_and_position(cube_nodes, color_candidates)

        # Transfer changes to the vertex buffer on gpu
        udpate_vertex_buffer(cube_nodes)

    renderer.delete()

def merge_invoice_delta(self):
    # master + (daily) delta files
    header_master_file = "{}/master/invoice-headers.csv".format(self.data_dir)
    header_delta_file = "{}/processing/invoices/invoice-headers-delta.csv".format(self.data_dir)
    detail_master_file = "{}/master/invoice-line-items.csv".format(self.data_dir)
    detail_delta_file = "{}/processing/invoices/invoice-line-items-delta.csv".format(self.data_dir)

    # read in as df's
    df_header_master = pd.read_csv(header_master_file, index_col='InvoiceID')
    df_header_delta = pd.read_csv(header_delta_file, index_col='InvoiceID')
    df_detail_master = pd.read_csv(detail_master_file)  #, index_col='LineItemID')
    df_detail_delta = pd.read_csv(detail_delta_file)  #, index_col='LineItemID')

    hm_cnt = df_header_master.shape[0]
    hd_cnt = df_header_delta.shape[0]
    #print("{:,} rows in header master".format(hm_cnt))
    #print("{:,} rows in header delta".format(hd_cnt))
    dm_cnt = df_detail_master.shape[0]
    dd_cnt = df_detail_delta.shape[0]
    #print("{:,} rows in detail master".format(dm_cnt))
    #print("{:,} rows in detail delta".format(dd_cnt))

    h_del_cnt = 0
    d_del_cnt = 0

    # loop through invoice header delta
    for id, row in df_header_delta.iterrows():
        # id record exists, delete it (will be re-inserted next)
        if id in df_header_master.index.values:
            # delete header row
            h_del_cnt = h_del_cnt + 1
            df_header_master.drop(id, inplace=True)
            # delete related detail rows
            d_del_cnt = d_del_cnt + df_detail_master[df_detail_master['InvoiceID'] == id].shape[0]
            df_detail_master.drop(
                df_detail_master[df_detail_master['InvoiceID'] == id].index,
                inplace=True)

    # concat master files (with deletes) + delta files = UPSERTED master files
    df_new_header_master = pd.concat([df_header_master, df_header_delta])
    df_new_detail_master = pd.concat([df_detail_master, df_detail_delta])

    new_header_master_file = "{}/processing/invoices/invoice-headers.csv".format(self.data_dir)
    new_detail_master_file = "{}/processing/invoices/invoice-line-items.csv".format(self.data_dir)

    if os.path.exists(new_header_master_file):
        os.remove(new_header_master_file)
    df_new_header_master.to_csv(new_header_master_file, header=True,
                                index=True, quoting=csv.QUOTE_ALL)

    if os.path.exists(new_detail_master_file):
        os.remove(new_detail_master_file)
    df_new_detail_master.to_csv(new_detail_master_file, header=True,
                                index=False, quoting=csv.QUOTE_ALL)

    self.log.write(
        "INFO [invoice-headers] {:,} invoice records inserted into header master"
        .format(df_new_header_master.shape[0] - hm_cnt))
    self.log.write(
        "INFO [invoice-headers] {:,} invoice records updated in header master"
        .format(hd_cnt - (df_new_header_master.shape[0] - hm_cnt)))
    self.log.write(
        "INFO [invoice-headers] master file written to {}".format(new_header_master_file))
    self.log.write(
        "INFO [invoice-details] {:,} invoice records inserted into detail master"
        .format(df_new_detail_master.shape[0] - dm_cnt))
    self.log.write(
        "INFO [invoice-details] {:,} invoice records updated in detail master"
        .format(dd_cnt - (df_new_detail_master.shape[0] - dm_cnt)))
    self.log.write(
        "INFO [invoice-details] master file written to {}".format(new_detail_master_file))

    ark = Archiver(self.log)
    ark.archive('invoice-headers', new_header_master_file)
    ark.archive('invoice-line-items', new_detail_master_file)
    ark.copy('invoice-headers', new_header_master_file, 'master')
    ark.copy('invoice-line-items', new_detail_master_file, 'master')

    formats = [
        '', '', '', '', '0.00', '0.00', '0.00', '0.00', '', '', '', '', '',
        'short', '', '', '', '', '0.00', '0.00', '0.00', 'long', '', '', ''
    ]
    ark.copy('invoice-headers', new_header_master_file, 'current',
             excelize=True, xlsx_formats=formats)

    formats = [
        '', '', '', 'short', '0', '', '', '', '', '0.00', '0.00', '0.00',
        '', '0.00'
    ]
    ark.copy('invoice-line-items', new_detail_master_file, 'current',
             excelize=True, xlsx_formats=formats)

def main():
    try:
        os.makedirs(args.output_directory)
    except:
        pass

    # Colors
    colors = []
    for n in range(args.num_colors):
        hue = n / args.num_colors
        saturation = 1
        lightness = 1
        red, green, blue = colorsys.hsv_to_rgb(hue, saturation, lightness)
        colors.append(np.array((red, green, blue, 1)))

    renderer = OffscreenRenderer(viewport_width=args.image_size,
                                 viewport_height=args.image_size)

    archiver = Archiver(
        directory=args.output_directory,
        total_scenes=args.total_scenes,
        num_scenes_per_file=min(args.num_scenes_per_file, args.total_scenes),
        image_size=(args.image_size, args.image_size),
        num_observations_per_scene=args.num_observations_per_scene,
        initial_file_number=args.initial_file_number)

    for scene_index in tqdm(range(args.total_scenes)):
        scene = build_scene(floor_textures,
                            wall_textures,
                            fix_light_position=args.fix_light_position)
        place_objects(scene,
                      colors,
                      objects,
                      max_num_objects=args.max_num_objects,
                      discrete_position=args.discrete_position,
                      rotate_object=args.rotate_object)
        camera_distance = 3
        camera = PerspectiveCamera(yfov=math.pi / 4)
        camera_node = Node(camera=camera, translation=np.array([0, 1, 1]))
        scene.add_node(camera_node)
        scene_data = SceneData((args.image_size, args.image_size),
                               args.num_observations_per_scene)

        for observation_index in range(args.num_observations_per_scene):
            # Sample camera position
            rand_position_xz = np.random.uniform(-3, 3, size=2)
            rand_lookat_xz = np.random.uniform(-6, 6, size=2)
            camera_position = np.array([rand_position_xz[0], 1, rand_position_xz[1]])

            # Compute yaw and pitch
            camera_direction = rand_position_xz - rand_lookat_xz
            camera_direction = np.array([camera_direction[0], 0, camera_direction[1]])
            yaw, pitch = compute_yaw_and_pitch(camera_direction)

            camera_node.rotation = genearte_camera_quaternion(yaw, pitch)
            camera_node.translation = camera_position

            # Rendering
            flags = RenderFlags.SHADOWS_DIRECTIONAL
            if args.anti_aliasing:
                flags |= RenderFlags.ANTI_ALIASING
            image = renderer.render(scene, flags=flags)[0]

            scene_data.add(image, camera_position, math.cos(yaw),
                           math.sin(yaw), math.cos(pitch), math.sin(pitch))

            if args.visualize:
                plt.clf()
                plt.imshow(image)
                plt.pause(1e-10)

        archiver.add(scene_data)

    renderer.delete()

def parse(self):
    h_data_type = "invoice-headers"
    d_data_type = "invoice-line-items"
    proc_dir = "processing/invoices"
    json_dir = "{}/{}".format(self.data_dir, proc_dir)
    json_files = [f for f in os.listdir(json_dir) if f.endswith('.json')]
    if not json_files:
        self.log.write(
            "ERROR {}/*.json files do not exist, did you forget to extract this?"
            .format(json_dir))
        return False

    xero_url_accrec = "https://go.xero.com/AccountsReceivable/View.aspx?InvoiceID="
    xero_url_accpay = "https://go.xero.com/AccountsPayable/View.aspx?InvoiceID="

    csv_header_file_name = "{}/{}-delta.csv".format(json_dir, h_data_type)
    csv_detail_file_name = "{}/{}-delta.csv".format(json_dir, d_data_type)

    header_col_header = """
        Type,InvoiceID,InvoiceNumber,Reference,
        AmountDue,AmountPaid,AmountCredited,CurrencyRate,IsDiscounted,HasAttachments,HasErrors,
        ContactID,Name,Date,BrandingThemeID,BrandingThemeName,Status,LineAmountTypes,
        SubTotal,TotalTax,Total,UpdatedDateUTC,CurrencyCode,ProcessingNotes,
        URL
    """
    detail_col_header = """
        InvoiceID,InvoiceNumber,Type,Date,LineItemNumber,
        LineItemID,ItemCode,ItemCode2,Description,UnitAmount,TaxAmount,LineAmount,AccountCode,Quantity
    """
    csv_header_file = open(csv_header_file_name, 'w', encoding='utf-8')
    csv_header_file.write(re.sub(r"[\n\t\s]", "", header_col_header) + "\n")
    csv_detail_file = open(csv_detail_file_name, 'w', encoding='utf-8')
    csv_detail_file.write(re.sub(r"[\n\t\s]", "", detail_col_header) + "\n")

    # read in branding themes
    themes_csv_file = self.data_dir + "/processing/default/branding-themes.csv"
    if not os.path.isfile(themes_csv_file):
        msg = "ERROR {} file does not exist, did you forget to extract this?".format(themes_csv_file)
        print(msg)
        self.log.write(msg)
        return

    themes = {}
    with open(themes_csv_file, "r") as f:
        reader = csv.reader(f, delimiter=",")
        for j, line in enumerate(reader):
            if j > 0:
                themes[line[0]] = line[1]

    i = 0
    j = 0
    for file in json_files:
        i = i + 1
        self.log.write("INFO [invoice-line-items] processing file {}".format(file))
        json_file_name = "{}/{}".format(json_dir, file)
        with open(json_file_name, encoding='utf-8') as f:
            data = json.load(f)

        collection = 'Invoices'
        if collection not in data:
            self.log.write(
                "ERROR '{}' collection not found in JSON file {}".format(collection, file))
            continue

        # zero ts in json header
        zero_created_datetime = clean_date(data['DateTimeUTC'])

        # single invoice, but it is part of a collection
        invoice = data['Invoices'][0]
        type = invoice.get('Type', '')
        invoice_id = invoice['InvoiceID']
        invoice_number = invoice.get('InvoiceNumber', '')
        reference = invoice.get('Reference', '')
        amount_due = invoice.get('AmountDue', 0.00)
        amount_paid = invoice.get('AmountPaid', 0.00)
        amount_credited = invoice.get('AmountCredited', 0.00)
        currency_rate = invoice.get('CurrencyRate', 0.00)
        is_discounted = invoice.get('IsDiscounted', '')
        has_attachments = invoice.get('HasAttachments', '')
        has_errors = invoice.get('HasErrors', '')

        if 'Contact' in invoice and invoice['Contact']:
            contact = invoice['Contact']
            contact_id = contact['ContactID']
            name = contact.get('Name', '')
        else:
            contact = ""
            contact_id = ""
            name = ""

        # use DateString
        date = (invoice['DateString'])[:10] if 'DateString' in invoice else ''
        branding_theme_id = invoice.get('BrandingThemeID', '')
        status = invoice.get('Status', '')
        line_amount_types = invoice.get('LineAmountTypes', '')
        sub_total = invoice.get('SubTotal', '')
        total_tax = invoice.get('TotalTax', '')
        total = invoice.get('Total', '')
        updated_date_utc = clean_date(invoice['UpdatedDateUTC']) if 'UpdatedDateUTC' in invoice else ''
        currency_code = invoice.get('CurrencyCode', '')

        if type == "ACCPAY":
            url = xero_url_accpay + invoice_id
        elif type == "ACCREC":
            url = xero_url_accrec + invoice_id
        else:
            url = ""

        # get branding theme name
        processing_notes = ""
        if branding_theme_id in themes.keys():
            branding_theme_name = themes[branding_theme_id]
        else:
            branding_theme_name = ""
            processing_notes = "branding theme id not found"

        columns = [
            type, invoice_id, invoice_number, reference, amount_due,
            amount_paid, amount_credited, currency_rate, is_discounted,
            has_attachments, has_errors, contact_id, name, date,
            branding_theme_id, branding_theme_name, status,
            line_amount_types, sub_total, total_tax, total,
            updated_date_utc, currency_code, processing_notes, url
        ]
        prep_columns = list(map(lambda col: "\"" + str(col) + "\"", columns))
        line = ",".join(prep_columns) + "\n"
        csv_header_file.write(line)

        # process line items
        if 'LineItems' not in invoice:
            self.log.write("WARN no line items found for invoice {}".format(invoice_id))
            continue

        line_items = invoice['LineItems']
        # artificial li number
        line_item_num = 0
        for line_item in line_items:
            # total line items
            j = j + 1
            line_item_num = line_item_num + 1
            li_lid = line_item['LineItemID']
            li_item_code = line_item.get('ItemCode', '')
            li_desc = line_item.get('Description', '')
            li_unit_amt = line_item.get('UnitAmount', 0.00)
            li_tax_amt = line_item.get('TaxAmount', 0.00)
            li_line_amt = line_item.get('LineAmount', 0.00)
            li_acct_code = line_item.get('AccountCode', '')
            li_qty = line_item.get('Quantity', 0.00)

            # desc field allows cr's, strip here
            li_desc = li_desc.strip("\n")

            # some codes have an additional code piped on to them
            li_item_code2 = ""
            parts = li_item_code.split("|")
            if len(parts) == 2:
                li_item_code = parts[0].strip()
                li_item_code2 = parts[1].strip()

            columns = [
                invoice_id, invoice_number, type, date, line_item_num,
                li_lid, li_item_code, li_item_code2, li_desc, li_unit_amt,
                li_tax_amt, li_line_amt, li_acct_code, li_qty
            ]
            prep_columns = list(map(lambda col: "\"" + str(col) + "\"", columns))
            line = ",".join(prep_columns) + "\n"
            csv_detail_file.write(line)

    csv_header_file.close()
    self.log.write("INFO [{}] CSV file created {} ({:,} records)".format(
        'invoice-headers', csv_header_file_name, i))
    csv_detail_file.close()
    self.log.write("INFO [{}] CSV file created {} ({:,} records)".format(
        'invoice-line-items', csv_detail_file_name, j))

    ark = Archiver(self.log)
    files = list(map(lambda file: "{}/{}".format(json_dir, file), json_files))
    ark.archive('invoices', files)
    ark.archive(h_data_type, csv_header_file_name)
    ark.archive(d_data_type, csv_detail_file_name)

from flask import Flask, jsonify, send_file, request
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

from archiver import Archiver
from presets import PRESETS
from utils import route_preset, load_webcam
from utils import average_color, image_color
from utils import draw_rectangle, img_to_io

app = Flask(__name__)
ARCHIVER = Archiver(PRESETS, callback_minutes=15).start()
route_preset.app = app
route_preset.presets = PRESETS


@app.route("/archive/<string:location>")
def get_archive(location):
    if location not in PRESETS:
        return jsonify([])
    try:
        N = int(request.args.get("N", 96))
    except ValueError:
        N = 96
    finally:
        N = min(N, 96 * 7)
    return jsonify(ARCHIVER.get_last_N(location, N))

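# The Tornado imports above suggest the Flask app is served through Tornado's
# WSGI container; a minimal sketch of that wiring (the port is an assumption):
if __name__ == "__main__":
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(5000)  # hypothetical port
    IOLoop.current().start()
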
print(' original aspect ratio).')
print('')
print('Tarball archiving: (NOT implemented yet)')
print(' All files contained in the target folder will be compressed')
print(' into a .tar.xz archive using the LZMA2 compression filter.')
print('')


def print_error(msg):
    print('archive: ' + str(msg))
    print('Use "archive --help" for info on the usage')


if __name__ == '__main__':
    print_header()
    archiver = Archiver()
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hpvt",
                                   ["help", "pictures", "videos", "tarball"])
    except getopt.GetoptError as err:
        print_error(err)
        sys.exit(2)
    for o, v in opts:
        if o in ('-h', '--help'):
            print_help()
            sys.exit(0)
        elif o in ('-p', '--pictures'):
            archiver.rescale_pictures = True
        elif o in ('-v', '--videos'):

from archiver import Archiver

Archiver().run()

def main():
    try:
        os.makedirs(args.output_directory)
    except:
        pass

    # Load MNIST images
    mnist_images = load_mnist_images()

    # Set GPU device
    rtx.set_device(args.gpu_device)

    screen_width = args.image_size
    screen_height = args.image_size

    # Setting up a raytracer
    rt_args = rtx.RayTracingArguments()
    rt_args.num_rays_per_pixel = 1024
    rt_args.max_bounce = 3
    rt_args.supersampling_enabled = args.anti_aliasing
    rt_args.next_event_estimation_enabled = True
    rt_args.ambient_light_intensity = 0.1

    cuda_args = rtx.CUDAKernelLaunchArguments()
    cuda_args.num_threads = 64
    cuda_args.num_rays_per_thread = 32

    renderer = rtx.Renderer()
    render_buffer = np.zeros((screen_height, screen_width, 3), dtype=np.float32)

    archiver = Archiver(
        directory=args.output_directory,
        total_scenes=args.total_scenes,
        num_scenes_per_file=min(args.num_scenes_per_file, args.total_scenes),
        image_size=(args.image_size, args.image_size),
        num_observations_per_scene=args.num_observations_per_scene,
        initial_file_number=args.initial_file_number)

    camera = rtx.PerspectiveCamera(fov_rad=math.pi / 3,
                                   aspect_ratio=screen_width / screen_height)
    camera_distance = 2

    for _ in tqdm(range(args.total_scenes)):
        scene = build_scene(floor_textures,
                            wall_textures,
                            fix_light_position=args.fix_light_position)
        place_dice(scene,
                   mnist_images,
                   discrete_position=args.discrete_position,
                   rotate_dice=args.rotate_dice)

        scene_data = SceneData((args.image_size, args.image_size),
                               args.num_observations_per_scene)

        for _ in range(args.num_observations_per_scene):
            # Sample camera position
            rand_position_xz = np.random.normal(size=2)
            rand_position_xz = camera_distance * rand_position_xz / np.linalg.norm(rand_position_xz)
            camera_position = np.array(
                (rand_position_xz[0], wall_height / 2, rand_position_xz[1]))
            center = np.array((0, wall_height / 2, 0))

            # Compute yaw and pitch
            camera_direction = camera_position - center
            yaw, pitch = compute_yaw_and_pitch(camera_direction)

            camera.look_at(tuple(camera_position), tuple(center), up=(0, 1, 0))
            renderer.render(scene, camera, rt_args, cuda_args, render_buffer)

            # Convert to sRGB
            image = np.power(np.clip(render_buffer, 0, 1), 1.0 / 2.2)
            image = np.uint8(image * 255)
            image = cv2.bilateralFilter(image, 3, 25, 25)

            scene_data.add(image, camera_position, math.cos(yaw),
                           math.sin(yaw), math.cos(pitch), math.sin(pitch))

            if args.visualize:
                plt.clf()
                plt.imshow(image)
                plt.pause(1e-10)

        archiver.add(scene_data)

def parse(self): data_type = "items" xero_url = "https://go.xero.com/Accounts/Inventory/" proc_dir = "processing/default" json_file_name = "{}/{}/{}.json".format(self.data_dir, proc_dir, data_type) csv_file_name = "{}/{}/{}.csv".format(self.data_dir, proc_dir, data_type) if not os.path.isfile(json_file_name): self.log.write("ERROR {} file does not exist, did you forget to extract this?".format(json_file_name)) return False with open(json_file_name, encoding='utf-8') as f: data = json.load(f) collection = 'Items' if collection not in data: self.log.write("ERROR '{}' collection not found in JSON file".format(collection)) return # zero ts in json header zero_created_datetime = clean_date(data['DateTimeUTC']) col_header = """ ItemID,Code,Code2,Description,PurchaseDescription,UpdatedDateUTC,PurchasedUnitPrice,PurchasedCOGSAccountCode, PurchasedTaxType,SalesUnitPrice,SalesAccountCode,SalesTaxType,Name,IsTrackedAsInventory, InventoryAssetAccountCode,TotalCostPool,QuantityOnHand,IsSold,IsPurchased, SupplierCode,ProductDescription,ProductSegment1,ProductSegment2, PurchaseSupplierCode,PurchaseProductDescription,PurchaseProductSegment1,PurchaseProductSegment2, ProcessingNotes, URL """ csv_file = open(csv_file_name, 'w', encoding='utf-8') csv_file.write(re.sub(r"[\n\t\s]", "", col_header) + "\n") i = 0 for item in data[collection]: i = i + 1 item_id = item['ItemID'] code = item['Code'] description = item['Description'] purchase_description = item['PurchaseDescription'] if 'PurchaseDescription' in item else '' updated_date_utc = clean_date(item['UpdatedDateUTC']) if 'UpdatedDateUTC' in item else '' if 'PurchaseDetails' in item and item['PurchaseDetails']: details = item['PurchaseDetails'] purchase_unit_price = details['UnitPrice'] if 'UnitPrice' in details else '' purchase_cogs_account_code = details['COGSAccountCode'] if 'COGSAccountCode' in details else '' purchase_tax_type = details['TaxType'] if 'TaxType' in details else '' else: purchase_unit_price = '' purchase_cogs_account_code = '' purchase_tax_type = '' if 'SalesDetails' in item and item['SalesDetails']: details = item['SalesDetails'] sales_unit_price = details['UnitPrice'] if 'UnitPrice' in details else '' sales_account_code = details['AccountCode'] if 'AccountCode' in details else '' sales_tax_type = details['TaxType'] if 'TaxType' in details else '' else: sales_unit_price = '' sales_account_code = '' sales_tax_type = '' name = item['Name'] is_tracked_as_inventory = item['IsTrackedAsInventory'] if 'IsTrackedAsInventory' in item else' ' inventory_asset_account_code = item['InventoryAssetAccountCode'] if 'InventoryAssetAccountCode' in item else' ' total_cost_pool = item['TotalCostPool'] if 'TotalCostPool' in item else' ' quantity_on_hand = item['QuantityOnHand'] if 'QuantityOnHand' in item else' ' is_sold = item['IsSold'] if 'IsSold' in item else' ' is_purchased = item['IsPurchased'] if 'IsPurchased' in item else' ' # some codes have an addiitonal code piped on to them code2 = "" parts = code.split("|") if len(parts) == 2: code = parts[0].strip() code2 = parts[1].strip() processing_notes = "" supplier_code = "" product_description = "" product_segment_1 = "" product_segment_2 = "" purchase_supplier_code = "" purchase_product_description = "" purchase_product_segment_1 = "" purchase_product_segment_2 = "" # parse desc's for supplier code, desc, product segment 1, product segment 2 parts = description.split("|") if len(parts) != 4: processing_notes = "malformed [Description] field" else: supplier_code = parts[0].strip() product_description = 
parts[1].strip() product_segment_1 = parts[2].strip() product_segment_2 = parts[3].strip() parts = purchase_description.split("|") if len(parts) != 4: if not processing_notes: ProcessingNotes = "malformed [PurchaseDescription] field" else: processing_notes = processing_notes + "/" + "malformed [PurchaseDescription] field" else: purchase_supplier_code = parts[0].strip() purchase_product_description = parts[1].strip() purchase_product_segment_1 = parts[2].strip() purchase_product_segment_2 = parts[3].strip() url = xero_url + item_id columns = [ item_id, code, code2, description, purchase_description, updated_date_utc, purchase_unit_price, purchase_cogs_account_code, purchase_tax_type, sales_unit_price, sales_account_code, sales_tax_type, name, is_tracked_as_inventory, inventory_asset_account_code, total_cost_pool, quantity_on_hand, is_sold, is_purchased, supplier_code, product_description, product_segment_1, product_segment_2, purchase_supplier_code, purchase_product_description, purchase_product_segment_1, purchase_product_segment_2, processing_notes, url ] prep_columns = list(map(lambda col: "\"" + str(col) + "\"", columns)) line = ",".join(prep_columns) + "\n" csv_file.write(line) csv_file.close() self.log.write("INFO [{}] CSV file created {} ({:,} records)".format(data_type, csv_file_name, i)) formats = [ '', '', '', '', '', 'long', '0.00', '', '', '0.00', '', '', '', '', '', '0.00', '0.00', '', '', '', '', '', '', '', '', '', '', '', '' ] ark = Archiver(self.log) ark.archive(data_type, json_file_name) ark.archive(data_type, csv_file_name) ark.copy(data_type, csv_file_name, 'master') ark.copy(data_type, csv_file_name, 'current', excelize=True, xlsx_formats=formats)
def main():
    try:
        os.makedirs(args.output_directory)
    except:
        pass

    # Load MNIST images
    mnist_images = load_mnist_images()

    renderer = OffscreenRenderer(viewport_width=args.image_size,
                                 viewport_height=args.image_size)

    archiver = Archiver(
        directory=args.output_directory,
        total_scenes=args.total_scenes,
        num_scenes_per_file=min(args.num_scenes_per_file, args.total_scenes),
        image_size=(args.image_size, args.image_size),
        num_observations_per_scene=args.num_observations_per_scene,
        initial_file_number=args.initial_file_number)

    for scene_index in tqdm(range(args.total_scenes)):
        scene = build_scene(floor_textures,
                            wall_textures,
                            fix_light_position=args.fix_light_position)
        place_dice(scene,
                   mnist_images,
                   discrete_position=args.discrete_position,
                   rotate_dice=args.rotate_dice)

        camera_distance = 4
        camera = PerspectiveCamera(yfov=math.pi / 4)
        camera_node = Node(camera=camera, translation=np.array([0, 1, 1]))
        scene.add_node(camera_node)
        scene_data = SceneData((args.image_size, args.image_size),
                               args.num_observations_per_scene)

        for observation_index in range(args.num_observations_per_scene):
            rand_position_xz = np.random.normal(size=2)
            rand_position_xz = camera_distance * rand_position_xz / np.linalg.norm(rand_position_xz)

            # Compute yaw and pitch
            camera_direction = np.array([rand_position_xz[0], 0, rand_position_xz[1]])
            yaw, pitch = compute_yaw_and_pitch(camera_direction)

            camera_node.rotation = genearte_camera_quaternion(yaw, pitch)
            camera_position = np.array([rand_position_xz[0], 1, rand_position_xz[1]])
            camera_node.translation = camera_position

            # Rendering
            flags = RenderFlags.SHADOWS_DIRECTIONAL
            if args.anti_aliasing:
                flags |= RenderFlags.ANTI_ALIASING
            image = renderer.render(scene, flags=flags)[0]

            scene_data.add(image, camera_position, math.cos(yaw),
                           math.sin(yaw), math.cos(pitch), math.sin(pitch))

            if args.visualize:
                plt.clf()
                plt.imshow(image)
                plt.pause(1e-10)

        archiver.add(scene_data)

    renderer.delete()

def __init__(self, cam_config, storage):
    self.storage = storage
    self.cam = IpCamSnapper(cam_config)
    self.writer = FileDao()
    self.archiver = Archiver(self.storage, self.cam)
    self.store_in = self.storage.get_cam_snap_path(self.cam.name)

def parse(self): data_type = "branding-themes" proc_dir = "processing/default" json_file_name = "{}/{}/{}.json".format(self.data_dir, proc_dir, data_type) csv_file_name = "{}/{}/{}.csv".format(self.data_dir, proc_dir, data_type) if not os.path.isfile(json_file_name): self.log.write( "ERROR {} file does not exist, did you forget to extract this?" .format(json_file_name)) return False with open(json_file_name, encoding='utf-8') as f: data = json.load(f) collection = 'BrandingThemes' if collection not in data: self.log.write( "ERROR '{}' collection not found in JSON file".format( collection)) return # zero ts in json header zero_created_datetime = clean_date(data['DateTimeUTC']) col_header = "BrandingThemeID,Name,LogoUrl,Type,SortOrder,CreatedDateUTC" csv_file = open(csv_file_name, 'w', encoding='utf-8') csv_file.write(re.sub(r'\n', '', col_header) + "\n") i = 0 for theme in data['BrandingThemes']: i = i + 1 id = theme['BrandingThemeID'] name = theme['Name'] url = theme['LogoUrl'] if 'LogoUrl' in theme else '' type = theme['Type'] if 'Type' in theme else '' sort_order = theme['SortOrder'] if 'SortOrder' in theme else '' created_date = clean_date(theme['CreatedDateUTC']) columns = [id, name, url, type, sort_order, created_date] prep_columns = list( map(lambda col: "\"" + str(col) + "\"", columns)) line = ",".join(prep_columns) + "\n" csv_file.write(line) csv_file.close() self.log.write("INFO [{}] CSV file created {} ({:,} records)".format( data_type, csv_file_name, i)) ark = Archiver(self.log) ark.archive(data_type, json_file_name)
def __init__(self):
    self.session = requests.Session()
    self.archiver = Archiver()
    self.response = ""
    self.html_soup = ""

def parse(self): data_type = "invoices" xero_url_accrec = "https://go.xero.com/AccountsReceivable/View.aspx?InvoiceID=" xero_url_accpay = "https://go.xero.com/AccountsPayable/View.aspx?InvoiceID=" proc_dir = "processing/default" json_file_name = "{}/{}/{}.json".format(self.data_dir, proc_dir, data_type) csv_file_name = "{}/{}/{}.csv".format(self.data_dir, proc_dir, data_type) if not os.path.isfile(json_file_name): self.log.write("ERROR {} file does not exist, did you forget to extract this?".format(json_file_name)) return False with open(json_file_name, encoding='utf-8') as f: data = json.load(f) collection = 'Invoices' if collection not in data: self.log.write("ERROR '{}' collection not found in JSON file".format(collection)) return # zero ts in json header zero_created_datetime = clean_date(data['DateTimeUTC']) col_header = """ InvoiceID,Type,InvoiceNumber,Reference, AmountDue,AmountPaid,AmountCredited,CurrencyRate,IsDiscounted,HasAttachments,HasErrors, ContactID,Name,Date,BrandingThemeID,BrandingThemeName,Status,LineAmountTypes, SubTotal,TotalTax,Total,UpdatedDateUTC,CurrencyCode,ProcessingNotes, URL """ csv_file = open(csv_file_name, 'w', encoding='utf-8') csv_file.write(re.sub(r"[\n\t\s]", "", col_header) + "\n") # read in branding themes themes_csv_file = self.data_dir + "/processing/default/branding-themes.csv" if not os.path.isfile(themes_csv_file): self.log.write("ERROR {} file does not exist, did you forget to extract this?".format(themes_csv_file)) return themes = {} with open(themes_csv_file, "r") as f: reader = csv.reader(f, delimiter = ",") for j, line in enumerate(reader): if j > 0: themes[line[0]] = line[1] i = 0 for invoice in data['Invoices']: i = i + 1 type = invoice['Type'] if 'Type' in invoice else '' invoice_id = invoice['InvoiceID'] invoice_number = invoice['InvoiceNumber'] if 'InvoiceNumber' in invoice else '' reference = invoice['Reference'] if 'Reference' in invoice else '' amount_due = invoice['AmountDue'] if 'AmountDue' in invoice else 0.00 amount_paid = invoice['AmountPaid'] if 'AmountPaid' in invoice else 0.00 amount_credited = invoice['AmountCredited'] if 'AmountCredited' in invoice else 0.00 currency_rate = invoice['CurrencyRate'] if 'CurrencyRate' in invoice else 0.00 is_discounted = invoice['IsDiscounted'] if 'IsDiscounted' in invoice else '' has_attachments = invoice['HasAttachments'] if 'HasAttachments' in invoice else '' has_errors = invoice['HasErrors'] if 'HasErrors' in invoice else '' if 'Contact' in invoice and invoice['Contact']: contact = invoice['Contact'] contact_id = contact['ContactID'] name = contact['Name'] if 'Name' in contact else '' else: contact = "" contact_id = "" name = "" # use DateString date = (invoice['DateString'])[:10] if 'DateString' in invoice else '' branding_theme_id = invoice['BrandingThemeID'] if 'BrandingThemeID' in invoice else '' status = invoice['Status'] if 'Status' in invoice else '' line_amount_types = invoice['LineAmountTypes'] if 'LineAmountTypes' in invoice else '' sub_total = invoice['SubTotal'] if 'SubTotal' in invoice else '' total_tax = invoice['TotalTax'] if 'TotalTax' in invoice else '' total = invoice['Total'] if 'Total' in invoice else '' updated_date_utc = clean_date(invoice['UpdatedDateUTC']) if 'UpdatedDateUTC' in invoice else '' currency_code = invoice['CurrencyCode'] if 'CurrencyCode' in invoice else '' if type == "ACCPAY": url = xero_url_accpay + invoice_id elif type == "ACCREC": url = xero_url_accrec + invoice_id else: url = "" # get branding theme name processing_notes = "" if branding_theme_id in 
themes.keys(): branding_theme_name = themes[branding_theme_id] else: branding_theme_name = "" processing_note = "branding theme id not found" columns = [ invoice_id, type, invoice_number, reference, amount_due, amount_paid, amount_credited, currency_rate, is_discounted, has_attachments, has_errors, contact_id, name, date, branding_theme_id, branding_theme_name, status, line_amount_types, sub_total, total_tax, total, updated_date_utc, currency_code, processing_notes, url ] prep_columns = list(map(lambda col: "\"" + str(col) + "\"", columns)) line = ",".join(prep_columns) + "\n" csv_file.write(line) csv_file.close() self.log.write("INFO [{}] CSV file created {} ({:,} records)".format(data_type, csv_file_name, i)) formats = [ '', '', '', '', '0.00', '0.00', '0.00', '0.00', '', '', '', '', '', 'short', '', '', '', '', '0.00', '0.00', '0.00', 'long', '', '', '' ] ark = Archiver(self.log) ark.archive(data_type, json_file_name) ark.archive(data_type, csv_file_name) ark.copy(data_type, csv_file_name, 'master') ark.copy(data_type, csv_file_name, 'current', excelize=True, xlsx_formats=formats)
def main():
    try:
        os.makedirs(args.output_directory)
    except:
        pass

    last_file_number = (args.initial_file_number
                        + args.total_scenes // args.num_scenes_per_file - 1)
    initial_file_number = args.initial_file_number
    if os.path.isdir(args.output_directory):
        files = os.listdir(args.output_directory)
        for name in files:
            number = int(name.replace(".h5", ""))
            if number > last_file_number:
                continue
            if number < args.initial_file_number:
                continue
            if number < initial_file_number:
                continue
            initial_file_number = number + 1
    total_scenes_to_render = args.total_scenes - args.num_scenes_per_file * (
        initial_file_number - args.initial_file_number)

    assert args.num_scenes_per_file <= total_scenes_to_render

    # Set GPU device
    rtx.set_device(args.gpu_device)

    # Initialize colors
    colors = []
    for n in range(args.num_colors):
        hue = n / args.num_colors
        saturation = 1
        lightness = 1
        red, green, blue = colorsys.hsv_to_rgb(hue, saturation, lightness)
        colors.append((red, green, blue, 1))

    screen_width = args.image_size
    screen_height = args.image_size

    # Setting up a raytracer
    rt_args = rtx.RayTracingArguments()
    rt_args.num_rays_per_pixel = 1024
    rt_args.max_bounce = 3
    rt_args.supersampling_enabled = args.anti_aliasing
    rt_args.next_event_estimation_enabled = True
    rt_args.ambient_light_intensity = 0.1

    cuda_args = rtx.CUDAKernelLaunchArguments()
    cuda_args.num_threads = 64
    cuda_args.num_rays_per_thread = 32

    renderer = rtx.Renderer()
    render_buffer = np.zeros((screen_height, screen_width, 3), dtype=np.float32)

    archiver = Archiver(
        directory=args.output_directory,
        num_scenes_per_file=args.num_scenes_per_file,
        image_size=(args.image_size, args.image_size),
        num_observations_per_scene=args.num_observations_per_scene,
        initial_file_number=initial_file_number)

    camera = rtx.PerspectiveCamera(fov_rad=math.pi / 3,
                                   aspect_ratio=screen_width / screen_height)
    camera_distance = 2

    for _ in tqdm(range(total_scenes_to_render)):
        scene = build_scene(floor_textures,
                            wall_textures,
                            fix_light_position=args.fix_light_position)
        place_objects(scene,
                      colors,
                      max_num_objects=args.max_num_objects,
                      discrete_position=args.discrete_position,
                      rotate_object=args.rotate_object)

        scene_data = SceneData((args.image_size, args.image_size),
                               args.num_observations_per_scene)

        for _ in range(args.num_observations_per_scene):
            # Sample camera position
            rand_position_xz = np.random.normal(size=2)
            rand_position_xz = camera_distance * rand_position_xz / np.linalg.norm(rand_position_xz)
            camera_position = np.array(
                (rand_position_xz[0], wall_height / 2, rand_position_xz[1]))
            center = np.array((0, wall_height / 2, 0))

            # Compute yaw and pitch
            camera_direction = camera_position - center
            yaw, pitch = compute_yaw_and_pitch(camera_direction)

            camera.look_at(tuple(camera_position), tuple(center), up=(0, 1, 0))
            renderer.render(scene, camera, rt_args, cuda_args, render_buffer)

            # Convert to sRGB
            image = np.power(np.clip(render_buffer, 0, 1), 1.0 / 2.2)
            image = np.uint8(image * 255)
            image = cv2.bilateralFilter(image, 3, 25, 25)

            scene_data.add(image, camera_position, math.cos(yaw),
                           math.sin(yaw), math.cos(pitch), math.sin(pitch))

            if args.visualize:
                plt.clf()
                plt.imshow(image)
                plt.pause(1e-10)

        archiver.add(scene_data)

from archiver import Archiver

addons = [Archiver()]