def _run_install_cli(desktop, icon):
    """Run the full install interface.

    The user is asked for all the basic info a .desktop file requires,
    and is given the option to install an icon or not.
    """
    desktop.terminal = prompt.for_yes_no("Terminal app?")
    desktop.tooltip = prompt.for_string("Enter tooltip")
    desktop.exe = prompt.for_string("Enter execution command")
    output.option_list("Select a category", const.CATEGORIES)
    selection = prompt.for_selection("Make a selection", const.CATEGORIES_LEN)
    desktop.category = const.CATEGORIES[selection]
    icon.install_icon = prompt.for_yes_no("Install an icon?")
    if icon.install_icon:
        icon.icon_to_install = prompt.for_path("Enter full path to icon")
        icon.icon_type = utils.file_type(icon.icon_to_install)
        output.option_list("Select icon size", const.ICON_SIZES)
        selection = prompt.for_selection("Make a selection", const.ICON_SIZES_LEN)
        icon.icon_size = const.ICON_SIZES[selection]
        desktop.icon = str(icon)
    return desktop, icon
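# A minimal sketch of what the fields collected above map to in a freedesktop
# .desktop entry; render_desktop_entry() is hypothetical, the Name key is
# assumed to be gathered elsewhere, and the attribute names follow the
# assignments in _run_install_cli().
def render_desktop_entry(desktop) -> str:
    return (
        "[Desktop Entry]\n"
        "Type=Application\n"
        "Comment={tooltip}\n"
        "Exec={exe}\n"
        "Icon={icon}\n"
        "Categories={category}\n"
        "Terminal={terminal}\n"
    ).format(
        tooltip=desktop.tooltip,
        exe=desktop.exe,
        icon=desktop.icon,
        category=desktop.category,
        terminal='true' if desktop.terminal else 'false',
    )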
def album_art(self, art_location: str):
    with open(art_location, 'rb') as image:
        image_data = image.read()
    # Choose the MP4 cover format from the file's MIME type; default to JPEG.
    image_format = (MP4Cover.FORMAT_PNG
                    if file_type(art_location) == 'image/png'
                    else MP4Cover.FORMAT_JPEG)
    self.file['covr'] = [MP4Cover(image_data, imageformat=image_format)]
    return self
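# Hedged usage sketch: the same cover-attachment logic applied to an .m4a
# file directly with mutagen; attach_mp4_cover() and the file paths are
# placeholders, and PNG detection falls back to the file extension since
# file_type() is a project helper.
from mutagen.mp4 import MP4, MP4Cover

def attach_mp4_cover(track_path: str, art_path: str) -> None:
    with open(art_path, 'rb') as image:
        data = image.read()
    fmt = (MP4Cover.FORMAT_PNG if art_path.lower().endswith('.png')
           else MP4Cover.FORMAT_JPEG)
    audio = MP4(track_path)
    audio['covr'] = [MP4Cover(data, imageformat=fmt)]
    audio.save()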
def album_art(self, art_location: str):
    with open(art_location, 'rb') as image:
        image_data = image.read()
    # Build the raw WM/Picture blob by hand, following:
    # https://github.com/beetbox/mediafile/blob/master/mediafile.py#L231
    # Layout: picture type (1 byte) and data length (little-endian int32),
    # then null-terminated UTF-16-LE MIME and description strings,
    # then the raw image bytes.
    value = struct.pack('<bi', 3, len(image_data))  # type 3 = front cover
    value += file_type(art_location).encode('utf-16-le') + b'\x00\x00'
    value += 'Cover'.encode('utf-16-le') + b'\x00\x00'
    value += image_data
    self.file['WM/Picture'] = ASFByteArrayAttribute(value=value)
    return self
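# A hedged round-trip sketch: parsing a WM/Picture blob built as above, to
# illustrate the byte layout; parse_wm_picture() is hypothetical.
import struct

def parse_wm_picture(value: bytes):
    pic_type, length = struct.unpack_from('<bi', value, 0)
    pos = struct.calcsize('<bi')

    def read_utf16(buf: bytes, start: int):
        # scan for the two-byte UTF-16-LE null terminator
        end = start
        while buf[end:end + 2] != b'\x00\x00':
            end += 2
        return buf[start:end].decode('utf-16-le'), end + 2

    mime, pos = read_utf16(value, pos)
    desc, pos = read_utf16(value, pos)
    return pic_type, mime, desc, value[pos:pos + length]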
def album_art(self, art_location: str):
    with open(art_location, 'rb') as image:
        image_data = image.read()
    self.file['APIC'] = APIC(
        encoding=0,                    # 0 = Latin-1 text encoding for desc
        mime=file_type(art_location),  # e.g. 'image/jpeg' or 'image/png'
        type=3,                        # 3 = front cover
        desc='Cover',
        data=image_data
    )
    return self
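# Hedged usage sketch: writing the same APIC frame with mutagen's ID3 API on
# a standalone file; attach_id3_cover() is hypothetical, and the MIME type is
# passed in explicitly since file_type() is a project helper.
from mutagen.id3 import ID3, APIC

def attach_id3_cover(track_path: str, art_path: str, mime: str) -> None:
    with open(art_path, 'rb') as image:
        data = image.read()
    tags = ID3(track_path)
    tags.add(APIC(encoding=0, mime=mime, type=3, desc='Cover', data=data))
    tags.save()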
def create_album_art(self, art_location: str):
    with open(art_location, 'rb') as image:
        image_data = image.read()
    image = Image.open(io.BytesIO(image_data))
    picture = Picture()
    picture.data = image_data
    picture.type = 3  # COVER_FRONT
    picture.mime = file_type(art_location)
    picture.width, picture.height = image.size
    picture.depth = mode_to_depth(image.mode)
    picture.desc = "Front Cover"
    image.close()
    return picture
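# Hedged usage sketch: embedding the Picture returned by create_album_art()
# into a FLAC file with mutagen; attach_flac_cover() is hypothetical.
from mutagen.flac import FLAC, Picture

def attach_flac_cover(track_path: str, picture: Picture) -> None:
    audio = FLAC(track_path)
    audio.clear_pictures()  # drop any previously embedded art
    audio.add_picture(picture)
    audio.save()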
def _retrieve_remote_file(self, url, destfilename=None):
    """Retrieve a file from a remote location.

    Logs in to the archive's private page if necessary.
    """
    # Create a temporary file if no destination is given.
    # NOTE: os.tmpnam() is deprecated and was removed in Python 3;
    # tempfile.mkstemp() is the modern replacement.
    if not destfilename:
        destfilename = os.tmpnam()
    content = fetch_remote_resource(url, self.web_user, self.web_password)
    if file_type(content) is None:
        # content type unknown: write it through gzip, compressing on disk
        fd = gzip.GzipFile(destfilename, 'wb')
    else:
        fd = open(destfilename, 'wb')
    fd.write(content)
    fd.close()
    return destfilename, len(content)
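# A hedged, standalone sketch of the same retrieve-and-store pattern using
# only the standard library (no login or gzip handling); retrieve_remote_file()
# is hypothetical, and fetch_remote_resource()/file_type() above are project
# helpers not shown in this excerpt.
import os
import tempfile
import urllib.request

def retrieve_remote_file(url, destfilename=None):
    if destfilename is None:
        fd, destfilename = tempfile.mkstemp()
        os.close(fd)
    with urllib.request.urlopen(url) as response:
        content = response.read()
    with open(destfilename, 'wb') as out:
        out.write(content)
    return destfilename, len(content)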
def main(input_table=None, sr=None, output_loc=None, output_gdb=None,
         output_fc=None, genetic=None, identification=None, location=None,
         other=None, mode='toolbox', protected_map=config.protected_columns):
    # set mode based on how the script is called.
    settings.mode = mode

    # First, create a geodatabase for all our future results.
    # TODO: can we generate this from a single value?
    gdb_path = os.path.abspath(os.path.join(output_loc, output_gdb + '.gdb'))

    # If the user is calling this from the command line, they won't
    # necessarily have entered a full path for the FC output. Infer it
    # from the input instead.
    if output_fc.lower().find('gdb') == -1:
        output_fc = os.path.join(gdb_path, output_fc)

    # Check whether we received a valid spatial reference -- if not, use WGS84.
    if sr in ('', None):
        # default spatial reference can be redefined.
        sr = config.sr

    try:
        # only try to create this GDB if it doesn't already exist.
        if not os.path.exists(gdb_path):
            # Process: Create File GDB
            # SYNTAX: CreateFileGDB_management (out_folder_path, out_name, {out_version})
            arcpy.CreateFileGDB_management(output_loc, output_gdb, "CURRENT")
            utils.msg("File geodatabase successfully created: %s" % gdb_path)
        else:
            utils.msg("File geodatabase already exists, skipping creation.")
    except Exception as e:
        utils.msg("Error creating file geodatabase", mtype='error', exception=e)
        sys.exit()

    # TODO: WE NEED TO DO A FULL CLASSIFICATION OF THE INPUT AND MANUALLY
    # BUILD UP THE LAYER... We'll have two columns per locus; need to
    # import them correctly.

    # Start things off by importing the table directly. We still need to edit
    # the header because of ArcGIS' restrictions on table names.

    # do we have a text-based file?
    file_type = utils.file_type(input_table)
    if file_type == 'Text':
        # Generate a temporary copy of the input CSV which corrects it for
        # ArcGIS, stripping invalid column label characters.
        data_table = utils.validate_table(input_table)

        # TODO: use field mapping to handle the date-time field?
        utils.protect_columns(data_table, protected_map)
    else:
        data_table = input_table

    # write out our table, after additional validation.
    try:
        arcpy.env.overwriteOutput = settings.overwrite

        # generate a table name based on the input name
        (label, ext) = os.path.splitext(os.path.basename(input_table))

        # ValidateTableName will produce a valid table name from our input file
        validated_label = arcpy.ValidateTableName(label)

        # write out our filtered table to ArcGIS
        arcpy.TableToTable_conversion(data_table, gdb_path, validated_label)

        if file_type == 'Text':
            # Delete the temporary table with validated names;
            # the temp file is stored in the same spot as the original.
            temp_dir = os.path.dirname(input_table)
            temp_path = os.path.join(temp_dir, data_table)
            os.remove(temp_path)
    except Exception as e:
        utils.msg("Error converting table %s to GDB" % input_table,
                  mtype='error', exception=e)
        sys.exit()

    input_csv = os.path.join(gdb_path, validated_label)
    utils.msg("Table successfully imported: \n %s" % input_csv)
    fields = [f.name.lower() for f in arcpy.ListFields(input_csv)]

    # Initially, our date column is imported as text to prevent ArcGIS
    # from inadvertently munging it. Add a formatted date column.
    try:
        # TODO: make the date field defined elsewhere.
        input_time_field = "Date_Time"
        field_name = 'Date_formatted'
        expression = 'formatDate(!{input_time_field}!)'.format(
            input_time_field=input_time_field)
        code_block = """
import dateutil.parser
def formatDate(input_date):
    parsed_date = dateutil.parser.parse(input_date)
    return parsed_date.strftime("%m/%d/%Y %H:%M:%S")"""
        # check if a formatted date field already exists; if so, skip this step
        if field_name.lower() not in fields:
            arcpy.AddField_management(input_csv, field_name, 'DATE')
            arcpy.CalculateField_management(input_csv, field_name, expression,
                                            "PYTHON_9.3", code_block)
            utils.msg("Added a formatted date field: {field_name}.".format(
                field_name=field_name))
    except Exception as e:
        utils.msg("Error parsing date information", mtype='error', exception=e)
        sys.exit()

    # coordinate columns
    x = y = None

    # Convert the table to a temporary spatial feature
    try:
        if location is None:
            raise Exception("Required location columns not set.")

        # A temporary XY layer is needed to create the feature class.
        # NOTE: this table is deleted when the script finishes.
        temporary_layer = input_csv + '_xy_temp'

        # for 'location', ArcGIS passes semicolon-separated values
        loc_parts = location.split(";")
        # TODO: ArcGIS doesn't preserve order; do we need separate fields
        # for these, or some other approach?
        if loc_parts[0].lower() in ['x', 'longitude', 'lon']:
            (x, y) = loc_parts[:2]
        else:
            (y, x) = loc_parts[:2]

        # Process: Make XY Event Layer. This layer is temporary and will be
        # deleted upon script completion.
        # SYNTAX: arcpy.MakeXYEventLayer_management(table, in_x_field,
        #         in_y_field, out_layer, {spatial_reference}, {in_z_field})
        arcpy.MakeXYEventLayer_management(input_csv, x, y, temporary_layer, sr)
    except Exception as e:
        utils.msg("Error making XY Event Layer", mtype='error', exception=e)
        sys.exit()
    utils.msg("XY event layer successfully created.")

    # Copy our features to a permanent layer
    try:
        # for this step, overwrite any existing results
        arcpy.env.overwriteOutput = True

        # Process: Copy Features
        # SYNTAX: CopyFeatures_management (in_features, out_feature_class,
        #         {config_keyword}, {spatial_grid_1}, {spatial_grid_2}, {spatial_grid_3})
        arcpy.CopyFeatures_management(temporary_layer, output_fc, "", "0", "0", "0")
        utils.msg("Features successfully created: \n %s" % output_fc)
    except Exception as e:
        utils.msg("Error copying features to a feature class",
                  mtype='error', exception=e)
        sys.exit()

    utils.msg("Feature Class successfully created, your SRGD file has been imported!")

    try:
        haplotype_table = os.path.join(
            gdb_path, "{}_{}".format(validated_label, 'Haplotypes'))

        # look up our haplotype data
        haplotypes = utils.Haplotype(output_fc)

        # create a dictionary for inserting records
        dts = {'names': ('code', 'haplotype', 'count'),
               'formats': (numpy.uint16, 'S6', numpy.uint8)}

        # create a numpy formatted structure from this data
        array = numpy.rec.fromrecords(haplotypes.indexed, dtype=dts)

        # output the new table
        arcpy.da.NumPyArrayToTable(array, haplotype_table)
        utils.msg("Haplotype table created: \n {}".format(haplotype_table))
    except Exception as e:
        utils.msg("Error creating supplemental haplotype table",
                  mtype='error', exception=e)
        sys.exit()

    # Because we can't pass around objects between this process and the
    # calling add-in environment, dump out the settings to our shared
    # configuration file.
    try:
        config.update('fc_path', output_fc.strip())
        config.update('x_coord', x)
        config.update('y_coord', y)
        var_types = {
            'identification': identification,
            'genetic': genetic,
            'location': location,
            'other': other
        }
        if identification is None:
            raise Exception("Required Identification columns not entered.")

        # the first ID field should be used as the default key.
        id_cols = identification.split(";")
        id_field = id_cols[0]
        for (i, col) in enumerate(id_cols):
            # FIXME: this will always set individual_id to the primary key.
            if col.lower() == 'individual_id':
                id_field = id_cols[i]
        config.update('id_field', id_field)

        for (var, val) in var_types.items():
            if val is None:
                val = ''
            config.update('%s_columns' % var, val.strip())
    except Exception as e:
        msg = "Error creating output configuration file: %s" % config.config_path
        utils.msg(msg, mtype='error', exception=e)
        sys.exit()

    # clean up: remove intermediate steps.
    try:
        arcpy.Delete_management(temporary_layer)
    except Exception as e:
        utils.msg("Unable to delete temporary layer", mtype='error', exception=e)
        sys.exit()
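# Hedged sketch of a command-line entry point for main() above; the module's
# actual CLI wiring is not shown in this excerpt, so the argument names
# simply mirror the function signature.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description="Import an SRGD table into a file geodatabase.")
    parser.add_argument('input_table')
    parser.add_argument('output_loc')
    parser.add_argument('output_gdb')
    parser.add_argument('output_fc')
    parser.add_argument('--sr', default=None)
    parser.add_argument('--genetic', default=None)
    parser.add_argument('--identification', default=None)
    parser.add_argument('--location', default=None)
    parser.add_argument('--other', default=None)
    args = parser.parse_args()
    main(input_table=args.input_table, sr=args.sr,
         output_loc=args.output_loc, output_gdb=args.output_gdb,
         output_fc=args.output_fc, genetic=args.genetic,
         identification=args.identification, location=args.location,
         other=args.other)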