def __init__(self, _device):
    """Build the combined track/file list store from the device.

    All tracks are appended first; plain files are then appended only
    when their id was not already registered as a track, so every
    object appears exactly once.
    """
    gtk.ListStore.__init__(self, gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_UINT, gobject.TYPE_STRING,
                           gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_STRING, gobject.TYPE_UINT,
                           gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_STRING, gobject.TYPE_PYOBJECT)
    self.__filter_tracks = self.filter_new()
    self.__filter_tracks.set_visible_func(self.__filter_type,
                                          Metadata.TYPE_TRACK)
    self.__filter_folders = self.filter_new()
    self.__filter_folders.set_visible_func(self.__filter_folder)
    self.__current_folder_id = None

    # ids already added as tracks; a set gives O(1) membership tests
    # instead of the O(n) scan the original list needed per file
    track_ids = set()

    # add all tracks
    for track_metadata in _device.get_track_listing():
        assert type(track_metadata) is type(Metadata.Metadata())
        self.append(track_metadata)
        track_ids.add(track_metadata.id)

    # add other files (the ones which are not already registered as tracks)
    for file_metadata in _device.get_file_listing():
        assert type(file_metadata) is type(Metadata.Metadata())
        if file_metadata.id not in track_ids:
            self.append(file_metadata)
def fill(self):
    """Fill the model: one row per playlist, then one row per contained track."""
    for pl in self._device.get_playlist_listing():
        assert type(pl) is type(Metadata.Metadata())
        self.append(pl)
        # tracks are re-parented to their playlist before insertion so the
        # view can group them under it
        for trk in self._device.get_tracks_for_playlist(pl):
            assert type(trk) is type(Metadata.Metadata())
            trk.parent_id = pl.id
            self.append(trk)
def fill(self):
    """Insert every folder reported by the device into the model."""
    #FIXME: sort item so that parent is alway created before its childs
    for folder_metadata in self._device.get_folder_listing():
        assert type(folder_metadata) is type(Metadata.Metadata())
        self.append(folder_metadata)
def append(self, metadata):
    """Append *metadata* as a row of the tree store.

    The row is attached under its parent (looked up by ``parent_id``,
    0 meaning the root) and a TreeRowReference to it is cached by id.
    Returns the iter of the new row.
    """
    assert type(metadata) is type(Metadata.Metadata())
    m = metadata
    if DEBUG_LOCK:
        debug_trace("Requesting lock", sender=self)
    self.__lock.acquire()
    if DEBUG_LOCK:
        debug_trace("Lock acquired", sender=self)
    try:
        parent = 0
        # was "m.parent_id <> 0": "<>" is the deprecated Python 2 operator
        if m.parent_id != 0:
            parent = self.__get_iter(m.parent_id)
        if m.type == Metadata.TYPE_FOLDER:
            row = [m.id, m.parent_id, m.title, "", 0, "folder", m]
        else:
            # was "if Metadata.TYPE_TRACK:" which is always truthy, so every
            # non-folder row got the audio icon; compare the object's type
            icon = "gtk-file"
            if m.type == Metadata.TYPE_TRACK:
                icon = "audio-x-generic"
            row = [m.id, m.parent_id, m.title,
                   util.format_filesize(m.filesize), m.filesize, icon, m]
        iter = gtk.TreeStore.append(self, parent, row)
        self.__cache[m.id] = gtk.TreeRowReference(self, self.get_path(iter))
    finally:
        # release even if the row could not be built, so the model is
        # never left permanently locked
        self.__lock.release()
    if DEBUG_LOCK:
        debug_trace("Lock released", sender=self)
    return iter
def create_playlist(self, playlist_name):
    """Queue a job that creates a playlist named *playlist_name* on the device."""
    meta = Metadata.Metadata()
    meta.id = playlist_name
    meta.title = playlist_name
    meta.filename = playlist_name
    meta.parent_id = 0
    meta.type = Metadata.TYPE_PLAYLIST
    self.__queue_job(self.ACTION_CREATE_PLAYLIST, meta)
def create_folder(self, folder_name, parent_id):
    """Queue a job that creates *folder_name* under *parent_id* on the device."""
    meta = Metadata.Metadata()
    meta.id = folder_name
    meta.title = folder_name
    meta.filename = folder_name
    meta.parent_id = parent_id
    meta.type = Metadata.TYPE_FOLDER
    self.__queue_job(self.ACTION_CREATE_FOLDER, meta)
def __init__(self, object_id, action, status, metadata):
    """Record one transfer job: which object, which action, current status."""
    assert type(metadata) is type(Metadata.Metadata())
    self.object_id = object_id
    self.action = action
    self.status = status
    self.metadata = metadata
    # filled in later if/when the job fails or progresses
    self.exception = None
    self.progress = 0
def __init__(self, statements, database):
    """Hold a batch of SQL statements plus the database they run against."""
    self.statementList = statements
    self.data = []
    # the shared Metadata instance is deliberately published module-wide
    global meta
    meta = Metadata()
    self.qlength = len(statements)
    self.database = database
def set_metadata(self, mid='tmd_id', src_tbl= '', title='metadata', kind='workbook', msg="Mapping Target to Metadata for "):
    """ Function to set metadata that maps source data elements to target data elements.
    :param mid: name of metadata id to reference in the source MDE metadata dictionary.
    :type mid: str
    :param kind: Type of source in which the catalog (data to map) resides in.
    :type kind: str
    :param src_tbl: name of the table where the source-target data resides in.
    :type src_tbl: str
    :param title: name of the target file to map data elements from/to.
    :type title: str
    :return Boolean: True/False
    :rtype Boolean: Boolean
    """
    try:
        # default the source table to the crawler's table name
        if not src_tbl:
            src_tbl = self.attrs['mde']['crawler']['tbl'].lower()
        if self.attrs['log']:
            print(msg, src_tbl, " in ", self.attrs['mdd'])
        # only proceed when *title* matches the configured source's title
        if re.search(str(title), self.objs[kind].attrs['title']):
            self.objs['workbook'].set_workbook()
            self.objs['df'] = self.objs[kind].get_sheet_body(src_tbl.lower())
            if self.objs['df'].empty:
                print(" No Context found to Metadata in ", src_tbl)
            else:
                # index rows by the 'sde' column, then transpose to get a
                # {sde: {column: value}} mapping dictionary
                self.attrs['dict'] = self.objs['df'].set_index('sde',drop=False).T.to_dict('dict')
        else:
            print(' ', title, ' does not exists or it is not a ', kind)
        if not self.objs['df'].empty:
            # derive a stable id from the sorted target-data-element names
            des = self.objs['df']["tde"].tolist()
            des.sort()
            self.attrs['mde']['src'][mid] = int(hashlib.md5("".join(des).encode('utf-8')).hexdigest(), 16)
            self.attrs['id'] = self.attrs['mde']['src'][mid]
            print(self.objs['df'].head(10), '\n Target Data Elements: ', len(des), '\n ID: ', self.attrs['id'])
            self.objs['src_map_metadata'] = Metadata(name=self.attrs['src_mdd'],mid=str(self.get('src', 'smd_id')))
            self.objs['metadata'] = Metadata(name=self.attrs['mdd'], mid=str(self.attrs['id']))
        else:
            print("\t", "ERROR! MISSING DATA!")
    except Exception as e:
        # NOTE(review): broad catch only logs and falls through to the
        # return; if the failure happened before self.attrs['dict'] was
        # ever set, the return itself can raise KeyError — confirm callers
        print(e, "\n\t", "ERROR! ", msg, src_tbl)
    return len(self.attrs['dict']) > 0
def __get_file_metadata(self, file_id):
    """Build a Metadata object from the FILE_LISTING entry for *file_id*."""
    record = self.FILE_LISTING[file_id]
    meta = Metadata()
    meta.id = str(file_id)
    meta.type = TYPE_FILE
    meta.parent_id = str(record[1])
    # the listing keeps the name at index 2; it serves as filename and title
    meta.filename = record[2]
    meta.title = record[2]
    return meta
def __init__(self, tbldata, tblname):
    """Initialise table state and immediately index the supplied data."""
    # shared module-level Metadata instance, rebuilt for every table
    global md
    md = Metadata()
    self.database = {}
    self.tbldata = tbldata
    self.tblname = tblname
    self.clean_data = {}
    self.getPrimary()
    self.addToHash()
def __queue_job(self, job_type, metadata):
    """Wrap *metadata* in a Job, enqueue it, and surface it in the UI."""
    assert type(metadata) is type(Metadata.Metadata())
    new_job = Job(metadata.id, job_type, self.STATUS_QUEUED, metadata)
    self.__queue.put_nowait(new_job)
    self.__model.append(new_job.get_list())
    trace("queued file %s for %s" % (new_job.object_id, new_job.action),
          sender=self)
    # jump to the transfer page so the user sees the queued job
    self.__notebook.set_current_page(1)
def get_playlist_listing(self):
    """Return a Metadata object for every playlist known to the device."""
    result = []
    for pl_id, playlist in self.PLAYLIST_LISTING.items():
        meta = Metadata()
        meta.id = str(pl_id)
        meta.parent_id = str(playlist[0])
        meta.type = TYPE_PLAYLIST
        # index 1 holds the playlist name, used for filename and title alike
        meta.filename = str(playlist[1])
        meta.title = str(playlist[1])
        result.append(meta)
    return result
def __init__(self, _device):
    """Create the tree store and load the device's folders then files."""
    gtk.TreeStore.__init__(self, gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_UINT, gobject.TYPE_STRING,
                           gobject.TYPE_PYOBJECT)
    # id -> TreeRowReference, maintained by append()
    self.__cache = {}
    # lock to prevent more thread for updating the model at the same time
    self.__lock = Lock()
    #FIXME: sort item so that parent is alway created before its childs
    for folder_metadata in _device.get_folder_list():
        assert type(folder_metadata) is type(Metadata.Metadata())
        self.append(folder_metadata)
    for file_metadata in _device.get_filelisting():
        assert type(file_metadata) is type(Metadata.Metadata())
        self.append(file_metadata)
def init_elements(self, metadataFile, csvFile, ncOutput):
    """Initialise metadata, data and naming state for NetCDF generation.

    The output path is validated/derived by Checker and then suffixed
    with the dataset id and ".nc". EGO-specific readers are set up only
    when the naming authority is 'EGO'.
    """
    # the original "self.ncOutput = ncOutput" was a dead store: the value
    # was immediately overwritten by check_source() on the next line
    self.ncOutput = Checker().check_source(metadataFile, csvFile, ncOutput)
    self.metadata = Metadata(metadataFile)
    self.metadataData = self.metadata.get_metadata()
    self.data = Data(csvFile)
    self.ncOutput = self.ncOutput + self.metadata.get_global_attributes(
    ).get_id() + ".nc"
    self.version = self.metadata.get_global_attributes(
    ).get_netcdf_version()
    self.temporalAppendPosition = {}
    self.globalAttributes = Metadata(metadataFile).get_global_attributes()
    self.dimensions = self.metadata.get_dimensions()
    self.naming_authority = self.globalAttributes.attributesList[
        'naming_authority']
    if self.naming_authority == 'EGO':
        self.data_ego = Data_ego(csvFile)
        self.ego_standard_metadata = EgoReaderStandardMetadata()
        self.dimensionsEgo = self.ego_standard_metadata.get_dimensions()
        self.metadata.change_variable()
def get_folder_listing(self):
    """Return a Metadata object for every folder known to the device."""
    result = []
    for folder_id, folder in self.FOLDER_LISTING.items():
        meta = Metadata()
        meta.id = str(folder_id)
        meta.parent_id = str(folder[0])
        meta.type = TYPE_FOLDER
        # index 1 holds the folder name, used for filename and title alike
        meta.filename = folder[1]
        meta.title = folder[1]
        result.append(meta)
    return result
def append(self, metadata):
    """Insert *metadata* under its parent, before its successor when one exists.

    Always returns False so the call can be scheduled through idle_add.
    """
    assert type(metadata) is type(Metadata.Metadata())
    parent = self.__get_iter(metadata.parent_id)
    row = [metadata.id, metadata.parent_id, metadata.title,
           metadata.get_icon(), metadata]
    successor = self.__get_iter(metadata.next_object)
    if successor:
        iter = gtk.TreeStore.insert_before(self, parent, successor, row)
    else:
        iter = gtk.TreeStore.append(self, parent, row)
    return False  # needed by idle_add
def append(self, metadata):
    """Append a track row to the list store and cache a reference to it.

    Returns the iter of the new row.
    """
    assert type(metadata) is type(Metadata.Metadata())
    m = metadata
    if DEBUG:
        debug_trace("Requesting lock", sender=self)
    self.__lock.acquire()
    if DEBUG:
        debug_trace("Lock acquired", sender=self)
    try:
        # NOTE(review): format_filesize() is applied to a *duration* here;
        # looks suspicious but is kept as-is — confirm against the column
        # definition before changing it.
        iter = gtk.ListStore.append(self, [m.id, m.title, m.artist, m.album,
                                           m.genre,
                                           util.format_filesize(m.duration),
                                           m.duration, m.date, m])
        self.__cache[m.id] = gtk.TreeRowReference(self, self.get_path(iter))
    finally:
        # release even if appending fails, so the model is never left locked
        self.__lock.release()
    if DEBUG:
        debug_trace("Lock released", sender=self)
    return iter
def __init__(self, _device):
    """Create the track list store and fill it from the device."""
    gtk.ListStore.__init__(self, gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_STRING, gobject.TYPE_STRING,
                           gobject.TYPE_UINT, gobject.TYPE_STRING,
                           gobject.TYPE_PYOBJECT)
    # id -> TreeRowReference, maintained by append()
    self.__cache = {}
    # lock to prevent more thread for updating the model at the same time
    self.__lock = Lock()
    for track_metadata in _device.get_tracklisting():
        assert type(track_metadata) is type(Metadata.Metadata())
        self.append(track_metadata)
def __get_track_metadata(self, track_id):
    """Build a track Metadata object from the raw FILE_LISTING row."""
    record = self.FILE_LISTING[track_id]
    meta = Metadata()
    meta.id = str(track_id)
    meta.type = TYPE_TRACK
    meta.parent_id = str(record[1])
    meta.filename = record[2]
    meta.title = record[3]
    meta.artist = record[4]
    meta.album = record[5]
    meta.genre = record[6]
    meta.filesize = record[7]
    meta.date = record[8]
    return meta
def append(self, metadata):
    """Append one file/track row; returns False so idle_add won't repeat it."""
    assert type(metadata) is type(Metadata.Metadata())
    m = metadata
    # a zero/empty date is rendered as an empty string
    date_str = ""
    if metadata.date:
        date_str = datetime.datetime.fromtimestamp(
            metadata.date).strftime('%a %d %b %Y')
    gtk.ListStore.append(self, [m.id, m.parent_id, m.type, m.filename,
                                m.title, m.artist, m.album, m.genre,
                                util.format_filesize(m.filesize),
                                m.filesize, date_str, m.date,
                                m.get_icon(), m])
    return False  # needed by idle_add
def append(self, metadata):
    """Append one row under the model lock and cache a reference to it.

    Returns the iter of the new row.
    """
    assert type(metadata) is type(Metadata.Metadata())
    m = metadata
    # a zero/empty date is rendered as an empty string
    date_str = ""
    if metadata.date:
        date_str = datetime.datetime.fromtimestamp(
            metadata.date).strftime('%a %d %b %Y')
    if DEBUG_LOCK:
        debug_trace("Requesting lock", sender=self)
    self.__lock.acquire()
    if DEBUG_LOCK:
        debug_trace("Lock acquired", sender=self)
    try:
        iter = gtk.ListStore.append(self, [m.id, m.parent_id, m.title,
                                           m.artist, m.album, m.genre,
                                           util.format_filesize(m.filesize),
                                           m.filesize, date_str, m.date, m])
        self.__cache[m.id] = gtk.TreeRowReference(self, self.get_path(iter))
    finally:
        # release even if appending fails, so other threads are not
        # blocked forever on a poisoned lock
        self.__lock.release()
    if DEBUG_LOCK:
        debug_trace("Lock released", sender=self)
    return iter
class Main:
    # NOTE(review): this loop runs at class-definition time, not inside a
    # method — the class body doubles as the program entry point.
    database = {}
    md = Metadata()
    statement = ''
    # simple REPL: read statements until the user types 'quit'
    while statement != 'quit':
        statement = input("SQL>")
        statement = statement.lower()
        # strip quotes and normalise commas to the internal '|' separator
        statement = statement.replace('\"', "")
        statement = statement.replace('\'', "")
        statement = statement.replace(",", "|")
        print(statement)
        if statement == 'quit':
            break
        else:
            q1 = Query(statement, database)
            q1.classify_query()
def send_file(self, uri, selrow_metadata):
    """Send *uri* to the device, targeting the folder of the selected row.

    selrow_metadata: the metadata of the selected row, or None to drop
    at the root (parent id 0). When the selected row is not a folder,
    its parent is used instead.
    """
    assert not selrow_metadata or type(selrow_metadata) is type(
        Metadata.Metadata())
    parent_id = 0
    if selrow_metadata:
        parent_id = selrow_metadata.id
        debug_trace("files where dropped on %s" % parent_id, sender=self)
        # if the row is not a folder, take the parent which should be one
        # (was "<>", the deprecated Python 2 inequality operator)
        if selrow_metadata.type != Metadata.TYPE_FOLDER:
            parent_id = selrow_metadata.parent_id
            debug_trace(
                "It was not a folder. Its parent %s is taken instead." %
                parent_id, sender=self)
    return self.__transferManager.send_file(uri, parent_id)
def __init__(self, title, file, sfx='.csv', kind='workbook', log=False):
    """
    Constructor to initialize attributes, including objects, of the MDE such as the workbook, bucket
    and metadata, as well as other parameters such as the source, extraction time, table name, among others.
    :param kind: the type of source that the Module will be extracting data from, either workbook, s3 or database.
    :type kind: str
    :param title: the name of the google sheet workbook (source), if any, containing the data.
    :type title: str
    :param file: the name of the file (source), in S3, containing the data.
    :type file: str
    :param sfx: the sufix of the file, if provided, to read and write.
    :type sfx: str
    :return:
    :rtype:
    """
    self.attrs = {}
    self.attrs['bucket'] = {'dir': '', 'file': None}
    self.attrs['date'] = datetime.date.today().strftime("%m-%d-%Y")
    self.objs = {}
    self.objs.update({
        'df': pd.DataFrame({}),
        'bucket': None,
        'metadata': None,
        'workbook': Workbook(title)
    })
    #-----------------------------------------------------------------------
    # Dynamically obtain source reference from title and file name, or user.
    #-----------------------------------------------------------------------
    file = file.lower()
    # event code is the part of the file name before the first '-'
    sevent = file.split('-')[0]
    # non-word characters are normalised to '_' for file/table names
    sfile = re.sub(r'\W', '_', file)
    # study code and source id come from the first and last '-' segment
    sstudy = title.split('-')[0]
    sid = title.split('-')[-1]
    srefs = re.split(r'\W', sid)
    print(srefs)
    # reference acronym: first character of each non-empty token in the id
    sref = "".join(
        [re.sub(r'\(|\)', '', ref)[0] for ref in srefs if len(ref) > 0])
    #---------------------------------------------------------------------------------------
    self.attrs['src'] = {}
    self.attrs['src']['name'] = sstudy + '_' + sref + '_' + sfile + sfx
    self.attrs['table'] = re.sub(r'\W', '_',
                                 self.attrs['src']['name']).lower()
    self.attrs['src'].update({
        'title': title,
        'refs': srefs,
        'study': sstudy,
        'sid': sid,
        'mmd_id': '',
        'tmd_id': '',
        'mapped': False
    })
    self.attrs['src'].update({
        'ref': sref,
        'ofile': file,
        'file': sfile,
        'sfx': sfx,
        'type': kind,
        'event': sevent,
        'sdes': []
    })
    # stable numeric source id derived from the composed source name
    self.attrs['src']['smd_id'] = int(
        hashlib.md5(str(
            self.attrs['src']['name']).encode('utf-8')).hexdigest(), 16)
    self.attrs['log'] = log
    ### Step 8: Creating a Metadata Object for Source Metadata and Loading it from the Data Lake
    self.objs['metadata'] = Metadata(name='source_metadata',
                                     mid=str(self.attrs['src']['smd_id']))
    # NOTE(review): AwsGlue.__init__ is called explicitly — presumably this
    # class inherits from AwsGlue; confirm against the class statement
    AwsGlue.__init__(self)
# ad-hoc exercise script for the object-manager / metadata classes
# (Python 2 print statements; obj_m and obj2 are built earlier in the file)
print obj_m.__str__()
print obj_m.getNoOfObjects()
obj_m.addObject(obj2)
print obj_m.getNoOfObjects()
obj3 = obj_m.list_objects[0]
obj3.x = 45
obj3.__str__()
obj_m.list_objects[2].__str__()
obj_m.list_objects[1].__str__()
obj_m.newObject()
print obj_m.__str__()
# testing metadata
a = Metadata_addition(0.0, 100.0, 200.0)
a.setMin(3)  # not working
print a.__str__()
md = Metadata("in", "out", [a])
print md.__str__()
md.exportNewMetadata()
pm = Processed_Img(obj_m, a)
print pm.getMetadataAdditionStr()
pm.algorithm()
pm.exportImg("<outFileName>")
print "End of test script"