def data(self, index, role):
    """
    Qt model data accessor for the merge-scan tree.

    Maps a (index, role) pair onto the FileSystemMerge record held by
    self.mapper and returns the value Qt asked for:

    - CheckStateRole / COL_CHECKED  -> the row's checked state
    - DecorationRole / COL_ICON     -> a folder icon for directories, else None
    - TextColorRole                 -> red for deleted items
    - FontRole                      -> italic for symlinks / unreadable items
    - DisplayRole                   -> per-column text (name, change type,
                                       size, modified date, permissions)

    Returns an empty QVariant for invalid indexes, unknown ids and
    unhandled roles, as the Qt model contract expects.
    """
    if not index.isValid():
        return QVariant()
    ms = self.mapper.getMergeScanForId(index.internalId())
    if ms is None:
        logger.critical(
            "data: wasn't able to get the FileSystemMerge for internal id {0}"
            .format(index.internalId()))
        return QVariant()
    # Hoist the column lookup; it is consulted by most role branches.
    column = index.column()
    if role == Qt.CheckStateRole and column == MergeScanTreeModel.COL_CHECKED:
        return ms.checked
    if role == Qt.DecorationRole and column == MergeScanTreeModel.COL_ICON:
        return self.IconProvider.icon(
            QFileIconProvider.Folder) if ms.is_dir else None
    if role == Qt.TextColorRole:
        # Deleted items are rendered in red; everything else keeps the
        # default palette colour.
        if ms.flags == PersistentScanningState.ITEM_DELETED:
            return QColor(Qt.red)
        return QVariant()
    if role == Qt.FontRole:
        font = QFont()
        info = PathInfo(ms.abs_path, ms.path_info)
        # Symlinks and unreadable entries are shown italicised.
        if info.is_symlink or ms.flags == PersistentScanningState.ITEM_UNREADABLE:
            font.setItalic(True)
        return font
    if role == Qt.DisplayRole:
        data = None
        if column == MergeScanTreeModel.COL_NAME:
            # Removed a leftover debugging statement here
            # (`if "Sample" in data: x = 0`) — it had no effect and only
            # existed as a breakpoint anchor.
            data = os.path.basename(ms.abs_path)
        elif column == MergeScanTreeModel.COL_CHANGE_TYPE:
            if ms.flags == PersistentScanningState.ITEM_ADDED:
                data = "Added"
            elif ms.flags == PersistentScanningState.ITEM_DELETED:
                data = "Deleted"
            elif ms.flags == PersistentScanningState.ITEM_MODIFIED:
                data = "Modified"
            elif ms.flags == PersistentScanningState.ITEM_UNCHANGED:
                data = ""
        elif column == MergeScanTreeModel.COL_SIZE:
            # Directories show no size.
            data = PathInfo(
                ms.abs_path, ms.path_info
            ).size_human_readable if not ms.is_dir else None
        elif column == MergeScanTreeModel.COL_DATE_MODIFIED:
            value = QDateTime.fromTime_t(
                int(PathInfo(ms.abs_path, ms.path_info).modified_date))
            data = value.toString()
        elif column == MergeScanTreeModel.COL_PERMISSIONS:
            info = PathInfo(ms.abs_path, ms.path_info)
            data = "{0}:{1} {2}".format(info.uid, info.gid,
                                        info.posix_perms_human_readable)
        return data
    return QVariant()
def test_permissions(self):
    """PathInfo exposes dirname/basename/size and, on POSIX, ownership/perms.

    Uses 0oNNN octal literals (valid on Python 2.6+ and Python 3) instead of
    the legacy 0NNN form, which is a syntax error under Python 3.
    """
    info = PathInfo(self.tempFile1)
    self.assertEqual(info.dirname, os.path.dirname(self.tempFile1))
    self.assertEqual(info.basename, os.path.basename(self.tempFile1))
    self.assertEqual(info.size_bytes, 2048)
    if not Platform.isWindows:
        # uid/gid/permission checks only make sense on POSIX platforms.
        self.assertEqual(info.uid, os.geteuid())
        self.assertEqual(info.gid, os.getegid())
        self.assertEqual(info.posix_perms, 0o755)
        self.assertNotEqual(info.posix_perms, 0o711)
def test_junction_detection(self):
    """NTFS junctions under ProgramData should be reported as symlinks.

    The original literals ("C:\\\\ProgramData\\Documents") relied on Python
    passing the unknown escape \\D through unchanged — a DeprecationWarning
    on modern Python. Raw strings produce the identical path values
    explicitly.
    """
    # add more here as you see fit...
    paths = [r"C:\ProgramData\Documents", r"C:\ProgramData\Desktop"]
    for path in paths:
        is_symlink = PathInfo(path).is_symlink
        self.assertEqual(True, is_symlink)
def test_can_use_two_param_constructor(self):
    """Round-trip: a PathInfo rebuilt from (abs_path, json) matches the original."""
    original = PathInfo(self.tempFile1)
    serialized = original.to_json()
    rebuilt = PathInfo(original.abs_path, serialized)
    self.assertEqual(rebuilt.to_json(), serialized)
    self.assertFalse(rebuilt.is_symlink)
def test_properties_cannot_be_set(self):
    """PathInfo properties are read-only; assignment must raise.

    The original body simply assigned to the property with no assertion:
    if setting ever silently succeeded, the test would pass anyway.
    Assert the failure explicitly.
    """
    info = PathInfo(self.tempFile1)
    self.assertRaises(AttributeError, setattr, info, "dirname", "something")
def test_file3_exists_and_is_symlink(self):
    """tempFile3 is a symlink; tempFile1 is a plain file."""
    self.assertTrue(PathInfo(self.tempFile3).is_symlink)
    self.assertFalse(PathInfo(self.tempFile1).is_symlink)
def storeSecondScan(self, directory_scanner):
    """
    Another scan is being performed, which needs to be integrated into the
    pre-scanned result set. Each scan is checked against the existing database
    records, in order to create the added/modified/deleted flag. This method
    returns a generator that punts out the very same object produced by the
    input generator - namely a PathInfo instance.

    NOTE(review): unlike storeFilesystemSnapshot, the StopScanningException
    handler here emits STATE_MERGESCAN_ROLLBACK but never calls
    self.session.rollback() — confirm whether the caller performs the actual
    rollback, otherwise partial merge rows remain in the session.
    """
    self.isScanning = True
    # take a copy of the file_system_snapshot table, first dump and re-create
    # the file_system_merge table - the schema parts are done using
    # SQLAlchemy, but its way too slow for the rest of the work - which is
    # done using the sqlite3 module directly.
    FileSystemMerge.__table__.drop(bind=self.engine, checkfirst=True)
    FileSystemMerge.__table__.create(bind=self.engine)
    self.info.date_last_scanned = datetime.now()
    self.clearMergeCompleteFlag()
    self.scanningStateChanged.emit(
        PersistentScanningState.STATE_MERGESCAN_COPYING)
    # Pending-insert buffer; __prepareForMergeTable appends row tuples via
    # add_callable and they are flushed in batches below.
    self.add_list = []
    add_callable = lambda x: self.add_list.append(x)
    try:
        paths = [PathInfo(p.abs_path) for p in self.pathsBeingScanned()]
        # add each of the scan paths into the FileSystemMerge table, without
        # marking them as modified etc
        for path_info in paths:
            self.__prepareForMergeTable(add_callable, path_info)
            yield path_info
        # now add each of the paths handed to us from the iterator, they are
        # all PathInfo instances - and are converted into an 'INSERT' for the
        # FileSystemMerge table - which happens in batches to make use of
        # executemany() - and bypass the ORM cos its a woof woof.
        for path_info in directory_scanner.performScan():
            self.__prepareForMergeTable(add_callable, path_info)
            # isScanning is cleared externally to request cancellation.
            if not self.isScanning:
                raise StopScanningException()
            # flush in batches of ~1000 to keep memory bounded
            if len(self.add_list) > 1000:
                self.__flushAddQueueToDatabase()
            yield path_info
        self.__flushAddQueueToDatabase()
        # to work out which records have been deleted, simply find the
        # abs_path entries NOT in the FileSystemMerge table, and copy them
        # over as PersistentScanningState.ITEM_DELETED
        self.session.execute(
            "INSERT into file_system_merge (abs_path, is_dir, path_info, flags, checked, parent_key) SELECT abs_path, is_dir, path_info, '4', '0', parent_key FROM file_system_snapshot WHERE abs_path NOT IN (SELECT abs_path FROM file_system_merge)"
        )
        self.session.commit()
        self.scanningStateChanged.emit(
            PersistentScanningState.STATE_MERGESCAN_UPDATE_INITIAL_STATE)
        self.__updateInitialCheckedState()
        self.info.merge_complete = True
        self.session.commit()
    except StopScanningException:
        self.scanningStateChanged.emit(
            PersistentScanningState.STATE_MERGESCAN_ROLLBACK)
def storeFilesystemSnapshot(self, directory_scanner):
    """
    Re-create the file_system_snapshot table from a fresh directory scan.

    Drops and re-creates the table, stores the scan roots, then inserts one
    FileSystemSnapshot row per scan-root and per scanned item, committing in
    batches of ~1000 to bound memory use. Yields each FileSystemSnapshot as
    it is added (generator), so callers can report progress.

    Cancellation: an external actor clears self.isScanning; the loop then
    raises StopScanningException, which emits STATE_FILESYSTEM_ROLLBACK and
    rolls the session back.
    """
    self.isScanning = True
    FileSystemSnapshot.__table__.drop(bind=self.engine, checkfirst=True)
    FileSystemSnapshot.__table__.create(bind=self.engine)
    self.info.files_scanned = 0
    self.clearMergeCompleteFlag()
    self.session.commit()
    try:
        self.scanningStateChanged.emit(
            PersistentScanningState.STATE_FILESYSTEM_SCANNING)
        # for every item in the list stuff it into the table and we're done
        self.storePathsBeingScanned(directory_scanner.pathsForScanning())
        total_count = 0
        count_added = 0
        # First, a row for each scan root itself...
        for info in [PathInfo(p) for p in directory_scanner.pathsForScanning()]:
            fs = self._snapshotRecord(info)
            self.session.add(fs)
            total_count += 1
            yield fs
        # ...then a row for every item produced by the scan.
        for info in directory_scanner.performScan():
            fs = self._snapshotRecord(info)
            self.session.add(fs)
            total_count += 1
            count_added += 1
            if not self.isScanning:
                raise StopScanningException()
            yield fs
            # just so we don't blow up the machine's RAM... commit each
            # chunk now and then...
            if count_added > 1000:
                self.session.commit()
                count_added = 0
        self.info.files_scanned = total_count
        self.session.commit()
    except StopScanningException:
        self.scanningStateChanged.emit(
            PersistentScanningState.STATE_FILESYSTEM_ROLLBACK)
        self.session.rollback()
    self.isScanning = False

def _snapshotRecord(self, info):
    """Build one FileSystemSnapshot row from a PathInfo.

    Extracted from storeFilesystemSnapshot, which previously duplicated this
    construction verbatim in both of its loops.
    """
    assert info.abs_path is not None
    return FileSystemSnapshot(abs_path=ensure_unicode(info.abs_path),
                              is_dir=info.is_dir,
                              path_info=info.to_json(),
                              parent_key=ensure_unicode(info.parentpath))
def getFileInformation(self, abs_filename):
    """Return a freshly-constructed PathInfo describing *abs_filename*."""
    file_details = PathInfo(abs_filename)
    return file_details