def ConvertToPngIfBmp( path ):
    """If the file at path has a BMP magic header, convert it in place to a PNG."""
    
    with open( path, 'rb' ) as f:
        
        magic = f.read( 2 )
        
    
    if magic != b'BM':
        
        return
        
    
    ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath()
    
    try:
        
        # work from a temp copy so the original can be overwritten safely
        with open( path, 'rb' ) as source, open( temp_path, 'wb' ) as dest:
            
            HydrusPaths.CopyFileLikeToFileLike( source, dest )
            
        
        GeneratePILImage( temp_path ).save( path, 'PNG' )
        
    finally:
        
        HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
        
    
def SetEnvTempDir( path ):
    """Redirect this process's temporary directory to path.
    
    Validates/creates the directory, then updates the usual env vars and
    the tempfile module's default.
    """
    
    if os.path.exists( path ) and not os.path.isdir( path ):
        
        raise Exception( 'The given temp directory, "{}", does not seem to be a directory!'.format( path ) )
        
    
    try:
        
        HydrusPaths.MakeSureDirectoryExists( path )
        
    except Exception as e:
        
        raise Exception( 'Could not create the temp dir: {}'.format( e ) )
        
    
    if not HydrusPaths.DirectoryIsWriteable( path ):
        
        raise Exception( 'The given temp directory, "{}", does not seem to be writeable-to!'.format( path ) )
        
    
    # only override vars that are already set, as the original code did
    for env_var_name in ( 'TMPDIR', 'TEMP', 'TMP' ):
        
        if env_var_name in os.environ:
            
            os.environ[ env_var_name ] = path
            
        
    
    tempfile.tempdir = path
    
def GetExportPath():
    """Return the client's export directory, or None if no default can be resolved."""
    
    portable_path = HG.client_controller.options[ 'export_path' ]
    
    if portable_path is not None:
        
        return HydrusPaths.ConvertPortablePathToAbsPath( portable_path )
        
    
    desired_path = os.path.join( '~', 'hydrus_export' )
    
    path = os.path.expanduser( desired_path )
    
    if path == desired_path:
        
        # could not figure it out, probably crazy user setup atm
        return None
        
    
    HydrusPaths.MakeSureDirectoryExists( path )
    
    return path
    
def _callbackParsePOSTArgs( self, request ):
    # Parse a POST body into hydrus request args: no Content-Type means empty
    # args, JSON bodies are decoded directly, and anything else is spooled to
    # a temp file and parsed as a file upload.
    
    request.content.seek( 0 )
    
    if not request.requestHeaders.hasHeader( 'Content-Type' ):
        
        parsed_request_args = HydrusNetworking.ParsedRequestArguments()
        
    else:
        
        content_types = request.requestHeaders.getRawHeaders( 'Content-Type' )
        
        content_type = content_types[0]
        
        try:
            
            mime = HC.mime_enum_lookup[ content_type ]
            
        except:
            
            raise HydrusExceptions.BadRequestException( 'Did not recognise Content-Type header!' )
            
        
        total_bytes_read = 0
        
        if mime == HC.APPLICATION_JSON:
            
            json_string = request.content.read()
            
            total_bytes_read += len( json_string )
            
            parsed_request_args = HydrusNetwork.ParseNetworkBytesToParsedHydrusArgs( json_string )
            
        else:
            
            ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath()
            
            # stash the temp file on the request so a later stage can clean it up
            request.temp_file_info = ( os_file_handle, temp_path )
            
            with open( temp_path, 'wb' ) as f:
                
                for block in HydrusPaths.ReadFileLikeAsBlocks( request.content ):
                    
                    f.write( block )
                    
                    total_bytes_read += len( block )
                    
                
            
            decompression_bombs_ok = self._DecompressionBombsOK( request )
            
            parsed_request_args = HydrusServerResources.ParseFileArguments( temp_path, decompression_bombs_ok )
            
        
        self._reportDataUsed( request, total_bytes_read )
        
    
    request.parsed_request_args = parsed_request_args
    
    return request
    
def GenerateThumbnailBytes( path, target_resolution, mime, duration, num_frames, percentage_in = 35 ):
    # Produce thumbnail bytes for a file: static images directly, flash via a
    # rendered frame (falling back to the stock flash icon), and video by
    # grabbing a frame percentage_in percent of the way through.
    
    if mime in ( HC.IMAGE_JPEG, HC.IMAGE_PNG, HC.IMAGE_GIF, HC.IMAGE_WEBP, HC.IMAGE_TIFF, HC.IMAGE_ICON ): # not apng atm
        
        thumbnail_bytes = HydrusImageHandling.GenerateThumbnailBytesFromStaticImagePath( path, target_resolution, mime )
        
    else:
        
        if mime == HC.APPLICATION_FLASH:
            
            ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath()
            
            try:
                
                HydrusFlashHandling.RenderPageToFile( path, temp_path, 1 )
                
                thumbnail_bytes = HydrusImageHandling.GenerateThumbnailBytesFromStaticImagePath( temp_path, target_resolution, mime )
                
            except:
                
                # flash rendering failed, so fall back to the stock flash icon
                thumb_path = os.path.join( HC.STATIC_DIR, 'flash.png' )
                
                thumbnail_bytes = HydrusImageHandling.GenerateThumbnailBytesFromStaticImagePath( thumb_path, target_resolution, mime )
                
            finally:
                
                HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
                
            
        else:
            
            renderer = HydrusVideoHandling.VideoRendererFFMPEG( path, mime, duration, num_frames, target_resolution )
            
            renderer.read_frame() # this initialises the renderer and loads the first frame as a fallback
            
            desired_thumb_frame = int( ( percentage_in / 100.0 ) * num_frames )
            
            renderer.set_position( desired_thumb_frame )
            
            numpy_image = renderer.read_frame()
            
            if numpy_image is None:
                
                raise Exception( 'Could not create a thumbnail from that video!' )
                
            
            numpy_image = HydrusImageHandling.ResizeNumPyImage( numpy_image, target_resolution ) # just in case ffmpeg doesn't deliver right
            
            thumbnail_bytes = HydrusImageHandling.GenerateThumbnailBytesNumPy( numpy_image, mime )
            
            # release the ffmpeg subprocess promptly
            renderer.Stop()
            
            del renderer
            
        
    
    return thumbnail_bytes
    
def DeletePath( path, always_delete_fully = False ):
    """Delete path, recycling it when the user's options say to.
    
    always_delete_fully forces a permanent delete even if the recycle
    option is on.
    """
    
    # truthiness test rather than '== True', so any truthy option value recycles
    if HC.options[ 'delete_to_recycle_bin' ] and not always_delete_fully:
        
        HydrusPaths.RecyclePath( path )
        
    else:
        
        HydrusPaths.DeletePath( path )
        
    
def test_import_folders_daemon( self ):
    # End-to-end check of DAEMONCheckImportFolders: lay out a folder of new,
    # previously-imported, and broken files, run the daemon against an
    # ImportFolder with delete/ignore actions, then verify which files were
    # imported and which were removed from disk.
    
    test_dir = HydrusTemp.GetTempDir()
    
    try:
        
        HG.test_controller.SetRead( 'hash_status', ClientImportFiles.FileImportStatus.STATICGetUnknownStatus() )
        
        HydrusPaths.MakeSureDirectoryExists( test_dir )
        
        hydrus_png_path = os.path.join( HC.STATIC_DIR, 'hydrus.png' )
        
        HydrusPaths.MirrorFile( hydrus_png_path, os.path.join( test_dir, '0' ) )
        HydrusPaths.MirrorFile( hydrus_png_path, os.path.join( test_dir, '1' ) ) # previously imported
        HydrusPaths.MirrorFile( hydrus_png_path, os.path.join( test_dir, '2' ) )
        
        with open( os.path.join( test_dir, '3' ), 'wb' ) as f:
            
            f.write( b'blarg' ) # broken
            
        
        with open( os.path.join( test_dir, '4' ), 'wb' ) as f:
            
            f.write( b'blarg' ) # previously failed
            
        
        #
        
        actions = {}
        
        actions[ CC.STATUS_SUCCESSFUL_AND_NEW ] = CC.IMPORT_FOLDER_DELETE
        actions[ CC.STATUS_SUCCESSFUL_BUT_REDUNDANT ] = CC.IMPORT_FOLDER_DELETE
        actions[ CC.STATUS_DELETED ] = CC.IMPORT_FOLDER_DELETE
        actions[ CC.STATUS_ERROR ] = CC.IMPORT_FOLDER_IGNORE
        
        import_folder = ClientImportLocal.ImportFolder( 'imp', path = test_dir, actions = actions )
        
        HG.test_controller.SetRead( 'serialisable_names', [ 'imp' ] )
        HG.test_controller.SetRead( 'serialisable_named', import_folder )
        
        HG.test_controller.ClearWrites( 'import_file' )
        HG.test_controller.ClearWrites( 'serialisable' )
        
        ClientDaemons.DAEMONCheckImportFolders()
        
        import_file = HG.test_controller.GetWrite( 'import_file' )
        
        # three real pngs plus one broken file hit the importer; the two
        # 'previously seen' ones are mocked by the hash_status read above
        self.assertEqual( len( import_file ), 3 )
        
        # I need to expand tests here with the new file system
        
        [ ( ( updated_import_folder, ), empty_dict ) ] = HG.test_controller.GetWrite( 'serialisable' )
        
        self.assertEqual( updated_import_folder, import_folder )
        
        # delete actions should have removed the successful imports...
        self.assertTrue( not os.path.exists( os.path.join( test_dir, '0' ) ) )
        self.assertTrue( not os.path.exists( os.path.join( test_dir, '1' ) ) )
        self.assertTrue( not os.path.exists( os.path.join( test_dir, '2' ) ) )
        
        # ...and the ignore action should have left the error files alone
        self.assertTrue( os.path.exists( os.path.join( test_dir, '3' ) ) )
        self.assertTrue( os.path.exists( os.path.join( test_dir, '4' ) ) )
        
    finally:
        
        shutil.rmtree( test_dir )
        
    
def GetExportPath():
    """Return the client's export directory, or None if a default cannot be resolved.
    
    Made consistent with the other GetExportPath in this file: if
    os.path.expanduser cannot resolve '~', we return None rather than
    creating a literal '~' directory in the working directory.
    """
    
    portable_path = HG.client_controller.options[ 'export_path' ]
    
    if portable_path is None:
        
        desired_path = os.path.join( '~', 'hydrus_export' )
        
        path = os.path.expanduser( desired_path )
        
        if path == desired_path:
            
            # expanduser did nothing, probably crazy user setup
            return None
            
        
        HydrusPaths.MakeSureDirectoryExists( path )
        
    else:
        
        path = HydrusPaths.ConvertPortablePathToAbsPath( portable_path )
        
    
    return path
    
def _OpenSelectedFileSeedData( self ):
    # Open the selected file seeds' sources: URLs go to the web browser,
    # anything else is treated as a local path and shown in the file manager.
    
    file_seeds = self._list_ctrl.GetData( only_selected = True )
    
    if len( file_seeds ) > 0:
        
        if len( file_seeds ) > 10:
            
            # opening dozens of tabs/windows at once is probably a misclick, so confirm
            message = 'You have many objects selected--are you sure you want to open them all?'
            
            result = ClientGUIDialogsQuick.GetYesNo( self, message )
            
            if result != QW.QDialog.Accepted:
                
                return
                
            
        
        # sniff the first seed to decide how to treat the whole selection
        if file_seeds[0].file_seed_data.startswith( 'http' ):
            
            for file_seed in file_seeds:
                
                ClientPaths.LaunchURLInWebBrowser( file_seed.file_seed_data )
                
            
        else:
            
            try:
                
                for file_seed in file_seeds:
                    
                    HydrusPaths.OpenFileLocation( file_seed.file_seed_data )
                    
                
            except Exception as e:
                
                QW.QMessageBox.critical( self, 'Error', str( e ) )
                
            
        
    
def CheckCanVacuumCursor( db_path, c, stop_time = None ):
    """Raise if vacuuming the db behind cursor c is not currently feasible.
    
    Checks both the time available before stop_time and the free disk space
    next to the database file.
    """
    
    ( page_size, ) = c.execute( 'PRAGMA page_size;' ).fetchone()
    ( page_count, ) = c.execute( 'PRAGMA page_count;' ).fetchone()
    ( freelist_count, ) = c.execute( 'PRAGMA freelist_count;' ).fetchone()
    
    # freelist pages are reclaimed by the vacuum, so they do not count
    occupied_bytes = ( page_count - freelist_count ) * page_size
    
    # pad the estimate a little for safety
    vacuum_estimate = int( occupied_bytes * 1.2 )
    
    if stop_time is not None:
        
        # assume a conservative vacuum speed of about 1MB/s
        approx_vacuum_speed_mb_per_s = 1048576 * 1
        
        approx_vacuum_duration = vacuum_estimate // approx_vacuum_speed_mb_per_s
        
        time_i_will_have_to_start = stop_time - approx_vacuum_duration
        
        if HydrusData.TimeHasPassed( time_i_will_have_to_start ):
            
            raise Exception( 'I believe you need about ' + HydrusData.TimeDeltaToPrettyTimeDelta( approx_vacuum_duration ) + ' to vacuum, but there is not enough time allotted.' )
            
        
    
    HydrusPaths.CheckHasSpaceForDBTransaction( os.path.dirname( db_path ), vacuum_estimate )
    
def EventOpenLocation( self ):
    # Launch the OS file browser at the currently picked directory, if any.
    
    directory = self._directory_picker.GetPath()
    
    if directory is None or directory == '':
        
        return
        
    
    HydrusPaths.LaunchDirectory( directory )
    
def CheckCanVacuum( db_path, stop_time = None ):
    """Raise if vacuuming the db at db_path is not currently feasible.
    
    Opens its own short-lived connection to read page stats, then checks
    the time available before stop_time and the free space in the db's
    directory.
    """
    
    db = sqlite3.connect( db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
    
    try:
        
        c = db.cursor()
        
        ( page_size, ) = c.execute( 'PRAGMA page_size;' ).fetchone()
        ( page_count, ) = c.execute( 'PRAGMA page_count;' ).fetchone()
        ( freelist_count, ) = c.execute( 'PRAGMA freelist_count;' ).fetchone()
        
    finally:
        
        # previously this connection was never closed, leaking the db handle
        db.close()
        
    
    db_size = ( page_count - freelist_count ) * page_size
    
    if stop_time is not None:
        
        approx_vacuum_speed_mb_per_s = 1048576 * 1
        
        approx_vacuum_duration = db_size // approx_vacuum_speed_mb_per_s
        
        time_i_will_have_to_start = stop_time - approx_vacuum_duration
        
        if HydrusData.TimeHasPassed( time_i_will_have_to_start ):
            
            raise Exception( 'I believe you need about ' + HydrusData.TimeDeltaToPrettyTimeDelta( approx_vacuum_duration ) + ' to vacuum, but there is not enough time allotted.' )
            
        
    
    ( db_dir, db_filename ) = os.path.split( db_path )
    
    HydrusPaths.CheckHasSpaceForDBTransaction( db_dir, db_size )
    
def tearDownClass( cls ):
    # Delete the ssl cert and key files created for this test class,
    # clearing any read-only flag first so the unlink succeeds.
    
    cert_and_key_paths = ( cls._ssl_cert_path, cls._ssl_key_path )
    
    for ssl_path in cert_and_key_paths:
        
        HydrusPaths.TryToMakeFileWriteable( ssl_path )
        
        os.unlink( ssl_path )
        
    
def MaintainMemorySlow( self ):
    # Heavyweight periodic memory maintenance: force a gc pass, clear out
    # stale temp files, and prune/refresh the call-to-thread pool.
    
    gc.collect()
    
    HydrusPaths.CleanUpOldTempPaths()
    
    self._MaintainCallToThreads()
    
def _CheckFolder( self, job_key ):
    # Scan the import folder for files not yet in the seed cache and queue
    # them as new file seeds, updating the popup as we go.
    
    all_paths = ClientFiles.GetAllFilePaths( [ self._path ] )
    
    # skip files that appear to still be in use / being written
    all_paths = HydrusPaths.FilterFreePaths( all_paths )
    
    file_seeds = []
    
    for path in all_paths:
        
        if job_key.IsCancelled():
            
            break
            
        
        # .txt files are sidecars, not imports themselves
        if path.endswith( '.txt' ):
            
            continue
            
        
        file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_HDD, path )
        
        if not self._file_seed_cache.HasFileSeed( file_seed ):
            
            file_seeds.append( file_seed )
            
        
        job_key.SetVariable( 'popup_text_1', 'checking: found ' + HydrusData.ToHumanInt( len( file_seeds ) ) + ' new files' )
        
    
    self._file_seed_cache.AddFileSeeds( file_seeds )
    
    self._last_checked = HydrusData.GetNow()
    self._check_now = False
    
def boot():
    # Client entry point: honour any temp-dir override, run twisted's reactor
    # in a background thread, run the controller to completion, and make sure
    # shutdown flags are always set afterwards, success or failure.
    # NOTE(review): 'result' and 'db_dir' are presumably module-level values
    # set by the argparse bootstrap before boot() is called -- confirm.
    
    if result.temp_dir is not None:
        
        HydrusPaths.SetEnvTempDir( result.temp_dir )
        
    
    controller = None
    
    with HydrusLogger.HydrusLogger( db_dir, 'client' ) as logger:
        
        try:
            
            HydrusData.Print( 'hydrus client started' )
            
            if not HG.twisted_is_broke:
                
                import threading
                
                # installSignalHandlers = 0 because the reactor is not on the main thread
                threading.Thread( target = reactor.run, name = 'twisted', kwargs = { 'installSignalHandlers' : 0 } ).start()
                
            
            from hydrus.client import ClientController
            
            controller = ClientController.Controller( db_dir )
            
            controller.Run()
            
        except:
            
            HydrusData.Print( 'hydrus client failed' )
            
            import traceback
            
            HydrusData.Print( traceback.format_exc() )
            
        finally:
            
            # set shutdown flags even on failure so daemons and threads exit
            HG.view_shutdown = True
            HG.model_shutdown = True
            
            if controller is not None:
                
                controller.pubimmediate( 'wake_daemons' )
                
            
            if not HG.twisted_is_broke:
                
                reactor.callFromThread( reactor.stop )
                
            
            HydrusData.Print( 'hydrus client shut down' )
            
        
    
    HG.shutdown_complete = True
    
    if HG.restart:
        
        HydrusData.RestartProcess()
        
    
def DoDeferredPhysicalDeletes( self ):
    """Drain the deferred physical delete queue, recycling files and thumbnails.
    
    Each 'deferred_physical_delete' read yields at most one file hash and one
    thumbnail hash; we recycle whatever exists on disk, clear the queue entry,
    and repeat until the queue is empty or shutdown is signalled.
    """
    
    num_files_deleted = 0
    num_thumbnails_deleted = 0
    
    pauser = HydrusData.BigJobPauser()
    
    ( file_hash, thumbnail_hash ) = self.Read( 'deferred_physical_delete' )
    
    while ( file_hash is not None or thumbnail_hash is not None ) and not HG.view_shutdown:
        
        if file_hash is not None:
            
            path = ServerFiles.GetExpectedFilePath( file_hash )
            
            if os.path.exists( path ):
                
                HydrusPaths.RecyclePath( path )
                
                num_files_deleted += 1
                
            
        
        if thumbnail_hash is not None:
            
            path = ServerFiles.GetExpectedThumbnailPath( thumbnail_hash )
            
            if os.path.exists( path ):
                
                HydrusPaths.RecyclePath( path )
                
                num_thumbnails_deleted += 1
                
            
        
        self.WriteSynchronous( 'clear_deferred_physical_delete', file_hash = file_hash, thumbnail_hash = thumbnail_hash )
        
        ( file_hash, thumbnail_hash ) = self.Read( 'deferred_physical_delete' )
        
        pauser.Pause()
        
    
    if num_files_deleted > 0 or num_thumbnails_deleted > 0:
        
        # bug fix: the second figure previously repeated num_files_deleted
        HydrusData.Print( 'Physically deleted {} files and {} thumbnails from file storage.'.format( HydrusData.ToHumanInt( num_files_deleted ), HydrusData.ToHumanInt( num_thumbnails_deleted ) ) )
        
    
def DumpToPNG( width, payload_bytes, title, payload_description, text, path ):
    """Render payload_bytes into a hydrus-serialised png at path.
    
    The png is a rendered title/description header image stacked on top of
    the raw payload, which is prefixed with a 4-byte big-endian length and
    zero-padded to fill complete pixel rows of the given width.
    """
    
    payload_bytes_length = len( payload_bytes )
    
    # 4 extra bytes for the length header
    header_and_payload_bytes_length = payload_bytes_length + 4
    
    # round the number of payload rows up to cover a partial final row
    payload_height = int( header_and_payload_bytes_length / width )
    
    if ( header_and_payload_bytes_length / width ) % 1.0 > 0:
        
        payload_height += 1
        
    
    top_image = CreateTopImage( width, title, payload_description, text )
    
    payload_length_header = struct.pack( '!I', payload_bytes_length )
    
    num_empty_bytes = payload_height * width - header_and_payload_bytes_length
    
    header_and_payload_bytes = payload_length_header + payload_bytes + b'\x00' * num_empty_bytes
    
    # frombuffer rather than fromstring, which is deprecated and removed in modern numpy
    payload_image = numpy.frombuffer( header_and_payload_bytes, dtype = 'uint8' ).reshape( ( payload_height, width ) )
    
    finished_image = numpy.concatenate( ( top_image, payload_image ) )
    
    # this is to deal with unicode paths, which cv2 can't handle
    ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath( suffix = '.png' )
    
    try:
        
        cv2.imwrite( temp_path, finished_image, [ cv2.IMWRITE_PNG_COMPRESSION, 9 ] )
        
        HydrusPaths.MirrorFile( temp_path, path )
        
    except Exception as e:
        
        HydrusData.ShowException( e )
        
        raise Exception( 'Could not save the png!' )
        
    finally:
        
        HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
        
    
def GetHashFromPath( path ):
    """Return the sha256 digest of the file at path, read block by block."""
    
    hasher = hashlib.sha256()
    
    with open( path, 'rb' ) as f:
        
        for block in HydrusPaths.ReadFileLikeAsBlocks( f ):
            
            hasher.update( block )
            
        
    
    return hasher.digest()
    
def LaunchURLInWebBrowser( url ):
    # Open the url in the user's configured browser, falling back to the
    # system default when no specific browser path is set.
    
    web_browser_path = HG.client_controller.new_options.GetNoneableString( 'web_browser_path' )
    
    if web_browser_path is not None:
        
        HydrusPaths.LaunchFile( url, launch_path = web_browser_path )
        
    else:
        
        webbrowser.open( url )
        
    
def OpenExternally( media ):
    # Launch the media's file in the external program configured for its mime.
    
    hash = media.GetHash()
    mime = media.GetMime()
    
    client_files_manager = HG.client_controller.client_files_manager
    
    path = client_files_manager.GetFilePath( hash, mime )
    
    new_options = HG.client_controller.new_options
    
    # launch_path may be None here; presumably LaunchFile then falls back to
    # the OS default handler -- confirm against HydrusPaths.LaunchFile
    launch_path = new_options.GetMimeLaunch( mime )
    
    HydrusPaths.LaunchFile( path, launch_path )
    
def ShutdownModel( self ) -> None:
    # Tear down the model side of the controller in order: db first, then job
    # schedulers, the temp dir, worker threads, and finally the global flag.
    
    if self.db is not None:
        
        self.db.Shutdown()
        
        # wait for the db loop to drain before tearing anything else down
        while not self.db.LoopIsFinished():
            
            self._PublishShutdownSubtext( 'waiting for db to finish up\u2026' )
            
            time.sleep( 0.1 )
            
        
    
    if self._fast_job_scheduler is not None:
        
        self._fast_job_scheduler.shutdown()
        
        self._fast_job_scheduler = None
        
    
    if self._slow_job_scheduler is not None:
        
        self._slow_job_scheduler.shutdown()
        
        self._slow_job_scheduler = None
        
    
    # temp_dir only exists if this controller created one on boot
    if hasattr( self, 'temp_dir' ):
        
        HydrusPaths.DeletePath( self.temp_dir )
        
    
    with self._call_to_thread_lock:
        
        for call_to_thread in self._call_to_threads:
            
            call_to_thread.shutdown()
            
        
        for long_running_call_to_thread in self._long_running_call_to_threads:
            
            long_running_call_to_thread.shutdown()
            
        
    
    HG.model_shutdown = True
    
    # wake the pubsub so its loop notices model_shutdown and exits
    self._pubsub.Wake()
    
def UpdateConf( self ):
    # (Re)load mpv.conf into the live mpv player, copying the default conf
    # into place first if the user does not have one yet.
    
    mpv_config_path = HG.client_controller.GetMPVConfPath()
    
    if not os.path.exists( mpv_config_path ):
        
        default_mpv_config_path = HG.client_controller.GetDefaultMPVConfPath()
        
        if not os.path.exists( default_mpv_config_path ):
            
            HydrusData.ShowText( 'There is no default mpv configuration file to load! Perhaps there is a problem with your install?' )
            
            return
            
        else:
            
            HydrusPaths.MirrorFile( default_mpv_config_path, mpv_config_path )
            
        
    
    # To load an existing config file (by default it doesn't load the user/global config like standalone mpv does):
    # the raw libmpv symbol may be absent in some python-mpv versions, hence the getattr probe
    load_f = getattr( mpv, '_mpv_load_config_file', None )
    
    if load_f is not None and callable( load_f ):
        
        try:
            
            load_f( self._player.handle, mpv_config_path.encode( 'utf-8' ) ) # pylint: disable=E1102
            
        except Exception as e:
            
            HydrusData.ShowText( 'MPV could not load its configuration file! This was probably due to an invalid parameter value inside the conf. The error follows:' )
            
            HydrusData.ShowException( e )
            
        
    else:
        
        HydrusData.Print( 'Was unable to load mpv.conf--has the MPV API changed?' )
        
    
def GetExtraHashesFromPath( path ):
    """Return the ( md5, sha1, sha512 ) digests of the file at path in one read pass."""
    
    hashers = ( hashlib.md5(), hashlib.sha1(), hashlib.sha512() )
    
    with open( path, 'rb' ) as f:
        
        for block in HydrusPaths.ReadFileLikeAsBlocks( f ):
            
            for hasher in hashers:
                
                hasher.update( block )
                
            
        
    
    return tuple( hasher.digest() for hasher in hashers )
    
def LoadFromPNG( path ):
    # Load a hydrus-serialised png into a numpy array, trying cv2 first and
    # falling back to PIL. We need the exact stored bytes, so neither path is
    # allowed to dequantize or otherwise convert the data.
    
    # this is to deal with unicode paths, which cv2 can't handle
    ( os_file_handle, temp_path ) = HydrusTemp.GetTempPath()
    
    try:
        
        HydrusPaths.MirrorFile( path, temp_path )
        
        try:
            
            # unchanged because we want exact byte data, no conversions or other gubbins
            numpy_image = cv2.imread( temp_path, flags = IMREAD_UNCHANGED )
            
            if numpy_image is None:
                
                # cv2 signals failure by returning None rather than raising
                raise Exception()
                
            
        except Exception as e:
            
            try:
                
                # dequantize = False because we don't want to convert to RGB
                pil_image = HydrusImageHandling.GeneratePILImage( temp_path, dequantize = False )
                
                numpy_image = HydrusImageHandling.GenerateNumPyImageFromPILImage( pil_image )
                
            except Exception as e:
                
                HydrusData.ShowException( e )
                
                raise Exception( '"{}" did not appear to be a valid image!'.format( path ) )
                
            
        
    finally:
        
        HydrusTemp.CleanUpTempPath( os_file_handle, temp_path )
        
    
    return LoadFromNumPyImage( numpy_image )
    
def _AttachExternalDatabases( self ):
    # ATTACH every external db file (everything except 'main') onto this
    # connection, plus the durable temp db.
    
    for ( name, filename ) in self._db_filenames.items():
        
        if name == 'main':
            
            continue
            
        
        db_path = os.path.join( self._db_dir, filename )
        
        # NOTE(review): 'FileisWriteable' casing looks off versus the
        # 'DirectoryIsWriteable'-style helpers elsewhere -- confirm the name exists
        if os.path.exists( db_path ) and not HydrusPaths.FileisWriteable( db_path ):
            
            raise HydrusExceptions.DBAccessException( '"{}" seems to be read-only!'.format( db_path ) )
            
        
        # name comes from our own filenames dict, not external input, so
        # interpolating it into the ATTACH statement is safe here
        self._Execute( 'ATTACH ? AS ' + name + ';', ( db_path, ) )
        
    
    db_path = os.path.join( self._db_dir, self._durable_temp_db_filename )
    
    self._Execute( 'ATTACH ? AS durable_temp;', ( db_path, ) )
    
def _test_repo( self, service ):
    # Exercise the repository service API surface: num_petitions, petition,
    # update fetching (definitions and content), and metadata_slice.
    
    service_key = service.GetServiceKey()
    
    # num_petitions
    
    num_petitions = [ [ HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_STATUS_PETITIONED, 23 ], [ HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_STATUS_PENDING, 0 ] ]
    
    HG.test_controller.SetRead( 'num_petitions', num_petitions )
    
    response = service.Request( HC.GET, 'num_petitions' )
    
    self.assertEqual( response[ 'num_petitions' ], num_petitions )
    
    # petition
    
    action = HC.CONTENT_UPDATE_PETITION
    petitioner_account = HydrusNetwork.Account.GenerateUnknownAccount()
    reason = 'it sucks'
    contents = [ HydrusNetwork.Content( HC.CONTENT_TYPE_FILES, [ HydrusData.GenerateKey() for i in range( 10 ) ] ) ]
    
    petition = HydrusNetwork.Petition( action, petitioner_account, reason, contents )
    
    HG.test_controller.SetRead( 'petition', petition )
    
    response = service.Request( HC.GET, 'petition', { 'content_type': HC.CONTENT_TYPE_FILES, 'status': HC.CONTENT_UPDATE_PETITION } )
    
    # compare serialised forms, since the objects have no simple equality
    self.assertEqual( response[ 'petition' ].GetSerialisableTuple(), petition.GetSerialisableTuple() )
    
    # definitions
    
    definitions_update = HydrusNetwork.DefinitionsUpdate()
    
    for i in range( 100, 200 ):
        
        definitions_update.AddRow( ( HC.DEFINITIONS_TYPE_TAGS, i, 'series:test ' + str( i ) ) )
        definitions_update.AddRow( ( HC.DEFINITIONS_TYPE_HASHES, i + 500, HydrusData.GenerateKey() ) )
        
    
    definitions_update_network_bytes = definitions_update.DumpToNetworkBytes()
    definitions_update_hash = hashlib.sha256( definitions_update_network_bytes ).digest()
    
    path = ServerFiles.GetExpectedFilePath( definitions_update_hash )
    
    # NOTE(review): the file-repo test elsewhere calls this helper on
    # os.path.dirname( path ); passing the file path itself looks suspicious
    # -- confirm intended
    HydrusPaths.MakeSureDirectoryExists( path )
    
    with open( path, 'wb' ) as f:
        
        f.write( definitions_update_network_bytes )
        
    
    response = service.Request( HC.GET, 'update', { 'update_hash': definitions_update_hash } )
    
    try:
        os.remove( path )
    except:
        pass
    
    self.assertEqual( response, definitions_update_network_bytes )
    
    # content
    
    rows = [ ( random.randint( 100, 1000 ), [ random.randint( 100, 1000 ) for i in range( 50 ) ] ) for j in range( 20 ) ]
    
    content_update = HydrusNetwork.ContentUpdate()
    
    for row in rows:
        
        content_update.AddRow( ( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, row ) )
        
    
    content_update_network_bytes = content_update.DumpToNetworkBytes()
    content_update_hash = hashlib.sha256( content_update_network_bytes ).digest()
    
    path = ServerFiles.GetExpectedFilePath( content_update_hash )
    
    with open( path, 'wb' ) as f:
        
        f.write( content_update_network_bytes )
        
    
    response = service.Request( HC.GET, 'update', { 'update_hash': content_update_hash } )
    
    try:
        os.remove( path )
    except:
        pass
    
    self.assertEqual( response, content_update_network_bytes )
    
    # metadata
    
    metadata = HydrusNetwork.Metadata()
    
    metadata.AppendUpdate( [ definitions_update_hash, content_update_hash ], HydrusData.GetNow() - 101000, HydrusData.GetNow() - 1000, HydrusData.GetNow() + 100000 )
    
    service._metadata = metadata
    
    response = service.Request( HC.GET, 'metadata_slice', { 'since': 0 } )
    
    self.assertEqual( response[ 'metadata_slice' ].GetSerialisableTuple(), metadata.GetSerialisableTuple() )
    
    # post content
    
    raise NotImplementedError()
    # the triple-quote below opens a commented-out section that continues past this block
    '''
def _test_file_repo( self, service ):
    # Exercise the file repository API: file fetch and post, ip lookup,
    # account-from-hash lookup, and thumbnail fetch.
    
    # file
    
    path = ServerFiles.GetExpectedFilePath( self._file_hash )
    HydrusPaths.MakeSureDirectoryExists( os.path.dirname( path ) )
    
    with open( path, 'wb' ) as f:
        
        f.write( EXAMPLE_FILE )
        
    
    response = service.Request( HC.GET, 'file', { 'hash': self._file_hash } )
    
    self.assertEqual( response, EXAMPLE_FILE )
    
    #
    
    try:
        os.remove( path )
    except:
        pass
    
    path = os.path.join( HC.STATIC_DIR, 'hydrus.png' )
    
    with open( path, 'rb' ) as f:
        
        file_bytes = f.read()
        
    
    HG.test_controller.ClearWrites( 'file' )
    
    service.Request( HC.POST, 'file', { 'file': file_bytes } )
    
    written = HG.test_controller.GetWrite( 'file' )
    
    [ ( args, kwargs ) ] = written
    
    ( written_service_key, written_account, written_file_dict ) = args
    
    # expected sha256 of the static hydrus.png test file
    hash = b'\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
    
    self.assertEqual( written_file_dict[ 'hash' ], hash )
    self.assertEqual( written_file_dict[ 'ip' ], '127.0.0.1' )
    self.assertEqual( written_file_dict[ 'height' ], 200 )
    self.assertEqual( written_file_dict[ 'width' ], 200 )
    self.assertEqual( written_file_dict[ 'mime' ], 2 )
    self.assertEqual( written_file_dict[ 'size' ], 5270 )
    
    # ip
    
    ( ip, timestamp ) = ( '94.45.87.123', HydrusData.GetNow() - 100000 )
    
    HG.test_controller.SetRead( 'ip', ( ip, timestamp ) )
    
    response = service.Request( HC.GET, 'ip', { 'hash': self._file_hash } )
    
    self.assertEqual( response[ 'ip' ], ip )
    self.assertEqual( response[ 'timestamp' ], timestamp )
    
    # account from hash
    
    subject_content = HydrusNetwork.Content( content_type = HC.CONTENT_TYPE_FILES, content_data = hash )
    subject_account_identifier = HydrusNetwork.AccountIdentifier( content = subject_content )
    
    HG.test_controller.SetRead( 'account', self._account )
    
    response = service.Request( HC.GET, 'other_account', { 'subject_identifier': subject_account_identifier } )
    
    # accounts compare by repr in these tests
    self.assertEqual( repr( response[ 'account' ] ), repr( self._account ) )
    
    # thumbnail
    
    path = ServerFiles.GetExpectedThumbnailPath( self._file_hash )
    HydrusPaths.MakeSureDirectoryExists( os.path.dirname( path ) )
    
    with open( path, 'wb' ) as f:
        
        f.write( EXAMPLE_THUMBNAIL )
        
    
    response = service.Request( HC.GET, 'thumbnail', { 'hash': self._file_hash } )
    
    self.assertEqual( response, EXAMPLE_THUMBNAIL )
    
    try:
        os.remove( path )
    except:
        pass
    
action='store_true', help='run db journaling entirely in memory (DANGEROUS)') argparser.add_argument( '--db_synchronous_override', help='override SQLite Synchronous PRAGMA (range 0-3, default=2)') argparser.add_argument('--no_db_temp_files', action='store_true', help='run db temp operations entirely in memory') result = argparser.parse_args() if result.db_dir is None: db_dir = HC.DEFAULT_DB_DIR if not HydrusPaths.DirectoryIsWritable( db_dir) or HC.RUNNING_FROM_MACOS_APP: db_dir = HC.USERPATH_DB_DIR else: db_dir = result.db_dir db_dir = HydrusPaths.ConvertPortablePathToAbsPath(db_dir, HC.BASE_DIR) try: HydrusPaths.MakeSureDirectoryExists(db_dir) except:
def _ActionPaths( self ):
    """Apply the per-status post-import actions (delete / move / ignore) to finished file seeds."""
    
    for status in ( CC.STATUS_SUCCESSFUL_AND_NEW, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, CC.STATUS_DELETED, CC.STATUS_ERROR ):
        
        action = self._actions[ status ]
        
        if action == CC.IMPORT_FOLDER_DELETE:
            
            while True:
                
                file_seed = self._file_seed_cache.GetNextFileSeed( status )
                
                if file_seed is None or HG.view_shutdown:
                    
                    break
                    
                
                path = file_seed.file_seed_data
                
                try:
                    
                    if os.path.exists( path ) and not os.path.isdir( path ):
                        
                        ClientPaths.DeletePath( path )
                        
                    
                    # also clean up any sidecar tag file
                    txt_path = path + '.txt'
                    
                    if os.path.exists( txt_path ):
                        
                        ClientPaths.DeletePath( txt_path )
                        
                    
                    self._file_seed_cache.RemoveFileSeeds( ( file_seed, ) )
                    
                except Exception as e:
                    
                    # chain the original exception so the cause is not lost
                    raise Exception( 'Tried to delete "{}", but could not.'.format( path ) ) from e
                    
                
            
        elif action == CC.IMPORT_FOLDER_MOVE:
            
            while True:
                
                file_seed = self._file_seed_cache.GetNextFileSeed( status )
                
                if file_seed is None or HG.view_shutdown:
                    
                    break
                    
                
                path = file_seed.file_seed_data
                
                try:
                    
                    dest_dir = self._action_locations[ status ]
                    
                    if not os.path.exists( dest_dir ):
                        
                        raise Exception( 'Tried to move "{}" to "{}", but the destination directory did not exist.'.format( path, dest_dir ) )
                        
                    
                    if os.path.exists( path ) and not os.path.isdir( path ):
                        
                        filename = os.path.basename( path )
                        
                        dest_path = os.path.join( dest_dir, filename )
                        
                        dest_path = HydrusPaths.AppendPathUntilNoConflicts( dest_path )
                        
                        HydrusPaths.MergeFile( path, dest_path )
                        
                    
                    # also move any sidecar tag file alongside
                    txt_path = path + '.txt'
                    
                    if os.path.exists( txt_path ):
                        
                        txt_filename = os.path.basename( txt_path )
                        
                        txt_dest_path = os.path.join( dest_dir, txt_filename )
                        
                        txt_dest_path = HydrusPaths.AppendPathUntilNoConflicts( txt_dest_path )
                        
                        HydrusPaths.MergeFile( txt_path, txt_dest_path )
                        
                    
                    self._file_seed_cache.RemoveFileSeeds( ( file_seed, ) )
                    
                except Exception as e:
                    
                    # moves are best-effort: report and pause the folder rather than raising
                    HydrusData.ShowText( 'Import folder tried to move ' + path + ', but could not:' )
                    
                    HydrusData.ShowException( e )
                    
                    HydrusData.ShowText( 'Import folder has been paused.' )
                    
                    self._paused = True
                    
                    return
                    
                
            
        elif action == CC.IMPORT_FOLDER_IGNORE:
            
            # bug fix: this previously compared 'status' (a STATUS_* value)
            # against the IMPORT_FOLDER_* action enum, so it could never
            # match; ignore simply means do nothing
            pass
            
        
    