def show_saved_figure(relative_loc, title=None):
    """
    Display a saved figure.

    Behaviour: this simply opens a window with the figure, and then continues executing the code.
    :param relative_loc: Relative path (within the data directory) to the figure.  Treated as an
        absolute path if it begins with "/"
    :return:
    """
    fig_path, ext = os.path.splitext(relative_loc)
    if title is None:
        _, title = os.path.split(fig_path)
    abs_loc = get_artemis_data_path(relative_loc)
    assert os.path.exists(abs_loc), '"%s" did not exist.  That is odd.' % (abs_loc, )
    if ext in ('.jpg', '.png', '.tif'):
        try:
            from PIL import Image
            Image.open(abs_loc).show()
        except ImportError:
            ARTEMIS_LOGGER.error("Cannot display image '%s', because PIL is not installed.  Go pip install pillow to use this.  Currently it is a soft requirement." % (abs_loc, ))
    elif ext == '.pkl':
        with interactive_matplotlib_context():
            with open(abs_loc, "rb") as f:
                fig = pickle.load(f)
            fig.canvas.set_window_title(title)
    else:
        import webbrowser
        webbrowser.open('file://' + abs_loc)
def _get_record_rows_cached(record_id, headers, raise_display_errors, truncate_to, ignore_valid_keys=()):
    """
    We want to load the saved row only if:
    - The record is complete
    -
    :param record_id:
    :param headers:
    :return:
    """
    cache_key = compute_fixed_hash((record_id, [h.value for h in headers], truncate_to, ignore_valid_keys))
    path = get_artemis_data_path(os.path.join('_ui_cache', cache_key), make_local_dir=True)
    if os.path.exists(path):
        try:
            with open(path, 'rb') as f:
                record_rows = pickle.load(f)
            if len(record_rows) != len(headers):
                os.remove(path)  # This should never happen.  But in case it somehow does, we just go ahead and compute again.
            else:
                return record_rows
        except:
            logging.warn('Failed to load cached record info: {}'.format(record_id))

    info_plus_status = _get_record_rows(record_id=record_id, headers=headers + [ExpRecordDisplayFields.STATUS],
                                         raise_display_errors=raise_display_errors, truncate_to=truncate_to,
                                         ignore_valid_keys=ignore_valid_keys)
    record_rows, status = info_plus_status[:-1], info_plus_status[-1]

    if status == ExpStatusOptions.STARTED:  # In this case it's still running (maybe) and we don't want to cache because it'll change
        return record_rows
    else:
        with open(path, 'wb') as f:
            pickle.dump(record_rows, f)
        return record_rows
def generate_random_model_path(code_gen_len=16, suffix='.pth'):
    code = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(code_gen_len))
    model_path = get_artemis_data_path('models/{}{}'.format(code, suffix), make_local_dir=True)
    return model_path
def smart_file(location, use_cache=False, make_dir=False):
    """
    :param location: Specifies where the file is.
        If it's formatted as a url, it's downloaded.
        If it begins with a "/", it's assumed to be a local path.
        Otherwise, it is assumed to be referenced relative to the data directory.
    :param use_cache: If True, and the location is a url, make a local cache of the file for future use
        (note: if the file at this url changes, the cached file will not).
    :param make_dir: Make the directory for this file, if it does not exist.
    :yield: The local path to the file.
    """
    its_a_url = is_url(location)
    if its_a_url:
        assert not make_dir, "We cannot 'make the directory' for a URL"
        local_path = get_file(url=location, use_cache=use_cache)
    else:
        local_path = get_artemis_data_path(location)
        if make_dir:
            make_file_dir(local_path)

    yield local_path

    if its_a_url and not use_cache:
        os.remove(local_path)
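# Minimal usage sketch for smart_file.  This is hedged: it assumes smart_file is wrapped with
# @contextmanager in its module (the bare `yield` suggests this), and '_demo_*' names plus the
# 'demo/notes.txt' relative path are hypothetical, for illustration only.
def _demo_smart_file():
    with smart_file('demo/notes.txt', make_dir=True) as local_path:
        with open(local_path, 'w') as f:
            f.write('hello')  # File lands under the Artemis data directory
    # With a URL instead of a path, the file is downloaded first and, unless use_cache=True,
    # removed again once the block exits.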
def get_file(relative_name, url=None, data_transformation=None):

    relative_folder, file_name = os.path.split(relative_name)
    local_folder = get_artemis_data_path(relative_folder)

    try:  # Best way to see if folder exists already - avoids race condition between processes
        os.makedirs(local_folder)
    except OSError:
        pass

    full_filename = os.path.join(local_folder, file_name)

    if not os.path.exists(full_filename):
        assert url is not None, "No local copy of '%s' was found, and you didn't provide a URL to fetch it from" % (full_filename, )
        print('Downloading file from url: "%s"...' % (url, ))
        response = urlopen(url)
        data = response.read()
        print('...Done.')

        if data_transformation is not None:
            print('Processing downloaded data...')
            data = data_transformation(data)
        with open(full_filename, 'wb') as f:
            f.write(data)
    return full_filename
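# Usage sketch for get_file.  Hedged: the relative name and URL below are placeholders for
# illustration, not real resources; a real call should point at a file you actually want.
def _demo_get_file():
    local_path = get_file(
        relative_name='demo/some_dataset.csv',        # Stored under <data_dir>/demo/
        url='https://example.com/some_dataset.csv',   # Downloaded only if the local copy is missing
        )
    with open(local_path) as f:
        print(f.read())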
def test_save_and_show_figure():
    fig = plt.figure()
    plt.imshow(np.random.randn(10, 10))
    plt.title('Test Figure')
    path = get_artemis_data_path('tests/test_fig.pdf')
    save_figure(fig, path=path)
    show_saved_figure(path)
def test_unpack_zip():
    with hold_file_root(get_artemis_data_path('file_getter_tests'), delete_after=True, delete_before=True):
        assert not os.path.exists(get_artemis_data_path('tests/test_tar_zip'))
        for _ in xrange(2):  # (Second time to check caching)
            local_file = get_file_in_archive(
                relative_path='tests/test_zip_zip',
                url='https://drive.google.com/uc?export=download&id=0B4IfiNtPKeSATWZXWjEyd1FsRG8',
                subpath='testzip/test_file.txt',
                )
            with open(local_file) as f:
                txt = f.read()
            assert txt == 'blah blah blah'
def get_current_record_dir(default_if_none=True):
    """
    The directory in which the results of the current experiment are recorded.
    """
    if _CURRENT_EXPERIMENT_RECORD is None and default_if_none:
        return get_artemis_data_path('experiments/default/', make_local_dir=True)
    else:
        return get_current_experiment_record().get_dir()
def test_temp_file():
    with hold_file_root(get_artemis_data_path('file_getter_tests'), delete_after=True, delete_before=True):
        file_path = get_file_path(make_folder=True)
        with open(file_path, 'w') as f:
            f.write('1,2,3')
        with open(file_path) as f:
            assert f.read() == '1,2,3'
def save_current_figure(path=""):
    fig = plt.gcf()
    file_name = format_filename(file_string='%T', current_time=datetime.now())
    if path != "":
        save_path = os.path.join(path, "%s.pdf" % file_name)
    else:
        save_path = get_artemis_data_path('output/{file_name}.png'.format(file_name=file_name))
    save_figure(fig, path=save_path)
def get_archive(relative_path, url, force_extract=False, archive_type=None, force_download=False):
    """
    Download a compressed archive and extract it into a folder.

    :param relative_path: Local name for the extracted folder.  (Zip file will be named this with the appropriate zip extension)
    :param url: Url of the archive to download
    :param force_extract: Force the zip file to re-extract (rather than just reusing the extracted folder)
    :return: The full path to the extracted folder on your system.
    """
    local_folder_path = get_artemis_data_path(relative_path)

    assert archive_type in ('.tar.gz', '.zip', None)

    if force_download:
        shutil.rmtree(local_folder_path)

    if not os.path.exists(local_folder_path) or force_download:  # If the folder does not exist, download zip and extract.
        # (We also check force download here to avoid a race condition)
        response = urllib2.urlopen(url)

        if archive_type is None:  # Need to infer
            if url.endswith('.tar.gz'):
                archive_type = '.tar.gz'
            elif url.endswith('.zip'):
                archive_type = '.zip'
            else:
                info = response.info()
                try:
                    header = next(x for x in info.headers if x.startswith('Content-Disposition'))
                    original_file_name = next(x for x in header.split(';') if x.startswith('filename')).split('=')[-1].lstrip('"\'').rstrip('"\'')
                    archive_type = '.tar.gz' if original_file_name.endswith('.tar.gz') else '.zip' if original_file_name.endswith('.zip') else \
                        bad_value(original_file_name, 'Filename "%s" does not end with a familiar zip extension like .zip or .tar.gz' % (original_file_name, ))
                except StopIteration:
                    raise Exception("Could not infer archive type from user argument, url-name, or file-header.  Please specify archive type as either '.zip' or '.tar.gz'.")

        print('Downloading archive from url: "%s"...' % (url, ))
        data = response.read()
        print('...Done.')

        local_zip_path = local_folder_path + archive_type
        make_file_dir(local_zip_path)
        with open(local_zip_path, 'wb') as f:
            f.write(data)

        force_extract = True

    if force_extract:
        if archive_type == '.tar.gz':
            with tarfile.open(local_zip_path) as f:
                f.extractall(local_folder_path)
        elif archive_type == '.zip':
            with ZipFile(local_zip_path) as f:
                f.extractall(local_folder_path)
        else:
            raise Exception()

    return local_folder_path
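# Usage sketch for get_archive.  Hedged: the folder name and URL are placeholders; with
# archive_type=None the type is inferred from the URL ending or the Content-Disposition header.
def _demo_get_archive():
    extracted_folder = get_archive(
        relative_path='demo/my_dataset',                 # Extracted under <data_dir>/demo/my_dataset
        url='https://example.com/my_dataset.tar.gz',     # The .tar.gz ending lets the type be inferred
        )
    print(os.listdir(extracted_folder))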
def test_save_and_show_figure_3():
    fig = plt.figure()
    plt.imshow(np.random.randn(10, 10))
    plt.title('Test Figure')
    path = get_artemis_data_path('tests/test_fig.with.strangely.formatted.ending')
    path = save_figure(fig, path=path, ext='pdf')
    show_saved_figure(path)
def test_unpack_zip():

    if os.path.exists(get_artemis_data_path('tests/test_zip_zip')):
        shutil.rmtree(get_artemis_data_path('tests/test_zip_zip'))
    if os.path.exists(get_artemis_data_path('tests/test_zip_zip.zip')):
        os.remove(get_artemis_data_path('tests/test_zip_zip.zip'))

    for _ in xrange(2):  # (Second time to check caching)
        local_file = get_file_in_archive(
            relative_path='tests/test_zip_zip',
            url='https://drive.google.com/uc?export=download&id=0B4IfiNtPKeSATWZXWjEyd1FsRG8',
            subpath='testzip/test_file.txt',
            )
        with open(local_file) as f:
            txt = f.read()
        assert txt == 'blah blah blah'
def test_simple_rsync():
    from_path = get_artemis_data_path(relative_path="tmp/tests/", make_local_dir=True)
    with open(os.path.join(from_path, "test1"), "wb"):
        pass
    with open(os.path.join(from_path, "test2"), "wb"):
        pass
    remote_path = "~/PycharmProjects/Distributed-VI/"
    assert simple_rsync(local_path=from_path, remote_path=remote_path, ip_address=ip_address, verbose=True)
    shutil.rmtree(from_path)
def get_experiment_dir():
    path = os.path.expanduser(get_artemis_config_value(
        section="experiments",
        option="experiment_directory",
        write_default=True,
        default_generator=lambda: get_artemis_data_path('experiments'),
        ))
    if not os.path.exists(path):
        make_dir(path)
    return path
def write_port_to_file(port):
    atexit.register(remove_port_file)
    port_file_path = get_artemis_data_path("tmp/plot_server/port.info", make_local_dir=True)
    if os.path.exists(port_file_path):
        # print("port.info file already exists. This might either mean that you are running another plotting server in the background and want to start a second one.\nIn this case ignore "
        #       "this message. Otherwise a previously run plotting server crashed without cleaning up afterwards. \nIn this case, please manually delete the file at {}".format(port_file_path),
        #       file=sys.stderr)  # Keep for later development
        pass
    with open(port_file_path, 'wb') as f:
        pickle.dump(port, f)
def test_get_unnamed_file_in_archive():
    with hold_file_root(get_artemis_data_path('file_getter_tests'), delete_after=True, delete_before=True):
        path = get_file_in_archive(
            url='https://drive.google.com/uc?export=download&id=0B4IfiNtPKeSATWZXWjEyd1FsRG8',
            subpath='testzip/test_file.txt',
            )
        with open(path) as f:
            txt = f.read()
        assert txt == 'blah blah blah'
def setup_web_plotting(update_period=1.):
    plot_directory = get_artemis_data_path(relative_path="tmp/web_backend/%s/" % (str(uuid.uuid4()), ), make_local_dir=True)  # Temporary directory
    atexit.register(clean_up, plot_dir=plot_directory)
    _start_plotting_server(plot_directory=plot_directory, update_period=update_period)
    set_draw_callback(TimedFigureSaver(os.path.join(plot_directory, 'artemis_figure.png'), update_period=update_period))
    set_show_callback(TimedFigureSaver(os.path.join(plot_directory, 'artemis_figure.png'), update_period=update_period))
def test_rsync():
    options = ["-r"]
    username = get_artemis_config_value(section=ip_address, option="username")
    from_path = get_artemis_data_path(relative_path="tmp/tests/", make_local_dir=True)
    with open(os.path.join(from_path, "test1"), "wb"):
        pass
    with open(os.path.join(from_path, "test2"), "wb"):
        pass
    to_path = "%s@%s:/home/%s/temp/" % (username, ip_address, username)
    assert rsync(options, from_path, to_path)
    shutil.rmtree(from_path)
def test_new_log_file():
    # Just a shorthand for persistent print.
    log_file_loc = new_log_file('dump/test_file')
    print('eee')
    print('fff')
    stop_capturing_print()

    local_log_loc = get_artemis_data_path(log_file_loc)
    with open(local_log_loc) as f:
        text = f.read()
    assert text == 'eee\nfff\n'
    os.remove(local_log_loc)
def test_proper_persistent_print_file_logging():

    log_file_path = get_artemis_data_path('tests/test_log.txt')

    with CaptureStdOut(log_file_path) as ps:
        print('fff')
        print('ggg')
    print('hhh')
    assert ps.read() == 'fff\nggg\n'

    # You can verify that the log has also been written.
    log_path = ps.get_log_file_path()
    with open(log_path) as f:
        txt = f.read()
    assert txt == 'fff\nggg\n'
def test_persistent_print():

    test_log_path = capture_print()
    print('aaa')
    print('bbb')
    assert read_print() == 'aaa\nbbb\n'
    stop_capturing_print()

    capture_print()
    assert read_print() == ''
    print('ccc')
    print('ddd')
    assert read_print() == 'ccc\nddd\n'

    os.remove(get_artemis_data_path(test_log_path))
def capture_print(log_file_path='logs/dump/%T-log.txt', print_to_console=True):
    """
    :param log_file_path: Path of file to print to, if (state and to_file).  If path does not start with a "/",
        it will be relative to the data directory.  You can use placeholders such as %T, %R, ... in the path name
        (see format_filename)
    :param print_to_console: Also continue printing to console.
    :return: The absolute path to the log file.
    """
    local_log_file_path = get_artemis_data_path(log_file_path)
    logger = CaptureStdOut(log_file_path=local_log_file_path, print_to_console=print_to_console)
    logger.__enter__()
    sys.stdout = logger
    sys.stderr = logger
    return local_log_file_path
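# Usage sketch for capture_print, mirroring test_persistent_print above.  Hedged: the _demo_*
# name is illustrative, and read_print/stop_capturing_print are assumed to come from the same module.
def _demo_capture_print():
    log_path = capture_print()        # Absolute path of the log file that output is copied to
    print('hello')
    assert read_print() == 'hello\n'  # Everything printed since capture started
    stop_capturing_print()            # Restore normal stdout/stderr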
def send_port_if_running_and_join():
    port_file_path = get_artemis_data_path("tmp/plot_server/port.info", make_local_dir=True)
    if os.path.exists(port_file_path):
        with open(port_file_path, 'rb') as f:
            port = pickle.load(f)
        print(port)
        print("Your dbplot call is attached to an existing plotting server. \nAll stdout and stderr of this existing plotting server "
              "is forwarded to the process that first created this plotting server. \nIn the future we might try to hijack this and provide you "
              "with these data streams")
        print("Use with care, this functionality might have unexpected side effects")
        try:
            while True:
                time.sleep(20)
        except KeyboardInterrupt:
            print(" Redirected Server killed")
            sys.exit()
    else:
        with open(port_file_path, "w") as f:
            pass
def test_persistent_ordered_dict():

    file_path = get_artemis_data_path('tests/podtest.pkl')

    if os.path.exists(file_path):
        os.remove(file_path)

    with PersistentOrderedDict(file_path) as pod:
        assert pod.items() == []
        pod['a'] = [1, 2, 3]
        pod['b'] = [4, 5, 6]
        pod['c'] = [7, 8]
    pod['d'] = [9, 10]  # Should not be recorded

    with PersistentOrderedDict(file_path) as pod:
        assert pod.items() == [('a', [1, 2, 3]), ('b', [4, 5, 6]), ('c', [7, 8])]
        pod['e'] = 11

    with PersistentOrderedDict(file_path) as pod:
        assert pod.items() == [('a', [1, 2, 3]), ('b', [4, 5, 6]), ('c', [7, 8]), ('e', 11)]
def get_file_and_cache(url, data_transformation=None, enable_cache_write=True, enable_cache_read=True):

    _, ext = os.path.splitext(url)

    if enable_cache_read or enable_cache_write:
        hasher = hashlib.md5()
        hasher.update(url)
        code = hasher.hexdigest()
        local_cache_path = os.path.join(get_artemis_data_path('caches'), code + ext)

    if enable_cache_read and os.path.exists(local_cache_path):
        return local_cache_path
    elif enable_cache_write:
        full_path = get_file(
            relative_name=os.path.join('caches', code + ext),
            url=url,
            data_transformation=data_transformation,
            )
        return full_path
    else:
        return get_temp_file(url, data_transformation=data_transformation)
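# Usage sketch for get_file_and_cache.  Hedged: the URL is a placeholder; the downloaded file
# is keyed in the 'caches' folder by the md5 of its URL, so repeated calls reuse the cached copy.
def _demo_get_file_and_cache():
    local_path = get_file_and_cache('https://example.com/weights.bin')
    with open(local_path, 'rb') as f:
        payload = f.read()
    print(len(payload))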
def test_get_file():

    with hold_file_root(get_artemis_data_path('file_getter_tests'), delete_after=True, delete_before=True):

        print('Testing get_file on unnamed file')
        path = get_file(url='https://drive.google.com/uc?export=download&id=1uC9sJ04V7VjzMj32q4-OLEnRFPvQpYtp')
        with open(path) as f:
            assert f.read() == 'a,b,c'

        # Should not download this time
        path = get_file(url='https://drive.google.com/uc?export=download&id=1uC9sJ04V7VjzMj32q4-OLEnRFPvQpYtp')
        with open(path) as f:
            assert f.read() == 'a,b,c'

        print('Testing get_file on named file')
        path = get_file(relative_name='my-test.txt', url='https://drive.google.com/uc?export=download&id=1uC9sJ04V7VjzMj32q4-OLEnRFPvQpYtp')
        with open(path) as f:
            assert f.read() == 'a,b,c'

        # Should not download this time
        path = get_file(relative_name='my-test.txt', url='https://drive.google.com/uc?export=download&id=1uC9sJ04V7VjzMj32q4-OLEnRFPvQpYtp')
        with open(path) as f:
            assert f.read() == 'a,b,c'
def _get_random_finder_path(relative_path=''):
    return get_artemis_data_path(os.path.join('random_finder', relative_path), make_local_dir=True)
from artemis.fileman.local_dir import get_artemis_data_path, make_file_dir
from artemis.general.functional import infer_arg_values
from artemis.general.hashing import compute_fixed_hash
from artemis.general.test_mode import is_test_mode

logging.basicConfig()
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

__author__ = 'peter'

MEMO_WRITE_ENABLED = True
MEMO_READ_ENABLED = True
MEMO_DIR = get_artemis_data_path('memoize_to_disk')


def memoize_to_disk(fcn, local_cache=False, disable_on_tests=False, use_cpickle=False, suppress_info=False):
    """
    Save (memoize) computed results to disk, so that the same function, called with the same arguments,
    does not need to be recomputed.  This is useful if you have a long-running function that is often being
    given the same arguments.  Note: this does NOT check for the state of Global variables/time/whatever
    else the function may use, so you need to make sure your function is truly a function in that outputs
    only depend on inputs.  Otherwise, this will give you misleading results.

    Usage:
        @memoize_to_disk
        def fcn(a, b, c = None):
            results = ...
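# Minimal usage sketch following the pattern the docstring above starts to describe.  Hedged:
# the function name and arithmetic are illustrative only, not part of the original module.
@memoize_to_disk
def _demo_slow_square(x):
    return x * x  # Imagine something expensive here

# _demo_slow_square(7)  # Computed and written under MEMO_DIR on the first call
# _demo_slow_square(7)  # Loaded from disk on subsequent calls with the same argument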
from contextlib import contextmanager
from shutil import rmtree
from six.moves import StringIO
import gzip
import tarfile
from zipfile import ZipFile
import shutil
import os
from six.moves.urllib.request import urlopen
from artemis.fileman.local_dir import get_artemis_data_path, make_file_dir
from artemis.general.should_be_builtins import bad_value

__author__ = 'peter'

FILE_ROOT = get_artemis_data_path()


def set_file_root(path, make_dir=True):
    if make_dir:
        try:  # Best way to see if folder exists already - avoids race condition between processes
            os.makedirs(path)
        except OSError:
            pass
    else:
        assert os.path.isdir(path)
    global FILE_ROOT
    FILE_ROOT = path


@contextmanager
def clear_ui_cache():
    shutil.rmtree(get_artemis_data_path('_ui_cache/'))
def remove_port_file():
    port_file_path = get_artemis_data_path("tmp/plot_server/port.info", make_local_dir=True)
    if os.path.exists(port_file_path):
        os.remove(port_file_path)
    def _get_paths_from_range(self, user_range):
        if user_range == 'all':
            files = self._files
        elif '-' in user_range:
            start, end = user_range.split('-')
            files = self._files[int(start): int(end) + 1]
        else:
            files = [self._files[int(user_range)]]
        return [self.dc.get_path(filename) for filename in files]

    def delete(self, user_range):
        paths = self._get_paths_from_range(user_range)
        response = input('\n'.join(paths) + '\n Will be deleted.  Type "yes" to confirm>>')
        if response == 'yes':
            for p in paths:
                rmtree(p)

    def cd(self, rel_path):
        try:
            subpath = self._files[int(rel_path)]
        except ValueError:
            subpath = rel_path
        self.dc = self.dc[subpath]


if __name__ == '__main__':
    from artemis.fileman.local_dir import get_artemis_data_path
    dcu = DirectoryCrawlerUI(get_artemis_data_path(), show_num_items=True, sortby='mtime')
    dcu.launch()