def remove_garbage_lock_files():

    lockfilename = ""

    # HUB SINGLE INSTANCE MODE

    lockfilename = os.path.join(_find_home(), ".samp")

    hub_is_running, lockfiledict = check_running_hub(lockfilename)

    if not hub_is_running:
        # If lockfilename belongs to a dead hub, then it is deleted
        if os.path.isfile(lockfilename):
            with suppress(OSError):
                os.remove(lockfilename)

    # HUB MULTIPLE INSTANCE MODE

    lockfiledir = os.path.join(_find_home(), ".samp-1")

    if os.path.isdir(lockfiledir):
        for filename in os.listdir(lockfiledir):
            if filename.startswith('samp-hub'):
                lockfilename = os.path.join(lockfiledir, filename)
                hub_is_running, lockfiledict = check_running_hub(lockfilename)
                if not hub_is_running:
                    # If lockfilename belongs to a dead hub, then it is deleted
                    if os.path.isfile(lockfilename):
                        with suppress(OSError):
                            os.remove(lockfilename)
def get_running_hubs():
    """
    Return a dictionary containing the lock-file contents of all the currently
    running hubs (single and/or multiple mode).

    The dictionary format is:

    ``{<lock-file>: {<token-name>: <token-string>, ...}, ...}``

    where ``<lock-file>`` is the lock-file name, ``<token-name>`` and
    ``<token-string>`` are the lock-file tokens (name and content).

    Returns
    -------
    running_hubs : dict
        Lock-file contents of all the currently running hubs.
    """
    hubs = {}
    lockfilename = ""

    # HUB SINGLE INSTANCE MODE

    # CHECK FOR SAMP_HUB ENVIRONMENT VARIABLE
    if "SAMP_HUB" in os.environ:
        # For the time being I assume just the std profile supported.
        if os.environ["SAMP_HUB"].startswith("std-lockurl:"):
            lockfilename = os.environ["SAMP_HUB"][len("std-lockurl:"):]
    else:
        lockfilename = os.path.join(_find_home(), ".samp")

    hub_is_running, lockfiledict = check_running_hub(lockfilename)

    if hub_is_running:
        hubs[lockfilename] = lockfiledict

    # HUB MULTIPLE INSTANCE MODE

    lockfiledir = os.path.join(_find_home(), ".samp-1")

    if os.path.isdir(lockfiledir):
        for filename in os.listdir(lockfiledir):
            if filename.startswith('samp-hub'):
                lockfilename = os.path.join(lockfiledir, filename)
                hub_is_running, lockfiledict = check_running_hub(lockfilename)
                if hub_is_running:
                    hubs[lockfilename] = lockfiledict

    return hubs
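# Usage sketch (not part of the original module): get_running_hubs() returns
# the mapping described in its docstring,
# ``{<lock-file>: {<token-name>: <token-string>, ...}, ...}``.
# This snippet assumes the helpers above (check_running_hub, _find_home, ...)
# are in scope, as they are inside astropy's SAMP lock-file helper module.

running = get_running_hubs()
for lockfile, tokens in running.items():
    print("Hub lock-file:", lockfile)
    for name, value in tokens.items():
        print("    {} = {}".format(name, value))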
def setUp(self):
    self.tmpdir = os.path.join(_find_home(), 'Desktop', 'tmp_testingdir')
    try:
        os.makedirs(self.tmpdir)
    except OSError:
        pass

    basename = 'abc.txt'
    self.dummy_fname = os.path.join(self.tmpdir, basename)
    os.system('touch ' + self.dummy_fname)
def setUp(self):
    self.tmpdir = os.path.join(_find_home(), '.temp_halotools_testing_dir')
    try:
        os.makedirs(self.tmpdir)
    except OSError:
        pass

    basename = 'abc.txt'
    self.dummy_fname = os.path.join(self.tmpdir, basename)
    _t = Table({'x': [0]})
    _t.write(self.dummy_fname, format='ascii')
def setUp(self):
    self.tmpdir = os.path.join(_find_home(), 'Desktop', 'tmp_testingdir')
    try:
        os.makedirs(self.tmpdir)
    except OSError:
        pass

    basename = 'abc.txt'
    self.dummy_fname = os.path.join(self.tmpdir, basename)
    _t = Table({'x': [0]})
    _t.write(self.dummy_fname, format='ascii')

    self.good_columns_to_keep_dict = ({
        'halo_x': (1, 'f4'), 'halo_y': (2, 'f4'), 'halo_z': (3, 'f4'),
        'halo_id': (4, 'i8'), 'halo_mvir': (5, 'f4')
        })

    self.good_output_fname = os.path.join(self.tmpdir, 'def.hdf5')

    self.bad_columns_to_keep_dict1 = ({
        'halo_x': (1, 'f4'),
        'halo_z': (3, 'f4'), 'halo_id': (4, 'i8'), 'halo_mvir': (5, 'f4')
        })

    self.bad_columns_to_keep_dict2 = ({
        'halo_x': (1, 'f4'), 'halo_y': (2, 'f4'), 'halo_z': (3, 'f4'),
        'halo_mvir': (5, 'f4')
        })

    self.bad_columns_to_keep_dict3 = ({
        'halo_x': (1, 'f4'), 'halo_y': (2, 'f4'), 'halo_z': (3, 'f4'),
        'halo_id': (4, 'i8'),
        })

    self.bad_columns_to_keep_dict4 = ({
        'halo_x': (1, 'f4'), 'halo_y': (1, 'f4'), 'halo_z': (3, 'f4'),
        'halo_id': (4, 'i8'), 'halo_mvir': (5, 'f4')
        })
def setUp(self):
    self.tmpdir = os.path.join(_find_home(), '.tmp_testingdir')
    try:
        os.makedirs(self.tmpdir)
    except OSError:
        pass

    basename = 'abc.txt'
    self.dummy_fname = os.path.join(self.tmpdir, basename)
    _t = Table({'x': [0]})
    _t.write(self.dummy_fname, format='ascii')

    self.good_columns_to_keep_dict = ({
        'halo_x': (1, 'f4'), 'halo_y': (2, 'f4'), 'halo_z': (3, 'f4'),
        'halo_id': (4, 'i8'), 'halo_mvir': (5, 'f4')
        })

    self.good_output_fname = os.path.join(self.tmpdir, 'def.hdf5')

    self.bad_columns_to_keep_dict1 = ({
        'halo_x': (1, 'f4'),
        'halo_z': (3, 'f4'), 'halo_id': (4, 'i8'), 'halo_mvir': (5, 'f4')
        })

    self.bad_columns_to_keep_dict2 = ({
        'halo_x': (1, 'f4'), 'halo_y': (2, 'f4'), 'halo_z': (3, 'f4'),
        'halo_mvir': (5, 'f4')
        })

    self.bad_columns_to_keep_dict3 = ({
        'halo_x': (1, 'f4'), 'halo_y': (2, 'f4'), 'halo_z': (3, 'f4'),
        'halo_id': (4, 'i8'),
        })

    self.bad_columns_to_keep_dict4 = ({
        'halo_x': (1, 'f4'), 'halo_y': (1, 'f4'), 'halo_z': (3, 'f4'),
        'halo_id': (4, 'i8'), 'halo_mvir': (5, 'f4')
        })
def setUp(self):
    self.tmpdir = os.path.join(_find_home(), "Desktop", "tmp_testingdir")
    try:
        os.makedirs(self.tmpdir)
    except OSError:
        pass

    basename = "abc.txt"
    self.dummy_fname = os.path.join(self.tmpdir, basename)
    os.system("touch " + self.dummy_fname)

    self.good_columns_to_keep_dict = {
        "halo_x": (1, "f4"),
        "halo_y": (2, "f4"),
        "halo_z": (3, "f4"),
        "halo_id": (4, "i8"),
        "halo_mvir": (5, "f4"),
    }

    self.good_output_fname = os.path.join(self.tmpdir, "def.hdf5")

    self.bad_columns_to_keep_dict1 = {
        "halo_x": (1, "f4"),
        "halo_z": (3, "f4"),
        "halo_id": (4, "i8"),
        "halo_mvir": (5, "f4"),
    }

    self.bad_columns_to_keep_dict2 = {
        "halo_x": (1, "f4"),
        "halo_y": (2, "f4"),
        "halo_z": (3, "f4"),
        "halo_mvir": (5, "f4"),
    }

    self.bad_columns_to_keep_dict3 = {
        "halo_x": (1, "f4"),
        "halo_y": (2, "f4"),
        "halo_z": (3, "f4"),
        "halo_id": (4, "i8"),
    }

    self.bad_columns_to_keep_dict4 = {
        "halo_x": (1, "f4"),
        "halo_y": (1, "f4"),
        "halo_z": (3, "f4"),
        "halo_id": (4, "i8"),
        "halo_mvir": (5, "f4"),
    }
def test_cache_config():
    """ Verify that the Astropy and Halotools cache directories are detected,
    and that the latter is a subdirectory of the former.
    """
    homedir = _find_home()

    astropy_cache_dir = os.path.join(homedir, '.astropy', 'cache')
    if not os.path.isdir(astropy_cache_dir):
        os.mkdir(astropy_cache_dir)

    halotools_cache = cache_config.get_catalogs_dir()
    assert os.path.exists(halotools_cache)
    assert os.path.join(astropy_cache_dir, 'halotools') == halotools_cache
def get_main_running_hub():
    """
    Get either the hub given by the environment variable SAMP_HUB, or the one
    given by the lockfile .samp in the user home directory.
    """
    hubs = get_running_hubs()

    if not hubs:
        raise SAMPHubError("Unable to find a running SAMP Hub.")

    # CHECK FOR SAMP_HUB ENVIRONMENT VARIABLE
    if "SAMP_HUB" in os.environ:
        # For the time being I assume just the std profile supported.
        if os.environ["SAMP_HUB"].startswith("std-lockurl:"):
            lockfilename = os.environ["SAMP_HUB"][len("std-lockurl:"):]
        else:
            raise SAMPHubError("SAMP Hub profile not supported.")
    else:
        lockfilename = os.path.join(_find_home(), ".samp")

    return hubs[lockfilename]
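# Usage sketch (illustrative, not from the original source). Assuming
# SAMPHubError is in scope as in the surrounding module, the call below either
# returns the lock-file token dictionary of the main running hub or raises
# when no usable hub can be found.

try:
    main_hub_tokens = get_main_running_hub()
except SAMPHubError as exc:
    print("No usable SAMP Hub:", exc)
else:
    # The Standard Profile lock-file normally advertises the hub's XML-RPC
    # endpoint under this token name; .get() returns None if it is absent.
    print(main_hub_tokens.get("samp.hub.xmlrpc.url"))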
from unittest import TestCase

from astropy.tests.helper import pytest

from ...factories import PrebuiltHodModelFactory

from ....sim_manager import CachedHaloCatalog, FakeSim
from ....custom_exceptions import HalotoolsError

# Determine whether the machine is mine
# This will be used to select tests whose
# returned values depend on the configuration
# of my personal cache directory files
from astropy.config.paths import _find_home

aph_home = '/Users/aphearin'
detected_home = _find_home()
if aph_home == detected_home:
    APH_MACHINE = True
else:
    APH_MACHINE = False

__all__ = ('TestHearin15', )


class TestHearin15(TestCase):

    def setup_class(self):
        pass

    def test_Hearin15(self):
        model = PrebuiltHodModelFactory('hearin15')
def get_catalogs_dir(**kwargs):
    """ Find the path to the subdirectory of the halotools cache directory
    where catalogs of the requested ``catalog_type`` are stored.
    If the directory doesn't exist, make it, then return the path.

    Parameters
    ----------
    catalog_type : string, optional
        String giving the type of catalog.
        Should be 'particles', 'subhalos', or 'raw_halos'.

    simname : string, optional
        Nickname of the simulation. Currently supported simulations are
        Bolshoi (simname = ``bolshoi``), Consuelo (simname = ``consuelo``),
        MultiDark (simname = ``multidark``),
        and Bolshoi-Planck (simname = ``bolplanck``).

    halo_finder : string, optional
        Nickname of the halo-finder, e.g., `rockstar` or `bdm`.

    external_cache_loc : string, optional
        Absolute path to an alternative source of halo catalogs.
        Method assumes that ``external_cache_loc`` is organized in the
        same way that the normal Halotools cache is. Specifically:

        * Particle tables should be located in
          ``external_cache_loc/particle_catalogs/simname``

        * Processed halo tables should be located in
          ``external_cache_loc/halo_catalogs/simname/halo_finder``

        * Raw halo tables (unprocessed ASCII) should be located in
          ``external_cache_loc/raw_halo_catalogs/simname/halo_finder``

    Returns
    -------
    dirname : str
        Path to the halotools directory storing simulation data.
    """
    # Identify the root cache directory
    if 'external_cache_loc' in kwargs.keys():
        if os.path.isdir(kwargs['external_cache_loc']) is False:
            raise KeyError("Input external_cache_loc directory = %s \n "
                "Directory does not exist" % kwargs['external_cache_loc'])
        else:
            halotools_cache_dir = kwargs['external_cache_loc']
    else:
        homedir = _find_home()
        astropy_cache_dir = os.path.join(homedir, '.astropy', 'cache')
        defensively_create_subdir(astropy_cache_dir)

        halotools_cache_dir = os.path.join(astropy_cache_dir, 'halotools')
        defensively_create_subdir(halotools_cache_dir)

    if 'catalog_type' not in kwargs.keys():
        return halotools_cache_dir
    else:
        catalog_type = kwargs['catalog_type']

    acceptable_processed_halos_arguments = (
        ['subhalos', 'subhalo', 'halo', 'halos',
         'halo_catalogs', 'subhalo_catalogs', 'subhalo_catalog', 'halo_catalog',
         'halos_catalogs', 'subhalos_catalogs', 'subhalos_catalog', 'halos_catalog']
        )
    acceptable_particles_arguments = (
        ['particle', 'particles', 'particle_catalog', 'particle_catalogs',
         'particles_catalog', 'particles_catalogs']
        )
    acceptable_raw_halos_arguments = (
        ['raw_halos', 'raw_subhalos', 'raw_halo', 'raw_subhalo',
         'raw_halos_catalog', 'raw_subhalos_catalog',
         'raw_halo_catalog', 'raw_subhalo_catalog',
         'raw_halos_catalogs', 'raw_subhalos_catalogs',
         'raw_halo_catalogs', 'raw_subhalo_catalogs']
        )

    if catalog_type in acceptable_processed_halos_arguments:
        subdir_name = 'halo_catalogs'
    elif catalog_type in acceptable_particles_arguments:
        subdir_name = 'particle_catalogs'
    elif catalog_type in acceptable_raw_halos_arguments:
        subdir_name = 'raw_halo_catalogs'
    else:
        raise CatalogTypeError(catalog_type)

    # Create the directory .astropy/cache/halotools/subdir_name
    catalog_type_dirname = os.path.join(halotools_cache_dir, subdir_name)
    defensively_create_subdir(catalog_type_dirname)

    # Now check to see if there exists a cache subdirectory for simname
    if 'simname' not in kwargs.keys():
        return catalog_type_dirname
    else:
        if kwargs['simname'] not in supported_sim_list:
            raise UnsupportedSimError(kwargs['simname'])
        simname_dirname = os.path.join(catalog_type_dirname, kwargs['simname'])
        defensively_create_subdir(simname_dirname)

    if 'halo_finder' not in kwargs.keys():
        return simname_dirname
    else:
        halo_finder_dirname = os.path.join(simname_dirname,
            kwargs['halo_finder'])
        defensively_create_subdir(halo_finder_dirname)
        return halo_finder_dirname
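# Usage sketch (not part of the original module) showing how the optional
# keyword arguments documented above progressively narrow the returned cache
# path. The commented paths assume the default Astropy cache location under
# the user's home directory.

root = get_catalogs_dir()
# ~/.astropy/cache/halotools

halo_dir = get_catalogs_dir(catalog_type='halos')
# ~/.astropy/cache/halotools/halo_catalogs

bolshoi_rockstar_dir = get_catalogs_dir(
    catalog_type='halos', simname='bolshoi', halo_finder='rockstar')
# ~/.astropy/cache/halotools/halo_catalogs/bolshoi/rockstar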
def setUp(self):
    homedir = _find_home()

    self.downman = DownloadManager()

    def defensively_create_empty_dir(dirname):
        if os.path.isdir(dirname) is False:
            os.mkdir(dirname)
        else:
            shutil.rmtree(dirname)
            os.mkdir(dirname)

    # First create an empty directory where we will
    # temporarily store a collection of empty files
    self.base_dummydir = os.path.join(
        homedir, 'temp_directory_for_halotools_testing')
    defensively_create_empty_dir(self.base_dummydir)

    self.dummyloc = os.path.join(self.base_dummydir, 'halotools')
    defensively_create_empty_dir(self.dummyloc)

    self.halocat_dir = os.path.join(self.dummyloc, 'halo_catalogs')
    defensively_create_empty_dir(self.halocat_dir)

    self.ptclcat_dir = os.path.join(self.dummyloc, 'particle_catalogs')
    defensively_create_empty_dir(self.ptclcat_dir)

    self.raw_halo_table_dir = os.path.join(self.dummyloc, 'raw_halo_catalogs')
    defensively_create_empty_dir(self.raw_halo_table_dir)

    self.simnames = ['bolshoi', 'bolplanck', 'multidark', 'consuelo']
    self.halo_finders = ['rockstar', 'bdm']
    self.dummy_version_names = ['halotools.alpha']
    self.extension = '.hdf5'

    self.bolshoi_fnames = [
        'hlist_0.33035', 'hlist_0.54435', 'hlist_0.67035', 'hlist_1.00035'
    ]
    self.bolshoi_bdm_fnames = [
        'hlist_0.33030', 'hlist_0.49830', 'hlist_0.66430', 'hlist_1.00035'
    ]
    self.bolplanck_fnames = [
        'hlist_0.33035', 'hlist_0.54435', 'hlist_0.67035', 'hlist_1.00035'
    ]
    self.consuelo_fnames = [
        'hlist_0.33324', 'hlist_0.50648', 'hlist_0.67540', 'hlist_1.00000'
    ]
    self.multidark_fnames = [
        'hlist_0.31765', 'hlist_0.49990', 'hlist_0.68215', 'hlist_1.00109'
    ]

    # make all relevant subdirectories and dummy files
    for simname in self.simnames:
        simdir = os.path.join(self.halocat_dir, simname)
        defensively_create_empty_dir(simdir)
        rockstardir = os.path.join(simdir, 'rockstar')
        defensively_create_empty_dir(rockstardir)

        if simname == 'bolshoi':
            fnames = self.bolshoi_fnames
        elif simname == 'bolplanck':
            fnames = self.bolplanck_fnames
        elif simname == 'consuelo':
            fnames = self.consuelo_fnames
        elif simname == 'multidark':
            fnames = self.multidark_fnames

        for name in fnames:
            for version in self.dummy_version_names:
                full_fname = name + '.' + version + self.extension
                abs_fname = os.path.join(rockstardir, full_fname)
                _t = Table({'x': [0]})
                _t.write(abs_fname, format='ascii')

        if simname == 'bolshoi':
            simdir = os.path.join(self.halocat_dir, simname)
            bdmdir = os.path.join(simdir, 'bdm')
            defensively_create_empty_dir(bdmdir)
            fnames = self.bolshoi_bdm_fnames
            for name in fnames:
                for version in self.dummy_version_names:
                    full_fname = name + '.' + version + self.extension
                    abs_fname = os.path.join(bdmdir, full_fname)
                    _t = Table({'x': [0]})
                    _t.write(abs_fname, format='ascii')

    p = os.path.join(self.halocat_dir, 'bolshoi', 'bdm')
    assert os.path.isdir(p)
    f = 'hlist_0.33030.halotools.alpha.hdf5'
    full_fname = os.path.join(p, f)
    assert os.path.isfile(full_fname)

    self.clear_APH_MACHINE_of_highz_file()
""" This module is used to search the user's disk to see whether data files are present to conduct unit-tests in which Halotools results are compared against results obtained from independently-written code bases. """ from __future__ import absolute_import, division, print_function, unicode_literals import os from astropy.config.paths import _find_home halotools_cache_dirname = os.path.join(_find_home(), '.astropy', 'cache', 'halotools') halotool_unit_testing_dirname = os.path.join(halotools_cache_dirname, 'unit_testing_files') __all__ = ('tpcf_corrfunc_comparison_files_exist', 'wp_corrfunc_comparison_files_exist') def tpcf_corrfunc_comparison_files_exist(return_fnames=False): """ """ aph_fname1 = os.path.join(halotool_unit_testing_dirname, 'sample1_position_array.npy') aph_fname2 = os.path.join(halotool_unit_testing_dirname, 'sample2_position_array.npy') aph_fname3 = os.path.join(halotool_unit_testing_dirname, 'rp_bins_array.npy') deep_fname1 = os.path.join(halotool_unit_testing_dirname, 'sinha_corrfunc_results', 'sample1_position_array_xi.npy') deep_fname2 = os.path.join(halotool_unit_testing_dirname,
import numpy as np

from astropy.config.paths import _find_home
from astropy.tests.helper import remote_data, pytest
from unittest import TestCase

from ..download_manager import DownloadManager
from ..halo_table_cache import HaloTableCache
from .. import sim_defaults

from ...custom_exceptions import UnsupportedSimError, HalotoolsError

### Determine whether the machine is mine
# This will be used to select tests whose
# returned values depend on the configuration
# of my personal cache directory files
aph_home = u'/Users/aphearin'
detected_home = _find_home()
if aph_home == detected_home:
    APH_MACHINE = True
else:
    APH_MACHINE = False

__all__ = ('TestDownloadManager', )


class TestDownloadManager(TestCase):

    def setUp(self):
        homedir = _find_home()
def create_lock_file(lockfilename=None, mode=None, hub_id=None,
                     hub_params=None):

    # Remove lock-files of dead hubs
    remove_garbage_lock_files()

    lockfiledir = ""

    # CHECK FOR SAMP_HUB ENVIRONMENT VARIABLE
    if "SAMP_HUB" in os.environ:
        # For the time being I assume just the std profile supported.
        if os.environ["SAMP_HUB"].startswith("std-lockurl:"):
            lockfilename = os.environ["SAMP_HUB"][len("std-lockurl:"):]
            lockfile_parsed = urlparse(lockfilename)

            if lockfile_parsed[0] != 'file':
                warnings.warn("Unable to start a Hub with lockfile {}. "
                              "Start-up process aborted.".format(lockfilename),
                              SAMPWarning)
                return False
            else:
                lockfilename = lockfile_parsed[2]
    else:
        # If it is a fresh Hub instance
        if lockfilename is None:

            log.debug("Running mode: " + mode)

            if mode == 'single':
                lockfilename = os.path.join(_find_home(), ".samp")
            else:

                lockfiledir = os.path.join(_find_home(), ".samp-1")

                # If missing create .samp-1 directory
                try:
                    os.mkdir(lockfiledir)
                except OSError:
                    pass  # directory already exists
                finally:
                    os.chmod(lockfiledir,
                             stat.S_IREAD + stat.S_IWRITE + stat.S_IEXEC)

                lockfilename = os.path.join(lockfiledir,
                                            "samp-hub-{}".format(hub_id))

        else:
            log.debug("Running mode: multiple")

    hub_is_running, lockfiledict = check_running_hub(lockfilename)

    if hub_is_running:
        warnings.warn("Another SAMP Hub is already running. Start-up process "
                      "aborted.", SAMPWarning)
        return False

    log.debug("Lock-file: " + lockfilename)

    write_lockfile(lockfilename, hub_params)

    return lockfilename
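# Usage sketch (illustrative). In single-instance mode create_lock_file()
# writes the ~/.samp lock-file and returns its path, or returns False if
# another hub is already running. The hub_params dict below is a hypothetical
# placeholder; the real hub supplies the full set of lock-file tokens.

example_hub_params = {"samp.secret": "example-secret"}  # placeholder tokens
lockfile = create_lock_file(mode='single', hub_params=example_hub_params)
if lockfile is False:
    print("Another SAMP Hub is already running; nothing was written.")
else:
    print("Lock-file written to", lockfile)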
def setUp(self):
    homedir = _find_home()

    self.downman = DownloadManager()

    def defensively_create_empty_dir(dirname):
        if os.path.isdir(dirname) is False:
            os.mkdir(dirname)
        else:
            shutil.rmtree(dirname)
            os.mkdir(dirname)

    # First create an empty directory where we will
    # temporarily store a collection of empty files
    self.base_dummydir = os.path.join(homedir,
        'temp_directory_for_halotools_testing')
    defensively_create_empty_dir(self.base_dummydir)

    self.dummyloc = os.path.join(self.base_dummydir, 'halotools')
    defensively_create_empty_dir(self.dummyloc)

    self.halocat_dir = os.path.join(self.dummyloc, 'halo_catalogs')
    defensively_create_empty_dir(self.halocat_dir)

    self.ptclcat_dir = os.path.join(self.dummyloc, 'particle_catalogs')
    defensively_create_empty_dir(self.ptclcat_dir)

    self.raw_halo_table_dir = os.path.join(self.dummyloc, 'raw_halo_catalogs')
    defensively_create_empty_dir(self.raw_halo_table_dir)

    self.simnames = ['bolshoi', 'bolplanck', 'multidark', 'consuelo']
    self.halo_finders = ['rockstar', 'bdm']
    self.dummy_version_names = ['halotools.alpha']
    self.extension = '.hdf5'

    self.bolshoi_fnames = ['hlist_0.33035', 'hlist_0.54435',
        'hlist_0.67035', 'hlist_1.00035']
    self.bolshoi_bdm_fnames = ['hlist_0.33030', 'hlist_0.49830',
        'hlist_0.66430', 'hlist_1.00035']
    self.bolplanck_fnames = ['hlist_0.33035', 'hlist_0.54435',
        'hlist_0.67035', 'hlist_1.00035']
    self.consuelo_fnames = ['hlist_0.33324', 'hlist_0.50648',
        'hlist_0.67540', 'hlist_1.00000']
    self.multidark_fnames = ['hlist_0.31765', 'hlist_0.49990',
        'hlist_0.68215', 'hlist_1.00109']

    # make all relevant subdirectories and dummy files
    for simname in self.simnames:
        simdir = os.path.join(self.halocat_dir, simname)
        defensively_create_empty_dir(simdir)
        rockstardir = os.path.join(simdir, 'rockstar')
        defensively_create_empty_dir(rockstardir)

        if simname == 'bolshoi':
            fnames = self.bolshoi_fnames
        elif simname == 'bolplanck':
            fnames = self.bolplanck_fnames
        elif simname == 'consuelo':
            fnames = self.consuelo_fnames
        elif simname == 'multidark':
            fnames = self.multidark_fnames

        for name in fnames:
            for version in self.dummy_version_names:
                full_fname = name + '.' + version + self.extension
                abs_fname = os.path.join(rockstardir, full_fname)
                os.system('touch ' + abs_fname)

        if simname == 'bolshoi':
            simdir = os.path.join(self.halocat_dir, simname)
            bdmdir = os.path.join(simdir, 'bdm')
            defensively_create_empty_dir(bdmdir)
            fnames = self.bolshoi_bdm_fnames
            for name in fnames:
                for version in self.dummy_version_names:
                    full_fname = name + '.' + version + self.extension
                    abs_fname = os.path.join(bdmdir, full_fname)
                    os.system('touch ' + abs_fname)

    p = os.path.join(self.halocat_dir, 'bolshoi', 'bdm')
    assert os.path.isdir(p)
    f = 'hlist_0.33030.halotools.alpha.hdf5'
    full_fname = os.path.join(p, f)
    assert os.path.isfile(full_fname)

    self.clear_APH_MACHINE_of_highz_file()
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `~halotools.sim_manager` sub-package is responsible for downloading
halo catalogs, reading ascii data, storing hdf5 binaries and
keeping a persistent memory of their location on disk and associated metadata.
"""
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import os
from astropy.config.paths import _find_home

try:
    halotools_cache_dirname = os.path.join(_find_home(),
        '.astropy', 'cache', 'halotools')
    os.makedirs(halotools_cache_dirname)
except OSError:
    pass

from .fake_sim import FakeSim
from .download_manager import DownloadManager
from .cached_halo_catalog import CachedHaloCatalog
from .user_supplied_halo_catalog import UserSuppliedHaloCatalog
from .user_supplied_ptcl_catalog import UserSuppliedPtclCatalog
from .rockstar_hlist_reader import RockstarHlistReader
from .tabular_ascii_reader import TabularAsciiReader
from .halo_table_cache import HaloTableCache
from .ptcl_table_cache import PtclTableCache
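# Usage sketch (not part of the package __init__). The classes re-exported
# above are importable directly from halotools.sim_manager; for example,
# FakeSim builds a small randomly generated halo catalog that is convenient
# for quick tests without downloading any data.

from halotools.sim_manager import FakeSim

fake_sim = FakeSim()
print(fake_sim.halo_table.colnames[:5])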