def resolve_local(document_loader, uri):
    # Absolute, existing paths resolve directly to a file:// URI.
    if uri.startswith("/") and os.path.exists(uri):
        return Path(uri).as_uri()
    # Relative paths are checked against the current working directory.
    if os.path.exists(urllib.parse.urlparse(urllib.parse.urldefrag(
            "{}/{}".format(Path.cwd().as_uri(), uri))[0])[2]):
        return "{}/{}".format(Path.cwd().as_uri(), uri)
    # Otherwise search the XDG data directories under "commonwl/".
    sharepaths = [os.environ.get(
        "XDG_DATA_HOME",
        os.path.join(os.path.expanduser('~'), ".local", "share"))]
    sharepaths.extend(
        os.environ.get("XDG_DATA_DIRS",
                       "/usr/local/share/:/usr/share/").split(":"))
    shares = [os.path.join(s, "commonwl", uri) for s in sharepaths]
    _logger.debug("Search path is %s", shares)
    for path in shares:
        if os.path.exists(path):
            # Return the matched share path; the original returned
            # Path(uri).as_uri(), which ignored the directory just found.
            return Path(path).as_uri()
        if os.path.exists("{}.cwl".format(path)):
            return Path("{}.cwl".format(path)).as_uri()
    return None
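# Usage sketch (an assumption, not from the source): `document_loader` is
# unused by the body above, so None can be passed; "my-workflow.cwl" is a
# hypothetical file name.
resolved = resolve_local(None, "my-workflow.cwl")
print(resolved)  # a file:// URI if found locally or under an XDG share, else None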
def _normalize_root(root):
    """ Convert to absolute and squash 'path/../folder' """
    try:
        return os.path.abspath((Path.cwd() / root).absolute().as_posix())
    except Exception:  # narrowed from a bare except
        # Note: this fallback returns a Path while the happy path returns a str.
        return Path.cwd()
def specifications_for_revision(self, revision):
    # type: (Revision) -> [Specification]
    assert not revision or isinstance(revision, Revision)
    # logger.debug('Getting cartfile from revision {} of {})'.format(revision, self))
    if revision in self.specifications_cache:
        return self.specifications_cache[revision]
    elif revision is None and self == self.punic.root_project:
        cartfile = Cartfile(use_ssh=self.config.use_ssh, overrides=config.repo_overrides)
        specifications = []
        cartfile_path = self.path / 'Cartfile'
        cartfile_private_path = self.path / 'Cartfile.private'
        if cartfile_path.exists():
            cartfile.read(cartfile_path)
            specifications += cartfile.specifications
        if cartfile_private_path.exists():
            cartfile.read(cartfile_private_path)
            if set(specifications).intersection(cartfile.specifications):
                raise PunicRepresentableError(
                    "Specifications in your Cartfile.private conflict with specifications within your Cartfile.")
            specifications += cartfile.specifications
        if not specifications:
            raise PunicRepresentableError(
                "No specifications found in {} or {}".format(
                    cartfile_path.relative_to(Path.cwd()),
                    cartfile_private_path.relative_to(Path.cwd())))
    else:
        self.check_work_directory()
        try:
            parsed_revision = self.rev_parse(revision)
        except Exception:  # narrowed from a bare except
            print("FAILED")  # JIWTODO
            return []
        result = runner.run('git show "{}:Cartfile"'.format(parsed_revision), cwd=self.path)
        if result.return_code != 0:
            specifications = []
        else:
            data = result.stdout
            cartfile = Cartfile(use_ssh=self.config.use_ssh, overrides=config.repo_overrides)
            cartfile.read(data)
            specifications = cartfile.specifications
    self.specifications_cache[revision] = specifications
    return specifications
def _normalize_root(root):
    """ Convert to absolute and squash 'path/../folder' """
    # noinspection PyBroadException
    try:
        return os.path.abspath((Path.cwd() / root).absolute().as_posix())
    except Exception:
        return Path.cwd()
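# Worked example (an illustration, not from the source): run from
# /home/user/proj, the "src/.." hop is squashed by os.path.abspath, so
# _normalize_root("src/../lib") returns "/home/user/proj/lib".
print(_normalize_root("src/../lib"))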
def get_project_dir():
    # The leading Path('/') makes these paths absolute. The original wrote
    # Path.cwd() / '/', which is equivalent but misleading: joining '/'
    # discards the cwd entirely.
    try:
        project_dir = (Path('/') / 'My Drive' / 'Jotham' / 'Personal Docs'
                       / 'ML for finance' / 'SEC Sentiment Analysis - Github Upload'
                       / 'sec-sentiment')
        os.chdir(project_dir)
    except OSError:  # narrowed from BaseException; os.chdir raises OSError
        project_dir = (Path('/') / 'Volumes' / 'GoogleDrive' / 'My Drive'
                       / 'Jotham' / 'Personal Docs' / 'ML for finance'
                       / 'SEC Sentiment Analysis - Github Upload' / 'sec-sentiment')
        os.chdir(project_dir)
    return project_dir
def _get_jupyter_notebook_filename(cls):
    if not sys.argv[0].endswith(os.path.sep + 'ipykernel_launcher.py') \
            or len(sys.argv) < 3 or not sys.argv[2].endswith('.json'):
        return None

    # we can safely assume that we can import the notebook package here
    # noinspection PyBroadException
    try:
        from notebook.notebookapp import list_running_servers
        import requests
        current_kernel = sys.argv[2].split(os.path.sep)[-1].replace('kernel-', '').replace('.json', '')
        server_info = next(list_running_servers())
        r = requests.get(
            url=server_info['url'] + 'api/sessions',
            headers={'Authorization': 'token {}'.format(server_info.get('token', ''))})
        r.raise_for_status()
        notebooks = r.json()

        cur_notebook = None
        for n in notebooks:
            if n['kernel']['id'] == current_kernel:
                cur_notebook = n
                break

        notebook_path = cur_notebook['notebook']['path']
        # always a forward slash, because this comes from a URI (never a backslash, not even on Windows)
        entry_point_filename = notebook_path.split('/')[-1]

        # now we should try to find the actual file
        entry_point = (Path.cwd() / entry_point_filename).absolute()
        if not entry_point.is_file():
            entry_point = (Path.cwd() / notebook_path).absolute()

        # install the post-store hook, so we always have a synced file in the system
        cls._jupyter_install_post_store_hook(entry_point.as_posix())

        # now replace the .ipynb with .py
        # we assume we will have that file available with the Jupyter notebook plugin
        entry_point = entry_point.with_suffix('.py')

        return entry_point.as_posix()
    except Exception:
        return None
def __init__(self, testcase_context):
    testcase_name = get_testcase_name(testcase_context)
    relative_report_dir = testcase_context.config.getoption("--reports")
    absolute_framework_dir = Path.cwd()
    self.testcase_parameters = {
        "dirs": {
            "working_dir": Path(absolute_framework_dir, relative_report_dir, testcase_name),
            "relative_working_dir": Path(relative_report_dir, testcase_name),
            "install_dir": Path(testcase_context.config.getoption("--installdir")),
            "shared_dir": Path(absolute_framework_dir, "shared_files"),
        },
        "file_paths": {
            "report_file": Path(absolute_framework_dir, relative_report_dir, testcase_name,
                                "testcase_{}.log".format(testcase_name)),
            "testcase_file": Path(testcase_context.node.fspath),
        },
        "testcase_name": testcase_name,
        "loglevel": testcase_context.config.getoption("--loglevel"),
        "valgrind_usage": testcase_context.config.getoption("--run-with-valgrind"),
    }
def __init__(self, testcase_context):
    testcase_name = get_testcase_name(testcase_context)
    relative_report_dir = testcase_context.getfixturevalue("reports")
    absolute_framework_dir = Path.cwd()
    self.testcase_parameters = {
        "dirs": {
            "working_dir": Path(absolute_framework_dir, relative_report_dir, testcase_name),
            "relative_working_dir": Path(relative_report_dir, testcase_name),
        },
        "file_paths": {
            "report_file": Path(absolute_framework_dir, relative_report_dir, testcase_name,
                                "testcase_{}.log".format(testcase_name)),
            "testcase_file": Path(testcase_context.node.fspath),
        },
        "testcase_name": testcase_name,
        "loglevel": testcase_context.getfixturevalue("loglevel"),
        "valgrind_usage": testcase_context.getfixturevalue("runwithvalgrind"),
    }
def __enter__(self):
    if not self.folder_ok:
        # print("Warning - non-ascii folder", self.folder)
        print("Setting the cwd to folder")
        self.old_folder = Path.cwd()
        os.chdir(str(self.folder))
        self.access_path = Path(self.access_path.name)

    if not self.name_ok:
        # print("Warning - non-ascii file name '{}'. Will give file a temporary name then change back afterwards."\
        #       .format(self.name))
        # Raises if even the suffix can't be encoded.
        self.access_path.suffix.encode(self.ALLOWED_ENCODING)
        # Pick a random, unused temporary name with the same suffix.
        while True:
            self.access_path = self.access_path.with_name(
                str(np.random.randint(1 << 31)) + self.access_path.suffix)
            if not self.access_path.exists():
                break
        if self.mode == "r":
            os.rename(self.path, self.access_path)

    self.access_path = str(self.access_path)
    return self
def main():
    proj_dir = Path.cwd().parent.parent
    metafile = proj_dir / 'source' / 'metadata.csv'

    # path to save json annotations to
    pmc_save_dir = proj_dir / 'data' / 'pmc'
    pm_save_dir = proj_dir / 'data' / 'pm'

    # create path for rdf files for later processing
    pmc_rdf_dir = proj_dir / 'data' / 'ttl' / 'pmc'
    pm_rdf_dir = proj_dir / 'data' / 'ttl' / 'pm'

    # create directories (renamed from `dir`, which shadows the builtin)
    for d in (pmc_save_dir, pm_save_dir, pmc_rdf_dir, pm_rdf_dir):
        os.makedirs(d, exist_ok=True)

    metadata = pd.read_csv(metafile, keep_default_na=False)
    pmcids = list(set(metadata['pmcid']))
    pmids = list(set(metadata['pubmed_id']))
    pmcids.remove('')
    pmids.remove('')
    pmc_to_sha = {k: v for k, v in zip(metadata['pmcid'], metadata['sha'])}
    pm_to_sha = {k: v for k, v in zip(metadata['pubmed_id'], metadata['sha'])}

    # list of already downloaded article ids
    existing_pmc = [file.stem for file in pmc_save_dir.glob('*.json')]
    existing_pm = [file.stem for file in pm_save_dir.glob('*.json')]

    # NB: any trailing partial batch (len % BATCH_SIZE) is never fetched.
    for i in range(len(pmcids) // BATCH_SIZE):
        # slice start fixed from `i` to `i * BATCH_SIZE` so batches don't overlap
        pmc_batch = pmcids[i * BATCH_SIZE:(i + 1) * BATCH_SIZE]
        pm_batch = pmids[i * BATCH_SIZE:(i + 1) * BATCH_SIZE]

        # only get new articles
        pmc_batch = [pid for pid in pmc_batch if pid not in existing_pmc]
        pm_batch = [pid for pid in pm_batch if pid not in existing_pm]

        # Pubmed Central
        if pmc_batch and PULL_PMC:
            pmc_query = PMC_URL + ','.join(pmc_batch)
            r = requests.get(pmc_query)
            write_json_files(r.text, pmc_to_sha, pm_to_sha, pmc_save_dir, pmc=True)
            sleep(SLEEPTIME)

        # Pubmed
        if pm_batch and PULL_PM:
            pm_query = PM_URL + ','.join(pm_batch)
            r = requests.get(pm_query)
            write_json_files(r.text, pmc_to_sha, pm_to_sha, pm_save_dir, pmc=False)
            sleep(SLEEPTIME)
def test_runs_without_errors(self, model_type):
    if Path.cwd() != Path(with_fixed_marker_names_folder):
        os.chdir(str(with_fixed_marker_names_folder))
    try:
        model = process_and_return_model(model_type)
    except Exception as e:
        assert 0, "Unexpected error: {}".format(e)
def find_root(path=None):
    """
    Finds the root of a python project.

    Traverses directories upwards, iteratively searching for root_indicators.
    If no match is found, the system root is returned and a warning is thrown.

    Parameters
    ----------
    path : Path, str or None
        The starting directory to begin the search.
        If none is set, uses Path.cwd()

    Returns
    -------
    Path
        Either the path where a root_indicator was found or the system root.
    """
    if path is None:
        return find_root(Path.cwd())
    path = Path(path)  # accept str, as the docstring promises
    for root_indicator in root_indicators:
        if path.joinpath(root_indicator).exists():
            return path.resolve()
    next_path = path / ".."
    if next_path.resolve() != path.resolve():
        return find_root(next_path)
    else:
        # we've hit the system root without finding an indicator
        warnings.warn("No project indicator found - returning root system directory")
        return path.resolve()
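# Usage sketch (an assumption, not from the source): `root_indicators` is the
# module-level list find_root() consults; the definition here is a plausible
# stand-in, and "config.yaml" is a hypothetical file.
root_indicators = [".git", ".here", "setup.py", "pyproject.toml"]

project_root = find_root()                    # search upward from Path.cwd()
config_path = project_root / "config.yaml"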
def __init__(self, DEBUG=False):
    # import settings or use these as defaults
    self.DEBUG = DEBUG
    self.per_push_min_commits = 5
    self.per_push_max_commits = 20
    self.per_push_range_of_commits = random.randrange(
        self.per_push_min_commits, self.per_push_max_commits)
    self.per_commit_additions = 10
    self.per_commit_subtractions = 2
    self.per_commit_edits = self.per_commit_additions + self.per_commit_subtractions
    self.current_path = Path.cwd()
    self.parent_path = self.current_path.parent

    # dummy repo we will use to make commits
    self.repo = 'https://github.com/CROOOME/automated_bughit.git'
    # default: repo_name [excluding extension]
    self.repo_name = self.repo.split("/")[-1].split(".")[0]
    self.repo_dir = Path.joinpath(self.parent_path, self.repo_name)
    self.commands = [
        'git add {}'.format('new_file'),  # make_file
        "git commit -m '{}'".format('commit message to be filled here'),
    ]
    self.extensions = ['c', 'cpp', 'py', 'html', 'css', 'js', 'txt', 'json', 'xml']
def __init__(self):
    self.defaults = {
        'configuration': None,
        'platforms': [],
    }
    self.xcode = None
    self.repo_overrides = dict()

    self.root_path = Path.cwd()  # type: Path

    self.library_directory = Path(os.path.expanduser('~/Library/io.schwa.Punic'))
    if not self.library_directory.exists():
        self.library_directory.mkdir(parents=True)
    self.repo_cache_directory = self.library_directory / 'repo_cache'
    if not self.repo_cache_directory.exists():
        self.repo_cache_directory.mkdir(parents=True)
    self.punic_path = self.root_path / 'Carthage'
    self.build_path = self.punic_path / 'Build'
    self.checkouts_path = self.punic_path / 'Checkouts'
    self.derived_data_path = self.library_directory / "DerivedData"

    runner.cache_path = self.library_directory / "cache.shelf"

    self.can_fetch = False
    self.xcode = Xcode.default()

    # Read in defaults from punic.yaml
    self.read(Path('punic.yaml'))
def pytest_runtest_setup(item):
    logging_plugin = item.config.pluginmanager.get_plugin("logging-plugin")
    tc_parameters.WORKING_DIR = working_dir = calculate_working_dir(item.config, item.name)
    logging_plugin.set_log_path(calculate_report_file_path(working_dir))
    item.user_properties.append(("working_dir", working_dir))
    item.user_properties.append(("relative_working_dir", working_dir.relative_to(Path.cwd())))
def test_update_and_build():
    if quick_tests_only:
        return
    temp_dir = setup()
    with work_directory(temp_dir):
        runner = Runner()
        output = runner.check_run('punic update')
        assert (Path.cwd() / 'Carthage/Build/Mac/SwiftIO.framework').exists()
        assert (Path.cwd() / 'Carthage/Build/Mac/SwiftUtilities.framework').exists()
        assert (Path.cwd() / 'Carthage/Build/Mac/SwiftIO.dSYM').exists()
        assert (Path.cwd() / 'Carthage/Build/Mac/SwiftUtilities.dSYM').exists()
        output = runner.check_run('punic build')
def _get_working_dir(cls, repo_root):
    repo_root = Path(repo_root).absolute()
    try:
        return Path.cwd().relative_to(repo_root).as_posix()
    except ValueError:
        # Working directory not under repository root
        return os.path.curdir
def execute_alm(self):
    self.logger.info('Executing ALM Processing.')  # f-string prefix dropped: no placeholders
    file_generator_alm = FileGeneratorALM(self.file_name, self.alm_dict)
    file_generator_alm.write_alm_file()

    # store text2drs tool's path
    original_path = Path.cwd()
    alm_file_path = Path(Path.cwd(), FilePaths.TEXT2ALM_OUTPUT_DIR, self.file_name, self.file_name)
    os.chdir(FilePaths.CALM)
    cmd = f'java -jar calm.jar {alm_file_path}.tp'
    return_code = subprocess.call(cmd, shell=True)  # TODO
    os.chdir(original_path)

    if return_code == 0:
        self._move_calm_output()
    return return_code
# Assumed decorator (likely stripped during extraction): the yield/finally
# shape below only makes sense as a contextlib context manager.
@contextlib.contextmanager
def cd(path):
    if not path:
        return
    prev_cwd = Path.cwd().as_posix()
    if isinstance(path, Path):
        path = path.as_posix()
    os.chdir(str(path))
    try:
        yield
    finally:
        os.chdir(prev_cwd)
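# Usage sketch (an assumption, not from the source): with the assumed
# @contextlib.contextmanager decorator above, cd() switches directories for
# the duration of the block; "build" is a hypothetical directory.
with cd("build"):
    print(Path.cwd())  # inside "build"
print(Path.cwd())      # back in the previous working directory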
def _get_entry_point(cls, repo_root, script_path):
    repo_root = Path(repo_root).absolute()

    try:
        # Use os.path.relpath as it calculates up dir movements (../)
        entry_point = os.path.relpath(str(script_path), str(Path.cwd()))
    except ValueError:
        # Working directory not under repository root
        entry_point = script_path.relative_to(repo_root)

    return Path(entry_point).as_posix()
def _cwd(cls):
    # return the current working directory (solve for hydra changing it)
    # check if running with hydra
    if sys.modules.get('hydra'):
        # noinspection PyBroadException
        try:
            # noinspection PyPackageRequirements
            import hydra
            return Path(hydra.utils.get_original_cwd()).absolute()
        except Exception:
            pass
    return Path.cwd().absolute()
def __init__(self, root_path=None):
    if not root_path:
        root_path = Path.cwd()
    self.config = config

    # NB: the local root_path is not used below; all paths come from self.config.root_path.
    root_project_identifier = ProjectIdentifier(overrides=None, project_name=self.config.root_path.name)

    self.all_repositories = {
        root_project_identifier: Repository(
            punic=self, identifier=root_project_identifier, repo_path=self.config.root_path),
    }

    self.root_project = self._repository_for_identifier(root_project_identifier)
def process_bin_files(platform):
    cwd = Path.cwd()
    obj_names = []
    for f in chain(cwd.glob("*.bin"), cwd.glob("*.dat")):
        obj_names.append(process_bin_file(f, platform))
    os.chdir("obj")
    _arch, _obj_suffix, lib_pattern = get_bin_type(platform)
    ar = "x86_64-w64-mingw32-ar" if platform == "win32" else "ar"
    cmd = [ar, "rvs", lib_pattern.format("v8_data")]
    cmd += obj_names
    logger.info(' '.join(cmd))
    call(cmd)
def main():  # pylint: disable=missing-docstring
    options = parseOptions(sys.argv[1:])
    cwd_prefix = Path.cwd() / "m"
    res = ShellResult(options, options.jsengineWithArgs, cwd_prefix, False)  # pylint: disable=no-member
    print(res.lev)
    if options.submit:  # pylint: disable=no-member
        if res.lev >= options.minimumInterestingLevel:  # pylint: disable=no-member
            testcaseFilename = options.jsengineWithArgs[-1]  # pylint: disable=invalid-name,no-member
            print("Submitting %s" % testcaseFilename)
            quality = 0
            options.collector.submit(res.crashInfo, str(testcaseFilename), quality)  # pylint: disable=no-member
        else:
            print("Not submitting (not interesting)")
def test_save(self):
    plots = vpl.scatter(np.random.uniform(-10, 10, (30, 3)))

    # I can't get python2 to cooperate with unicode here.
    # The os functions just don't like them.
    if sys.version_info[0] == 3:  # was sys.version[0] == "3"; version_info is the robust check
        path = Path.cwd() / u"ҢघԝઌƔࢳܢˀા\\Հએࡓ\u061cཪЈतயଯ\u0886.png"
        try:
            os.mkdir(str(path.parent))
            vpl.save_fig(path)
            self.assertTrue(path.exists())
            os.remove(str(path))
        finally:
            if path.parent.exists():
                os.rmdir(str(path.parent))
    else:
        path = Path.cwd() / "image.png"
        vpl.save_fig(path)
        os.remove(str(path))

    array = vpl.screenshot_fig(2)
    self.assertEqual(array.shape,
                     tuple(i * 2 for i in vpl.gcf().render_size) + (3,))

    plt.imshow(array)
    plt.show()

    shape = tuple(i * j for (i, j) in zip(vpl.gcf().render_size, (2, 3)))
    vpl.screenshot_fig(pixels=shape).shape
    # The following will fail depending on VTK version
    # self.assertEqual(vpl.screenshot_fig(pixels=shape).shape,
    #                  shape[::-1] + (3,))

    vpl.close()
    fig = vpl.figure()
    for plot in plots:
        fig += plot
    vpl.show()
def pytest_runtest_setup(item):
    logging_plugin = item.config.pluginmanager.get_plugin("logging-plugin")
    tc_parameters.WORKING_DIR = working_dir = calculate_working_dir(item.config, item.name)
    logging_plugin.set_log_path(calculate_report_file_path(working_dir))
    item.user_properties.append(("working_dir", working_dir))
    if working_dir_and_current_dir_has_common_base(working_dir):
        # relative path for working dir can be calculated from the current directory
        relative_working_dir = working_dir.relative_to(Path.cwd())
        item.user_properties.append(("relative_working_dir", relative_working_dir))
    else:
        # relative path for working dir cannot be calculated from the current directory
        # #define UNIX_PATH_MAX 108 (see /usr/include/linux/un.h)
        if len(str(working_dir)) + len("syslog_ng_server.ctl") > 108:
            raise ValueError(
                "Working directory path is too long; some socket files could not be saved. Please make it shorter.")
        item.user_properties.append(("relative_working_dir", working_dir))
def _normalize_root(root):
    """ Get the absolute location of the parent folder (where .git resides) """
    # Compare path parts from the right: every trailing component that `root`
    # and the cwd share is dropped from the cwd, leaving the prefix that
    # contains the repository checkout.
    root_parts = list(reversed(Path(root).parts))
    cwd_abs = list(reversed(Path.cwd().parts))
    count = len(cwd_abs)
    for i, p in enumerate(cwd_abs):
        if i >= len(root_parts):
            break
        if p == root_parts[i]:
            count -= 1
    cwd_abs.reverse()
    root_abs_path = Path().joinpath(*cwd_abs[:count])
    return str(root_abs_path)
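# Worked example (an illustration, not from the source): suppose Path.cwd()
# is /home/user/projects/app and root is "projects/app". Reversed, the cwd
# parts are ['app', 'projects', 'user', 'home', '/'] and the root parts are
# ['app', 'projects']; both pairs match, count drops from 5 to 3, and the
# function rebuilds '/home/user', the folder containing the checkout.
print(_normalize_root("projects/app"))  # '/home/user' when run from that cwd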
def make_gdb_cmd(prog_full_path, crashed_pid):
    """Construct a command that uses the POSIX debugger (gdb) to turn a minidump file into a stack trace.

    Args:
        prog_full_path (Path): Full path to the program
        crashed_pid (int): PID of the program

    Returns:
        list: gdb command list
    """
    assert os.name == "posix"

    # On Mac and Linux, look for a core file.
    core_name = ""
    core_name_path = Path()
    if platform.system() == "Darwin":
        # Core files will be generated if you do:
        #   mkdir -p /cores/
        #   ulimit -c 2147483648 (or call resource.setrlimit from a preexec_fn hook)
        core_name = "/cores/core." + str(crashed_pid)
        core_name_path = Path(core_name)
    elif platform.system() == "Linux":
        is_pid_used = False
        core_uses_pid_path = Path("/proc/sys/kernel/core_uses_pid")
        if core_uses_pid_path.is_file():
            with io.open(str(core_uses_pid_path), "r", encoding="utf-8", errors="replace") as f:
                # [0] takes the first character of the file, e.g. "0" or "1".
                is_pid_used = bool(int(f.read()[0]))
        core_name = "core." + str(crashed_pid) if is_pid_used else "core"
        core_name_path = Path.cwd() / core_name
        if not core_name_path.is_file():
            core_name_path = Path.home() / core_name  # try the home dir
    # pylint: disable=redefined-variable-type

    if core_name and core_name_path.is_file():
        dbggr_cmd_path = Path(__file__).parent / "gdb_cmds.txt"
        assert dbggr_cmd_path.is_file()  # pylint: disable=no-member

        # Run gdb and move the core file. Tip: gdb gives more info for:
        # (debug with intact build dir > debug > opt with frame pointers > opt)
        return ["gdb", "-n", "-batch", "-x", str(dbggr_cmd_path), str(prog_full_path), str(core_name)]
    return []
def __init__(self):
    self.xcode = None
    self.repo_overrides = dict()

    self.root_path = Path.cwd()  # type: Path

    self.library_directory = Path('~/Library/Application Support/io.schwa.Punic').expanduser()
    if not self.library_directory.exists():
        self.library_directory.mkdir(parents=True)
    self.repo_cache_directory = self.library_directory / 'repo_cache'
    if not self.repo_cache_directory.exists():
        self.repo_cache_directory.mkdir(parents=True)
    self.punic_path = self.root_path / 'Carthage'
    self.build_path = self.punic_path / 'Build'
    self.checkouts_path = self.punic_path / 'Checkouts'
    self.derived_data_path = self.library_directory / "DerivedData"

    self.platforms = Platform.all
    self.configuration = None
    self.fetch = False
    self.xcode = Xcode.default()
    self.toolchain = None
    self.dry_run = False
    self.use_submodules = False
    self.use_ssh = False
    self.skips = []
    self.verbose = False
    self.echo = False
    self.override_logs = []

    self.continuous_integration = 'CI' in os.environ
    if self.continuous_integration:
        logging.info("Running on continuous integration")

    # Read in defaults from punic.yaml (or punic.yml if that exists)
    punic_configuration_path = Path('punic.yaml')
    if not punic_configuration_path.exists():
        punic_configuration_path = Path('punic.yml')
    if punic_configuration_path.exists():
        self.read(punic_configuration_path)

    runner.cache_path = self.library_directory / "cache.shelf"
def __init__(self, pytest_request):
    testcase_name = calculate_testcase_name(pytest_request)
    relative_report_dir = pytest_request.config.getoption("--reports")
    absolute_framework_dir = Path.cwd()
    self.testcase_parameters = {
        "dirs": {
            "working_dir": Path(absolute_framework_dir, relative_report_dir, testcase_name),
            "relative_working_dir": Path(relative_report_dir, testcase_name),
            "install_dir": Path(pytest_request.config.getoption("--installdir")),
            "shared_dir": Path(absolute_framework_dir, "shared_files"),
        },
        "file_paths": {
            "testcase_file": Path(pytest_request.fspath),
        },
        "testcase_name": testcase_name,
        "valgrind_usage": pytest_request.config.getoption("--run-with-valgrind"),
    }
def __init__(self, current_date, stock_name, not_crypto=True):
    self.__stock = stock_name.upper()
    self.current_date = current_date
    if not_crypto:
        folder = 'StockData'
    else:
        folder = 'CryptoData'
    # equivalent to the original Path.joinpath(Path.cwd(), ...) call
    self.__file = Path.cwd() / 'Data' / folder / (self.__stock + ".csv")
    try:
        self.__df = pd.read_csv(self.__file)
    except IOError:
        print(f'Unable to open data from "{self.__file}"')
        sys.exit(1)
def __init__(self):
    self.xcode = None
    self.repo_overrides = dict()

    self.root_path = Path.cwd()  # type: Path

    self.library_directory = Path('~/Library/Application Support/io.schwa.Punic').expanduser()
    if not self.library_directory.exists():
        self.library_directory.mkdir(parents=True)
    self.repo_cache_directory = self.library_directory / 'repo_cache'
    if not self.repo_cache_directory.exists():
        self.repo_cache_directory.mkdir(parents=True)
    self.punic_path = self.root_path / 'Carthage'
    self.build_path = self.punic_path / 'Build'
    self.checkouts_path = self.punic_path / 'Checkouts'
    self.derived_data_path = self.library_directory / "DerivedData"

    self.platforms = Platform.all
    self.configuration = None
    self.fetch = False
    self.xcode = Xcode.default()
    self.toolchain = None
    self.dry_run = False
    self.use_submodules = False
    self.use_ssh = False
    self.skips = []
    self.verbose = False
    self.echo = False

    self.continuous_integration = 'CI' in os.environ
    if self.continuous_integration:
        logging.info("Running on continuous integration")

    # Read in defaults from punic.yaml (or punic.yml if that exists)
    punic_configuration_path = Path('punic.yaml')
    if not punic_configuration_path.exists():
        punic_configuration_path = Path('punic.yml')
    if punic_configuration_path.exists():
        self.read(punic_configuration_path)

    runner.cache_path = self.library_directory / "cache.shelf"
def set_here(wd=None):
    """
    Creates a .here file at the passed directory.

    Parameters
    ----------
    wd : Path object or string
        The directory that a .here file will be created in.
        If none is set, uses Path.cwd()
    """
    if wd is None:
        wd = Path.cwd()
    elif isinstance(wd, str):
        wd = Path(wd)

    # Create the target directory itself. The original called wd.parent.mkdir,
    # which only made the parent, so touch() below could still fail.
    wd.mkdir(parents=True, exist_ok=True)
    wd.joinpath(".here").touch()
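# Usage sketch (an assumption, not from the source): drop a .here marker into
# a hypothetical directory so project-root lookups can stop there.
set_here("/tmp/demo-project")
# With the mkdir fix above, /tmp/demo-project/.here now exists.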
import logging
from datetime import datetime
from pathlib import Path  # needed for Path.cwd() below; missing in the original
from six import integer_types, string_types
from pandas import DataFrame, Panel
from numpy import hstack, degrees, zeros_like, ndarray, atleast_1d, float32
from os import chdir
# from histutils.fortrandates import datetime2yd
from histutils.findnearest import find_nearest
from gridaurora.readApF107 import readmonthlyApF107
from gridaurora.zglow import glowalt
import glowaurora
from glowaurora import glowfort

glowpath = glowaurora.__path__[0]  # restored to live code: chdir(glowpath) below requires it
oldcwd = Path.cwd()

def runglowaurora(eflux, e0, t0, glat, glon, f107apfn=None, f107a=None, f107=None, f107p=None, ap=None):
    #%% (-2) check/process user inputs
    assert isinstance(eflux, (float, integer_types, ndarray))
    assert isinstance(e0, (float, float32, integer_types))
    assert isinstance(t0, (datetime, string_types))
    assert isinstance(glat, (float, integer_types))
    assert isinstance(glon, (float, integer_types))
    #%% (-1) if no manual f10.7 and ap, autoload by date
    if not (f107a and f107 and f107p and ap):
        f107Ap = readmonthlyApF107(t0, f107apfn)
        f107a = f107p = f107Ap['f107s']
        f107 = f107Ap['f107o']
        ap = (f107Ap['Apo'],) * 7

    chdir(glowpath)  # FIXME: hack for path issue