def build(self, output_dir=""): """ :type output_dir: path """ self.dedup() for separator in self.separators: self.split(separator) self.root.export_source_tree() if not output_dir: return elif isdir(output_dir): print("Error: Output directory {} already exists.".format( output_dir)) return for node in self.root.iter_dfs(): if node.orig_file == "__VIRTUAL_TOP__": continue elif node.orig_file == self.root.childs[0][1].orig_file: target_dir = opjoin(output_dir, 'CONSTRAINT') is_top = True else: target_dir = opjoin(output_dir, self.mapping[dirname(node.orig_file)]) is_top = False node.build(target_dir=target_dir, is_top=is_top)
def init_cam_vari(self, ip, port):
    self.npix = THERMAL_WIDTH * THERMAL_HEIGHT
    self.ip = ip
    self.port = port
    dll_path = opjoin('dll', 'CG_ThermalCamDll_2018.dll')
    self.dll = windll.LoadLibrary(dll_path)
    self.dll_2015 = windll.LoadLibrary(opjoin('dll', 'ThermalCamDll_2015.dll'))
    self.mHandle = wintypes.HANDLE()
    self.keepAlive = c_uint()
    self.camData = td.IRF_IR_CAM_DATA_T()
    #self.ushort_ptr = (c_ushort * (THERMAL_WIDTH * THERMAL_HEIGHT))()
    #self.camData.ir_image = cast(self.ushort_ptr, POINTER(c_ushort))
    self.m16 = np.zeros((THERMAL_HEIGHT, THERMAL_WIDTH), dtype=np.uint16)
    self.acm32 = np.zeros((THERMAL_HEIGHT, THERMAL_WIDTH), dtype=np.uint32)
    self.camData.ir_image = self.m16.ctypes.data_as(POINTER(c_ushort))
    self.camData.image_buffer_size = 4 * THERMAL_WIDTH * THERMAL_HEIGHT
    self.lpsize = (c_byte * 8192)()
    self.camData.lpNextData = cast(self.lpsize, POINTER(c_byte))
    self.camData.dwSize = 0
    self.camData.dwPosition = 0
    if COX_MODEL == 'CG':
        self.corrPara = td.IRF_TEMP_CORRECTION_PAR_T_CG()
    else:
        self.corrPara = td.IRF_TEMP_CORRECTION_PAR_T()
    self.corrPara.atmTemp = 25.0
    self.corrPara.atmTrans = 1.0
    self.corrPara.emissivity = 1.0
    self.pfloat_lut = (c_float * 65536)()
    self.dll.SendCameraMessage.restype = c_short
    self.dll.SendCameraMessage.argtypes = [
        wintypes.HANDLE, POINTER(c_uint), c_int, c_ushort, c_ushort
    ]
def build_logging(loglevel: str, logfile: str, message_format: str,
                  time_format: str):
    if logfile:
        logpath = opjoin(PROJECT_DIR, 'logs')
        if not os.path.exists(logpath):
            os.mkdir(logpath)
        path = opjoin(logpath, logfile)
        loghandler = RotatingFileHandler(path, mode='a',
                                         maxBytes=10 * 1024 * 1024,
                                         backupCount=10, encoding=None,
                                         delay=0)
        loghandler.setFormatter(logging.Formatter(message_format, time_format))
        loghandler.setLevel(loglevel)
        logger = logging.getLogger()
        logger.setLevel(loglevel)
        logger.addHandler(loghandler)
    else:
        logging.basicConfig(
            format=message_format,
            datefmt=time_format,
            level=loglevel,
        )
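# Usage sketch for build_logging (hedged: 'app.log' is a hypothetical file
# name, not from the original source). With a logfile given, messages go to a
# 10 MiB rotating file under <PROJECT_DIR>/logs, keeping ten backups:
build_logging('INFO', 'app.log',
              '%(asctime)s %(levelname)s %(message)s',
              '%Y-%m-%d %H:%M:%S')
logging.getLogger(__name__).info('logging configured')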
def set_time_interval(from_day: float, to_day: float):
    """Sets the start and stop times in the param.in file.

    :param from_day:
    :param to_day:
    :return:
    """
    def _edit_file(filepath: str,
                   callback: Callable[[Iterable[str], TextIO], None]):
        with open(filepath) as f:
            out_fname = filepath + ".tmp"
            out = open(out_fname, "w")
            callback(f, out)
            out.close()
        os.rename(out_fname, filepath)

    def _update_params(infile: Iterable[str], outfile: TextIO):
        startday_pattern = ' start time (days)= '
        stopday_pattern = ' stop time (days) = '
        for line in infile:
            if line.startswith(startday_pattern):
                line = '%s%f\n' % (startday_pattern, from_day)
            if line.startswith(stopday_pattern):
                line = '%s%f\n' % (stopday_pattern, to_day)
            outfile.write(line)

    integrator_path = opjoin(Config.get_project_dir(),
                             CONFIG['integrator']['dir'])
    param_in_filepath = opjoin(integrator_path,
                               CONFIG.INTEGRATOR_PARAM_FILENAME)
    _edit_file(param_in_filepath, _update_params)
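# Worked example (comment-only sketch; assumes param.in already contains the
# two pattern lines). After set_time_interval(0, 365.25) the matching lines
# read:
#
#    start time (days)= 0.000000
#    stop time (days) = 365.250000
#
# because '%f' renders the floats with six decimal places.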
def install(self, opts=[]):
    if rutils.is_true(self.get('BUILD')):
        res = super(CppUnitProject, self).install(opts=opts)
    else:
        res = []
    res += self.install_files(self.rc_path, self.install_rc_path, ['.*'])
    if self.get_lower(self.test_run_var_name) == 'yes':
        if self.type == 'shared':
            raise CppUnitError(self, "Cannot run a shared CppUnit test.")
        run_env = self.env.Clone()
        dirs = racy.renv.dirs
        install_bin = opjoin(dirs.install, "bin")
        install_lib = opjoin(dirs.install, "lib")
        run_env.Append(ENV=os.environ)
        run_env.AppendENVPath(racy.renv.LD_VAR, install_lib)
        execpath = opjoin(install_bin, self.full_name)
        run_test = run_env.Alias('run-' + self.name, res, execpath)
        if res:
            run_env.Depends(run_test, res[0])
        run_env.AlwaysBuild(run_test)
        res += run_test
    return res
def clean():
    image_names = read_image_dir()
    removed = 0
    for image_name in image_names:
        basename, ext = get_base_ext(image_name)
        if len(ext) < 2 or ext[1] not in ['jpg', 'jpeg', 'png']:
            os.remove(opjoin(image_dir, image_name))
            removed += 1
            continue
        # Try to open
        try:
            im = Image.open(opjoin(image_dir, image_name))
            # Check resolution
            w, h = im.size
            if min(w, h) < args.clean:
                removed += 1
                im.close()
                os.remove(opjoin(image_dir, image_name))
                continue
            # Convert to RGB, save as JPEG
            im = im.convert('RGB')
            im.save(opjoin(image_dir, image_name), "JPEG")
        except Exception:  # narrowed from a bare except so Ctrl-C still works
            os.remove(opjoin(image_dir, image_name))
            removed += 1
    print(f'Removed {removed} images during cleaning process')
def session_info(self, where, name):
    """
    Get information on the specified session

    If where is '', will auto try the webscarab temp dir
    If where is not '', will try session nicknames

    @param where: directory name where the sessions are located
    @param name: specific name of subdirectory containing desired session
    @return: C{dict} containing id, domains, transactions, date, seentids
             and whether the session is currently active
    """
    if not where:
        where = self.get_tmpdir()
    if not opexists(opjoin(where, name)):
        wstmp = 'webscarab%s.tmp' % name
        if not opexists(opjoin(where, wstmp)):
            return {}
        name = wstmp
    session = {}
    wsdir = opjoin(where, name)
    session['id'] = wsdir
    m = re.search(r'webscarab(\d+).tmp', wsdir)
    if m:
        session['nickname'] = m.group(1)
    elif wsdir.rfind(os.sep) > -1:
        session['nickname'] = wsdir[wsdir.rfind(os.sep):]
    session['domains'] = self.domains_in_dir(wsdir)
    session['transactions'] = self.transactions(wsdir)
    session['date'] = os.path.getctime(wsdir)
    session['seentids'] = []
    if os.path.exists(opjoin(wsdir, 'conversationlog')):
        session['active'] = False
    else:
        session['active'] = True
    return session
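# Usage sketch (hedged: 'proxy' stands for a hypothetical instance of the
# class this method belongs to, and '12345' is a made-up session number; an
# empty 'where' falls back to the webscarab temp dir):
info = proxy.session_info('', '12345')
if info:
    print(info['nickname'], info['date'], info['active'])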
def mc2reco(fi, version="v5r4p0", newpath=""):
    """ converts allGamma-vXrYpZ_100GeV_10TeV-p2.noOrb.740485.mc.root
    to allGamma-v5r4p0_100GeV_10TeV-p2.noOrb.740485.reco.root"""
    #print '*** DEBUG: file: ', fi
    vtag = research(r"v\dr\dp\d", fi)
    if vtag is None:
        vtag = research(r"r\d{4}", fi)
    vtag = vtag.group(0)
    # lastly, replace the path
    if fi.startswith("root:"):
        #print fi
        fi = ("/%s" % fi.split("//")[2])
    fname = basename(fi)
    path = dirname(fi)
    task = basename(path)
    npath = opjoin(newpath, task)
    fout = opjoin(npath, fname)
    fout = fout.replace(".mc", ".reco")
    # version tag shouldn't be there more than 10 times; if we do not include
    # this criterion and MC-version == reco version, this would yield an
    # infinite loop!
    max_occ = 10
    occ = 0
    while vtag in fout:
        fout = fout.replace(vtag, version)
        occ += 1
        if occ >= max_occ:
            break
    #print "*** DBG ***: ", fout
    return fout
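# Worked example (comment sketch; the paths are hypothetical). With
#   fi = '/data/task-v6r0p0/allGamma-v6r0p0_100GeV_10TeV-p2.noOrb.740485.mc.root'
# mc2reco(fi, version='v5r4p0', newpath='/out') returns
#   '/out/task-v5r4p0/allGamma-v5r4p0_100GeV_10TeV-p2.noOrb.740485.reco.root'
# Note that str.replace swaps *every* occurrence of the detected tag at once,
# so the task directory name is rewritten along with the file name.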
def load_np(self, scan_id):
    path2scan = opjoin(self.augmented_data_path,
                       'generated_{}.mhd.npy'.format(scan_id))
    scan = np.load(path2scan)
    mask, origin, spacing, is_flip = load_itk_image(
        opjoin(segment_path, '{}.mhd'.format(scan_id)))
    return scan, origin, spacing, is_flip, mask, scan_id
def _make_res(by_resonance: ResonanceMixin, filepaths: List[str],
              planets: tuple, integers: List[str]):
    asteroid_name = by_resonance.small_body.name
    resonance_id = by_resonance.id
    phase_storage = PhaseStorage.file
    phase_builder = PhaseBuilder(phase_storage)
    phase_loader = PhaseLoader(phase_storage)
    phase_cleaner = PhaseCleaner(phase_storage)
    print('Loading aei files.')
    builder = FilepathBuilder(filepaths, True)
    planet_aei_paths = [builder.build('%s.aei' % x) for x in planets]
    resmaker = ResfileMaker(planets, planet_aei_paths)
    getter = AEIDataGetter(builder)
    orbital_element_sets = build_bigbody_elements(planet_aei_paths)
    orbital_elem_set_facade = ResonanceOrbitalElementSetFacade(
        orbital_element_sets, by_resonance)
    aei_data = getter.get_aei_matrix(asteroid_name)
    phase_builder.build(aei_data, resonance_id, orbital_elem_set_facade)
    phases = phase_loader.load(resonance_id)
    folder = opjoin(getcwd(), 'res')
    if not exists(folder):
        mkdir(folder)
    resmaker.make(phases, aei_data, opjoin(
        folder, '%s_%s_%s.res' % (asteroid_name, '_'.join(planets),
                                  '_'.join([str(x) for x in integers]))
    ))
    phase_cleaner.delete(resonance_id)
def test_filepathbuilder(aei_fixture, is_recursive: bool):
    builder = FilepathBuilder(aei_fixture, is_recursive)
    if not is_recursive:
        with pytest.raises(FilepathException):
            assert builder.build(TEST_FILE) == opjoin(SUB_FIXTURES, TEST_FILE)
    else:
        assert builder.build(TEST_FILE) == opjoin(SUB_FIXTURES, TEST_FILE)
    assert builder.build(TEST2_FILE) == opjoin(TEMP_FIXTURES, TEST2_FILE)
def load(self, subfolder=None):
    self.baseline_mc = FROCMetricsCalculator(label=self.baseline_label)
    subfolder = '' if subfolder is None else subfolder
    baseline_load_path = opjoin(subfolder, self.baseline_label)
    self.baseline_mc.load(baseline_load_path + '.npy')
    self.augmented_mc = FROCMetricsCalculator(label=self.augmented_label)
    aug_load_path = opjoin(subfolder, self.augmented_label)
    self.augmented_mc.load(aug_load_path + '.npy')
def ready(self):
    # Create directories
    os.makedirs(opjoin(settings.TRAINING_DIR, 'ckpts'), exist_ok=True)
    os.makedirs(opjoin(settings.TRAINING_DIR, 'logs'), exist_ok=True)
    os.makedirs(opjoin(settings.INFERENCE_DIR, 'ckpts'), exist_ok=True)
    os.makedirs(opjoin(settings.INFERENCE_DIR, 'logs'), exist_ok=True)
    os.makedirs(settings.DATASETS_DIR, exist_ok=True)
    os.makedirs(settings.OUTPUTS_DIR, exist_ok=True)
def save_h(self):
    path = opjoin('cfg', 'x_h.cfg')
    with open(path, 'w') as fid:
        fid.write('%.3f' % self.x_h)
    path = opjoin('cfg', 'y_h.cfg')
    with open(path, 'w') as fid:
        fid.write('%.3f' % self.y_h)
class SmallBodiesFileBuilderTestCase(unittest.TestCase):
    FILEPATH = opjoin(PROJECT_DIR, 'small.in')
    SYMLINK = opjoin(PROJECT_DIR, 'small.in.link')

    def tearDown(self):
        if opexists(self.FILEPATH):
            os.remove(self.FILEPATH)

    def test_create_small_body_file(self):
        from resonances.integrator import SmallBodiesFileBuilder

        def _test_with_symlink():
            builder = SmallBodiesFileBuilder(self.FILEPATH, self.SYMLINK)
            builder.create_small_body_file()
            self.assertTrue(opexists(self.FILEPATH))
            self.assertTrue(opexists(self.SYMLINK))
            os.remove(self.FILEPATH)
            os.remove(self.SYMLINK)

        def _test_without_symlink():
            builder = SmallBodiesFileBuilder(self.FILEPATH)
            builder.create_small_body_file()
            self.assertTrue(opexists(self.FILEPATH))
            self.assertFalse(opexists(self.SYMLINK))
            os.remove(self.FILEPATH)

        _test_with_symlink()
        _test_without_symlink()

    def test_flush(self):
        from resonances.integrator import SmallBodiesFileBuilder

        def _test_file_existence():
            builder = SmallBodiesFileBuilder(self.FILEPATH)
            builder.add_body(1, [1., 2., 3., 4., 5., 6., 7.])
            with self.assertRaises(FileNotFoundError):
                builder.flush()

        def _test_success_flush():
            builder = SmallBodiesFileBuilder(self.FILEPATH)
            builder.create_small_body_file()
            builder.add_body(1, [1., 2., 3., 4., 5., 6., 7.])
            builder.flush()
            with open(self.FILEPATH) as f:
                total_lines_count = sum(1 for _ in f)
            self.assertEqual(total_lines_count, 7)
            os.remove(self.FILEPATH)

        _test_file_existence()
        _test_success_flush()
def get_pyg_dataset(dataroot, dataset):
    if dataset in ['cora', 'citeseer', 'pubmed']:
        graph = pyg.datasets.Planetoid(root=opjoin(dataroot, dataset),
                                       name=dataset.capitalize())
    elif dataset == 'coauthorcs':
        graph = pyg.datasets.Coauthor(root=opjoin(dataroot, dataset),
                                      name='CS')
    else:
        raise NotImplementedError
    return graph
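# Usage sketch (assumes torch_geometric is importable as `pyg`, as above, and
# that './data' is writable; Planetoid downloads Cora on first use):
cora = get_pyg_dataset('./data', 'cora')
print(cora[0])  # a single Data object with x, edge_index, y and split masks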
def load_bbody_emv(self):
    self.bbody_emv = [0, 0]
    path = opjoin('cfg', 'bbody_emv_h.cfg')
    with open(path, 'r') as fid:
        self.bbody_emv[0] = float(fid.read())
    path = opjoin('cfg', 'bbody_emv_l.cfg')
    with open(path, 'r') as fid:
        self.bbody_emv[1] = float(fid.read())
def runner_src(self):
    sources = {
        'shared': 'testRunnerShared.cpp',
        'exec':   'testRunnerExec.cpp',
        'xml':    'testRunnerXML.cpp',
    }
    runner = sources.get(self.test_type)
    runner = [opjoin(self.runner_build_dir, runner)]
    if self.associated_prj.is_bundle:
        runner += [opjoin(self.runner_build_dir, 'testBundle.cpp')]
    return runner
def create_index(data_path):
    ids = []
    for root, _, files in os.walk(data_path):
        if glob.glob(opjoin(data_path, root, '*xml')):
            nodules = parseXML(opjoin(data_path, root))
            id2roi = create_map_from_nodules(nodules)
            if len(id2roi) == 0:
                continue
            ids.append(root)
    with open('index.json', 'w') as write_file:
        json.dump(ids, write_file)
def select_brand():
    mappings = get_all_brands(return_type=dict)
    targets_path = get_targets_path()
    msg = ""
    for k, v in mappings.items():
        msg += f"{k}: {v}\n"
    select = input(f"Please Input 'Crawler Number' Or 'all' \n{msg}")
    if select != "all":
        return opjoin(targets_path, mappings[select])
    elif select == "all":
        return list(map(lambda c: opjoin(targets_path, c), mappings.values()))
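# Usage note (sketch): the return type depends on the prompt answer -- a
# single path (str) for one crawler number, a list of paths for 'all'. A
# caller can normalise both cases like this (hypothetical calling code):
targets = select_brand()
if isinstance(targets, str):
    targets = [targets]
for target in targets:
    print(target)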
def __init__(self, output_dir: str):
    outoption = CONFIG['output']
    self.output_dir = output_dir
    self.output_images = opjoin(output_dir, outoption['images'])
    self.output_res_path = opjoin(output_dir, outoption['angle'])
    self.output_gnu_path = opjoin(output_dir, outoption['gnuplot'])
    if not os.path.exists(self.output_images):
        os.makedirs(self.output_images)
    if not os.path.exists(self.output_gnu_path):
        os.makedirs(self.output_gnu_path)
def save_npy(self, start, end):
    for i in range(start, end):
        print('processing %d' % i)
        try:
            imgs, bbox = self.get_data_from_dcm(i)
        except Exception:  # narrowed from a bare except; fall back to i - 1
            imgs, bbox = self.get_data_from_dcm(i - 1)
        save_imgs_path = opjoin(self.lidc_npy_path, 'imgs_%d.npy' % i)
        save_bbox_path = opjoin(self.lidc_npy_path, 'bbox_%d.npy' % i)
        np.save(save_bbox_path, bbox)
        np.save(save_imgs_path, imgs)
def extract_trans(t):
    if not 199 < int(t['code']) < 300:
        return
    if 'respbody' not in t:
        return
    if not t['respbody']:
        return
    try:
        if not opexists(Extract):
            os.mkdir(Extract)
        pdir = opjoin(Extract, t['hostname'] + '_' + t['port'])
        if not opexists(pdir):
            os.mkdir(pdir)
        os.makedirs(opjoin(pdir, t['dir'][1:]))
    except OSError, e:
        if e.errno != 17:
            log.error('Error: %s' % e.strerror)
    fn = t['file'].rsplit('.', 1)
    if len(fn[0]) > 30:
        fn[0] = fn[0][:30]
    outfn = opjoin(pdir, t['dir'][1:], fn[0].translate(FNTrans))
    outfn += '.' + str(t['id'])
    if len(fn) > 1:
        if len(fn[1]) > 10:
            fn[1] = fn[1][:10]
        outfn += '.' + fn[1].translate(FNTrans)
    #log.info('extract_trans: writing to %s' % outfn)
    f = open(outfn, 'wb')
    f.write(t['respbody'])
    f.close()
def target_file(self):
    """
    Query command of target file name.
    """
    if self.stage:
        segments = basename(self.orig_file).split(".")
        segments.insert(-1, self.stage)
        target_file = opjoin(dirname(self.orig_file), ".".join(segments))
    else:
        target_file = self.orig_file
    if self.target_dir:
        target_file = opjoin(self.target_dir, basename(target_file))
    return target_file
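# Worked example (comment-only sketch; the names are hypothetical): with
# orig_file='rtl/top.sv', stage='split' and target_dir='out', the stage tag is
# inserted before the extension and the file is relocated:
#   basename segments: ['top', 'sv'] -> ['top', 'split', 'sv']
#   target_file == opjoin('out', 'top.split.sv')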
def test_make_plot(asteroid_number):
    resfilepath = opjoin(PROJECT_DIR, 'tests', 'fixtures',
                         'A%s.res' % asteroid_number)
    gnufilepath = opjoin(PROJECT_DIR, 'tests', 'fixtures',
                         'A%s.gnu' % asteroid_number)
    pngfilepath = opjoin(PROJECT_DIR, 'tests', 'fixtures',
                         'A%s.png' % asteroid_number)
    make_plot(resfilepath, gnufilepath, pngfilepath)
    assert os.path.exists(pngfilepath)
    assert os.path.exists(gnufilepath)
    os.remove(pngfilepath)
    os.remove(gnufilepath)
def save_thd(self):
    if COX_MODEL == 'CG':
        fname = opjoin('cfg', 'thd_cg.cfg')
    else:
        fname = opjoin('cfg', 'thd.cfg')
    with open(fname, 'w') as fid:
        self.change_thd(self.cfg_data.thd_cels)
        fid.write('%d' % self.cfg_data.thd)
    path = opjoin('cfg', 'thd_cels.cfg')
    with open(path, 'w') as fid:
        fid.write('%.1f' % self.cfg_data.thd_cels)
def get(self, session, tinfo):
    t = {}
    t['id'] = tinfo['id']
    t['source'] = self.proxy_name
    with open(opjoin(session['id'], 'conversations',
                     tinfo['id'] + '-request'), 'rb') as f:
        t['request'] = f.read()
    with open(opjoin(session['id'], 'conversations',
                     tinfo['id'] + '-response'), 'rb') as f:
        t['response'] = f.read()
    # XXX: this needs to verify that the transactions are actually complete
    session['seentids'].append(tinfo['id'])
    return t
def get_files_with_nodules(nodules, root):
    files = os.listdir(root)
    image_ids_with_nodules = set()
    for nodule in nodules:
        for roi in nodule['roi']:
            image_ids_with_nodules.add(roi['sop_uid'])
    result = []
    for file in files:
        if not file.endswith('dcm'):
            continue
        _, ds = imread(opjoin(root, file))
        if ds.SOPInstanceUID in image_ids_with_nodules:
            result.append(opjoin(root, file))
    return result
def main():
    gfpath = sys.argv[1]
    dirpath = sys.argv[2]
    with open(gfpath, 'r') as f:
        lines = [line.split(',') for line in f]
    for line in lines:
        if int(line[1]) > 4:
            if not os.path.exists(opjoin('temp_forms', line[0])):
                os.makedirs(opjoin('temp_forms', line[0]))
            lst = [
                f for f in os.listdir(opjoin(dirpath, line[0]))
                if '.jpg' in f
            ]
            shuffle(lst)
            for f in lst[:5]:
                # print(opjoin(dirpath, line[0], f))
                copyfile(opjoin(dirpath, line[0], f),
                         opjoin('temp_forms', line[0], f))
            for temp in os.listdir(opjoin(dirpath, line[0])):
                if 'template' in temp:
                    copyfile(opjoin(dirpath, line[0], temp),
                             opjoin('temp_forms/trains', line[0], temp))
            print(line[0])
def _build_from_dirs(self, for_name: str) -> str:
    res = None
    if self._is_recursive:
        for path_base in self._dirs:
            for filepath in glob.iglob(opjoin(path_base, '**', for_name),
                                       recursive=True):
                res = filepath
                break
    else:
        for path_base in self._dirs:
            filepath = opjoin(path_base, for_name)
            if opexists(filepath):
                res = filepath
    return res
def rel_file(self):
    """
    Query command of relative file name. Relative file name is used when
    rendering script names in source commands.
    """
    if self.stage:
        segments = basename(self.orig_file).split(".")
        segments.insert(-1, self.stage)
        rel_file = opjoin(dirname(self.orig_file), ".".join(segments))
    else:
        rel_file = self.orig_file
    if self.target_dir:
        rel_file = opjoin(self.rel_dir, basename(rel_file))
    return rel_file
def install_nodes(self):
    prj = self
    env = self.env
    initmodel = opjoin(prj.rc_path, '__init__.py')
    if os.path.isfile(initmodel):
        content = rutils.get_file_content(initmodel)
        initfile = opjoin(prj.local_dir, '__init__.py')
        write = prj.WriteBuilder(initfile, content)
        copy = prj.CopyBuilder(
            '${LOCAL_DIR}',
            prj.install_pkg_path,
            marker_name='InstallPkgs'
        )
        env.Depends(copy, write)
        return [copy, write]
def get(self, request, *args, **kwargs):
    """Retrieve results about an inference process

    This API provides information about an `inference` process. In a
    classification task it returns the list of images and an array composed
    of the class prediction scores. In a segmentation task it returns the
    URLs of the segmented images.
    """
    if not self.request.query_params.get('process_id'):
        error = {'Error': 'Missing required parameter `process_id`'}
        return Response(data=error, status=status.HTTP_400_BAD_REQUEST)
    process_id = self.request.query_params.get('process_id')
    infer = models.Inference.objects.filter(celery_id=process_id)
    if not infer:
        # already deleted weight/training or inference
        return Response(
            {"result": "Process stopped before finishing or non existing."},
            status=status.HTTP_404_NOT_FOUND)
    if AsyncResult(process_id).status == 'PENDING':
        return Response(
            {"result": "Process in execution. Try later for output results."},
            status=status.HTTP_200_OK)
    infer = infer.first()
    if not os.path.exists(opjoin(settings.OUTPUTS_DIR, infer.outputfile)):
        return Response({"result": "Output file not found"},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    with open(opjoin(settings.OUTPUTS_DIR, infer.outputfile), 'r') as outputs:
        # Differentiate classification and segmentation
        if infer.modelweights_id.model_id.task_id.name.lower() == 'classification':
            lines = outputs.read().splitlines()
            lines = [line.split(';') for line in lines]
            # preds = self.trunc(preds, decs=8)
        else:
            # Segmentation: the output file contains paths of files
            uri = request.build_absolute_uri(settings.MEDIA_URL)
            lines = outputs.read().splitlines()
            lines = [l.replace(settings.OUTPUTS_DIR, uri) for l in lines]
    response = {'outputs': lines}
    return Response(response, status=status.HTTP_200_OK)
def genres(asteroid_number: int, integers: List[int], filepaths: List[str],
           planets: Tuple):
    t1 = aliased(Planet)
    t2 = aliased(Planet)
    cond = len(integers) == 3
    resonance_cls = ThreeBodyResonance if cond else TwoBodyResonance
    query = (
        session.query(resonance_cls)
        .outerjoin(t1, t1.id == resonance_cls.first_body_id)
        .filter(t1.name == planets[0])
    )
    if cond:
        query = query.outerjoin(
            t2, t2.id == resonance_cls.second_body_id
        ).filter(t2.name == planets[1])
    query = query.outerjoin(
        Asteroid, Asteroid.id == resonance_cls.small_body_id
    ).filter(Asteroid.name == "A%i" % asteroid_number)
    resonance = query.first()
    if not resonance:
        logging.warning("There is no resonance matching the pointed filter.")
        return
    resonance_id = resonance.id
    phase_storage = PhaseStorage.file
    phase_builder = PhaseBuilder(phase_storage)
    phase_loader = PhaseLoader(phase_storage)
    phase_cleaner = PhaseCleaner(phase_storage)
    print("Loading aei files.")
    builder = FilepathBuilder(filepaths, True)
    planet_aei_paths = [builder.build("%s.aei" % x) for x in planets]
    resmaker = ResfileMaker(planets, planet_aei_paths)
    getter = AEIDataGetter(builder)
    orbital_element_sets = build_bigbody_elements(planet_aei_paths)
    orbital_elem_set_facade = ResonanceOrbitalElementSetFacade(
        orbital_element_sets, resonance)
    aei_data = getter.get_aei_data(asteroid_number)
    phase_builder.build(aei_data, resonance_id, orbital_elem_set_facade)
    phases = phase_loader.load(resonance_id)
    folder = opjoin(getcwd(), "res")
    if not exists(folder):
        mkdir(folder)
    resmaker.make(
        phases, aei_data,
        opjoin(folder, "A%i_%s_%s.res" % (
            asteroid_number, "_".join(planets),
            "_".join([str(x) for x in integers]))),
    )
    phase_cleaner.delete(resonance_id)
def aei_fixture(request):
    os.mkdir(TEMP_FIXTURES)
    os.mkdir(SUB_FIXTURES)
    test_filepaths = [opjoin(SUB_FIXTURES, TEST_FILE),
                      opjoin(TEMP_FIXTURES, TEST2_FILE)]
    for path in test_filepaths:
        with open(path, 'w') as test_file:
            test_file.write('123')
    paths = (FIXTURES, TEMP_FIXTURES)

    def fin():
        for item in test_filepaths:
            os.remove(item)
        os.rmdir(TEMP_FIXTURES)
        os.rmdir(SUB_FIXTURES)

    request.addfinalizer(fin)
    return paths
def load_resonances(start: int, stop: int, file: str, axis_swing: float,
                    planets: Tuple[str]):
    assert axis_swing > 0.
    from commands import load_resonances as _load_resonances
    if not os.path.isabs(file):
        file = os.path.normpath(opjoin(os.getcwd(), file))
    if file == RESONANCE_FILEPATH:
        logging.info('%s will be used as source of integers' % file)
    for i in range(start, stop, STEP):
        end = i + STEP if i + STEP < stop else stop
        _load_resonances(file, i, end, planets, axis_swing)
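# Worked example of the chunking (comment sketch): with start=1, stop=250 and
# STEP=100 the loop issues _load_resonances for the ranges
#   (1, 101), (101, 201), (201, 250)
# so the final chunk is clipped to `stop` instead of overrunning it.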
def simple_clean(with_aei=True):
    """Execute simple_clean.sh

    :param with_aei:
    :rtype bool:
    """
    for ext in EXTENSIONS:
        for filename in glob(opjoin(INTEGRATOR_DIR, '*.%s' % ext)):
            os.remove(filename)
    if with_aei:
        aei_clean()
def gimme(root, wanted):
    try:
        for f in find(root, wanted):
            yield opjoin(f + '.sdc', 'Data.h5')
        return
    except ValueError, e:
        if not str(e).startswith('not a directory'):
            raise
    with open(root) as h:
        for line in h:
            yield line.rstrip('\n')
def _build_from_tars(self, for_name: str) -> str:
    archives = ([self._last_tar] + self._archives if self._last_tar
                else self._archives)
    for tarname in archives:
        with taropen(tarname) as tarfile:  # type: TarFile
            for taritem in tarfile:  # type: TarInfo
                filepath = taritem.name
                if for_name not in filepath:
                    continue
                tarfile.extract(taritem, EXTRACT_PATH)
                self._last_tar = tarname
                return opjoin(EXTRACT_PATH, filepath)
    return None
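# Behaviour sketch (hedged reading of the code above): the archive that
# satisfied the previous lookup is tried first on the next call, which helps
# when consecutive requests hit the same tar. Matching is substring-based, so
# 'A1.aei' also matches a member named 'path/to/A1.aei'; the member is
# extracted under EXTRACT_PATH and that extracted path is returned, or None
# when no archive contains it.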
def create_install_targets(self, list_dir):
    # list_targets = [(dir, targets), ...]
    list_targets = []
    install_dir = racy.renv.dirs.install
    for tdir in list_dir:
        dir_path = opjoin(install_dir, tdir)
        if os.path.exists(dir_path):
            targets = self.create_targets(dir_path)
            list_targets.append((tdir, targets))
    return list_targets
def result(self, deps_results):
    res = super(CppUnitProject, self).result(deps_results=deps_results)
    if self.associated_prj.is_bundle:
        env = self.env
        if self.type == 'shared':
            raise CppUnitError(
                self, "Cannot build a bundle test as a shared library.")
        prj_version = r'\"{0.version}\"'.format(self)
        env.AppendUnique(CPPDEFINES=('CPPUNIT_TEST_VERSION', prj_version))
        bundle_test_header = opjoin(self.runner_src_path, 'testBundle.hpp')
        env['FORCE_INCLUDE'] = bundle_test_header
        env.Depends(res, env.File(bundle_test_header))
    return res
def test_orbital_elements():
    filepath = opjoin(PROJECT_DIR, PARAMS['integrator']['dir'], 'A1.aei')
    collection = OrbitalElementSetCollection(filepath)
    assert len(collection.orbital_elements) == A1_AEI_FILE_LEN
    assert len(collection) == A1_AEI_FILE_LEN
    orbitalelements = collection.orbital_elements[0]
    assert orbitalelements == collection[0]
    assert orbitalelements.time == 0.0000000
    assert orbitalelements.p_longitude == radians(1.541309E+02)
    assert orbitalelements.mean_anomaly == radians(3.172742E+02)
    assert orbitalelements.semi_axis == 2.76503
    assert orbitalelements.eccentricity == 0.077237
    assert orbitalelements.inclination == radians(int(10.6047))
    assert orbitalelements.node == radians(int(80.4757))
def remove_export_directory(start: int, stop: int = None) -> bool:
    """Removes the directory created by the package method.

    :param start:
    :param stop:
    :return: flag that says whether the operation succeeded
    """
    if not stop:
        stop = start + BODIES_COUNTER
    export_dir = opjoin(EXPORT_BASE_DIR, '%i-%i' % (start, stop))
    if os.path.exists(export_dir):
        logging.info('Clear directory %s...' % export_dir)
        shutil.rmtree(export_dir)
        logging_done()
        return True
    else:
        logging.info('Nothing to delete')
        return False
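# Worked example (comment sketch): with BODIES_COUNTER == 100,
# remove_export_directory(1) targets opjoin(EXPORT_BASE_DIR, '1-101'), while
# remove_export_directory(1, 100) targets '1-100'; the return value reports
# whether anything was actually deleted.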
def find(root, wanted=None, sortkey=None):
    if not os.path.isdir(root):
        raise ValueError('not a directory: %s' % root)
    for r, ds, fs in os.walk(root):
        if sortkey:
            ks = sortkey
        else:
            ks = (None, None)
        for seq, sk in zip((ds, fs), ks):
            if sk:
                seq.sort(key=sk)
            else:
                seq.sort()
        for s in (ds, fs):
            for i in s:
                if wanted is None or wanted(i, r, isdir=(s == ds)):
                    yield opjoin(r, i)
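# Usage sketch (the predicate and path are hypothetical): collect every
# '.dcm' file under a tree, in sorted walk order. `wanted` receives the entry
# name, its parent dir, and an isdir keyword, so directories can be skipped:
def _is_dcm(name, parent, isdir=False):
    return (not isdir) and name.endswith('.dcm')

dcm_files = list(find('/data/scans', wanted=_is_dcm))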
def _get_from_s3(filepaths: List[str]) -> List[str]:
    new_paths = []
    if any([is_s3(x) for x in filepaths]):
        conn = S3Connection(CONFIG['s3']['access_key'],
                            CONFIG['s3']['secret_key'])
        bucket = conn.get_bucket(BUCKET)
        for path in filepaths:
            if not is_s3(path):
                continue
            start = path.index(BUCKET)
            filename = path[start + len(BUCKET) + 1:]
            if not opexists(S3_FILES_DIR):
                makedirs(S3_FILES_DIR)
            local_path = opjoin(S3_FILES_DIR, basename(filename))
            if not opexists(local_path):
                s3key = bucket.get_key(filename, validate=False)
                with open(local_path, 'wb') as f:
                    s3key.get_contents_to_file(f)
            if not is_tarfile(local_path):
                raise FilepathInvalidException('%s is not tar. Local copy %s'
                                               % (path, local_path))
            new_paths.append(local_path)
    return new_paths
def scan(wproxy, session, checks, pmd, urlfilter, hostfilter):
    """
    Parse all transactions in a given directory

    @param wproxy: pmproxy subclass
def runner_build_dir(self):
    return opjoin(self.build_dir, 'CPPUnit_runner')
def runner_src_path(self):
    return opjoin(CPPUNIT_PLUGIN_PATH, 'rc')
from os.path import join as opjoin
from shutil import copyfile
import os

from settings import Config

PROJECT_DIR = Config.get_project_dir()
INTEGRATOR_PATH = opjoin(PROJECT_DIR, Config.get_params()['integrator']['dir'])
PARAMS = Config.get_params()


def test_set_time_interval():
    from integrator import set_time_interval

    def _copyfile(name: str):
        path = opjoin(INTEGRATOR_PATH, name)
        target = opjoin(INTEGRATOR_PATH, name + '.backup')
        copyfile(path, target)
        return path

    param_in_filepath = _copyfile(PARAMS.INTEGRATOR_PARAM_FILENAME)
    set_time_interval(1, 2)
    startday_assert_flag = False
    stopday_assert_flag = False
    with open(param_in_filepath) as f:
        for line in f:
            startday_assert_flag = (startday_assert_flag or
                                    (' start time (days)= 1' in line))
            stopday_assert_flag = (stopday_assert_flag or
                                   (' stop time (days) = 2' in line))
    assert startday_assert_flag
    assert stopday_assert_flag
from os.path import exists as opexists
from os.path import join as opjoin
from os.path import isabs
from os import makedirs
import glob
from tarfile import is_tarfile
from tarfile import open as taropen
import os
from typing import List
from settings import Config
import shutil
from os.path import basename

PROJECT_DIR = Config.get_project_dir()
CONFIG = Config.get_params()
_ex_folder = CONFIG['extract_dir']
EXTRACT_PATH = _ex_folder if isabs(_ex_folder) else opjoin(PROJECT_DIR,
                                                           _ex_folder)
BUCKET = CONFIG['s3']['bucket']
_s3_folder = CONFIG['s3files_dir']
S3_FILES_DIR = _s3_folder if isabs(_s3_folder) else opjoin(PROJECT_DIR,
                                                           _s3_folder)


class FilepathException(Exception):
    pass


class FilepathInvalidException(Exception):
    pass


def _get_from_s3(filepaths: List[str]) -> List[str]:
def get_path (self, path = ""): """Returns <project>/test/[path]""" root = super(CppUnitProject, self).get_path(constants.TEST_PATH) path = opjoin(root, self.cppunit_test_dir, path) return os.path.abspath(os.path.normpath(path))
def get_options_file(prj):
    opt_file = opjoin(
        prj.get_path(constants.TEST_PATH),
        CppUnitProject.cppunit_option_file
    )
    return opt_file
import logging
from typing import TypeVar

import click

from .internal import asteroid_interval_options
from .internal import asteroid_time_intervals_options
from .internal import Path
from .internal import report_interval_options
from settings import Config
from os.path import join as opjoin

LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
PHASE_STORAGE = ['REDIS', 'DB', 'FILE']
PLANETS = ['EARTHMOO', 'JUPITER', 'MARS', 'MERCURY', 'NEPTUNE', 'PLUTO',
           'SATURN', 'URANUS', 'VENUS']

CONFIG = Config.get_params()
PROJECT_DIR = Config.get_project_dir()
RESONANCE_TABLE_FILE = CONFIG['resonance_table']['file']
RESONANCE_FILEPATH = opjoin(PROJECT_DIR, 'axis', RESONANCE_TABLE_FILE)
STEP = CONFIG['integrator']['number_of_bodies']
INTEGRATOR_DIR = CONFIG['integrator']['dir']

BodyCountType = TypeVar('T', str, int)


@click.group()
@click.option('--loglevel', default='DEBUG', help='default: DEBUG',
              type=click.Choice(LEVELS))
@click.option('--logfile', default=None, help='default: None', type=str)
def cli(loglevel: str = 'DEBUG', logfile: str = None):
    build_logging(getattr(logging, loglevel), logfile,
                  '%(asctime)s %(levelname)s %(message)s',
                  '%Y-%m-%d %H:%M:%S')
def result(self, deps_results=True):
    prj = self
    env = self.env
    result = []

    class ConfigureMethods(object):
        prj = self
        for name, f in self.env_functions.items():
            locals()[name] = f

    prj.configure_env()
    prj.configure_consumer(ConfigureMethods, False)
    command = CommandWrapper(prj, 'SysCommand')
    prj.prj_locals['generate']()
    if racy.renv.is_darwin():
        install_tool = opjoin(racy.get_bin_path(), '..', 'Utils',
                              'osx_install_name_tool.py')
        libs = [self.lib_path]
        deps_lib = self.ENV['DEPS_LIB']
        if deps_lib:
            libs += deps_lib.split(':')
        command(['python', install_tool, '-i', '-a', '-P', '*',
                 '-s', self.build_bin_path] + libs, pwd='.')
    # import strings and functions defined by the generate method
    for k, v in self.ENV.items():
        if isinstance(v, basestring) or callable(v):
            env[k] = v
    res = [
        self.MkdirBuilder('${LOCAL_DIR}'),
        self.MkdirBuilder('${LOCAL_DIR}/bin'),
        self.MkdirBuilder('${LOCAL_DIR}/lib'),
        self.MkdirBuilder('${LOCAL_DIR}/include'),
    ]
    res += BuilderWrapper.apply_calls(prj, **self.ENV)
    downloads = [x for x in res if self.download_target in str(x)]
    map(res.remove, downloads)
    res = downloads + res
    previous_node = []
    for nodes in res:
        if not isinstance(nodes, LibextProject):
            for node in nodes:
                # HACK: scons needs a name attribute to manage dependencies
                if not hasattr(node, "name"):
                    node.name = ''
                env.Depends(node, previous_node)
                previous_node = node
        elif deps_results:
            previous_node = [previous_node, nodes.deps_build_nodes]
        if not isinstance(nodes, LibextProject):
            result += nodes
        else:
            result += previous_node
    for node in [prj.extract_dir]:
        env.Clean(node, node)
    alias = 'result-{prj.type}-{prj.full_name}'
    result = env.Alias(alias.format(prj=self), result)
    return result