def processJclTemplate(setup):
    # setup['in_sample_file'] = PureWindowsPath(ats_dir_win / dp_dir / setup['in_sample_file'])
    # setup['out_of_sample_file'] = PureWindowsPath(ats_dir_win / dp_dir / setup['out_of_sample_file'])
    search_path = f"{ats_dir}/Code/Templates/JCL/"
    logger.debug(f"platform = {platform.system()}")
    if platform.system() == "Windows":
        search_path = str(PureWindowsPath(ats_dir_win / jcl_tplt_dir))
    logger.debug(f"search_path={search_path}")
    templateLoader = FileSystemLoader(searchpath=search_path)
    # templateLoader = FileSystemLoader(searchpath=f"{ats_dir}/Code/Templates/JCL/")
    templateEnv = Environment(loader=templateLoader)
    TEMPLATE_FILE = setup["jcl_version"]
    logger.debug(f"JCL template file: {TEMPLATE_FILE}")
    template = templateEnv.get_template(TEMPLATE_FILE)
    outputText = template.render(
        setup=setup,
        # in_sample_file=in_sample_file,
        # out_of_sample_file=out_of_sample_file,
    )
    return outputText

def read_directory(file_nr, first_time):
    filepath_list = []
    if first_time == 1:
        global filepath
        filepath = input("enter path to folder: ")
        print('\n')
    filename = Path(filepath)  # input file path
    path_on_windows = PureWindowsPath(filename)
    onlyfiles = [
        f for f in listdir(path_on_windows)
        if isfile(os.path.join(path_on_windows, f))
    ]
    filepath_list = [filepath + "\\" + s for s in onlyfiles]
    first_time = 0
    global initcounter
    initcounter = len(filepath_list)
    global data
    data = []
    pre_init_csvfile(filepath_list[file_nr])
    data += init_csvfile(filepath_list[file_nr])
    return data

def __init__(self, host_info_dict):
    self.machine_id = get_checked_value(host_info_dict, KEY_MACHINE_ID)
    self.host_name = get_checked_value(host_info_dict, KEY_HOST)
    self.user_name = get_checked_value(host_info_dict, KEY_USER)
    self.user_password = ''
    if KEY_PASSWORD in host_info_dict:
        # password is optional
        self.user_password = host_info_dict[KEY_PASSWORD]
    self.os_type = get_checked_value(host_info_dict, KEY_OSTYPE)
    self.temp_dir = None
    if self.os_type == "Windows":
        self.temp_dir = PureWindowsPath(
            get_checked_value(host_info_dict, KEY_TEMPDIR))
    elif self.os_type == "Linux":
        self.temp_dir = PurePosixPath(
            get_checked_value(host_info_dict, KEY_TEMPDIR))
    else:
        raise Exception(
            'Function needs to be extended to handle os type ' + self.os_type)

def save_active_set(self, path):
    # must include suffix
    path = self.__add_set_suffix(path)
    # directory
    dir = PureWindowsPath(path).parent
    is_dir = str(dir) != '.'
    # change directory
    restore_dir = None
    if not is_dir:
        restore_dir = self.file.directory()
        self.file.cd(Directory.recall_sets)
    scpi = ":MMEM:STOR:STAT 1,'{0}'"
    scpi = scpi.format(path)
    self.write(scpi)
    self.pause()
    if restore_dir:
        self.file.cd(restore_dir)

def __scan_all_files(self, root):
    """Return all files of a folder."""
    # give a limit of number of scanned files to avoid pathological cases
    limit = 200000
    result = set()
    # find package
    package = None
    if self.packages:
        # there may be several packages containing the path...
        for p in self.packages:
            if str(PureWindowsPath(p.get_path())) in root:
                if package:
                    # ambiguity: take the longest one
                    if len(p.get_path()) > len(package.get_path()):
                        package = p
                else:
                    package = p
    for dirname, _, filenames in os.walk(root):
        # print path to all filenames.
        for filename in filenames:
            result.add(File(os.path.join(dirname, filename), package))
            # paranoid
            if len(result) > limit:
                self.root_limit[root] = True
                return result
    return result

def processStrategyTemplate(template, hdr, desc, setup, logic):
    logger.debug("processStrategyTemplate ....")
    search_path = f"{ats_dir}/Code/Templates/Strategy/"
    if platform.system() == "Windows":
        search_path = str(PureWindowsPath(ats_dir_win / strat_tplt_dir))
    templateLoader = FileSystemLoader(searchpath=search_path)
    templateEnv = Environment(loader=templateLoader)
    TEMPLATE_FILE = template
    logger.debug(f"process template: {TEMPLATE_FILE}")
    template = templateEnv.get_template(TEMPLATE_FILE)
    outputText = template.render(
        header=hdr,
        chart_setup=desc["chart_setup"],
        prototype_info=desc["prototype_info"],
        # dp_data=dp,
        comments=desc,
        poi="more testing for poi",
        filters=logic["filters"],
        setup=setup,
        logic=logic,
    )
    return outputText

def get_fileNum(self, path, day, picCacheCheck, fileCheck, picCheck, videoCheck):
    dir_name = PureWindowsPath(path)
    # Convert path to the right format for the current operating system
    correct_path = Path(dir_name)
    now = datetime.datetime.now()
    if picCacheCheck:
        path_one = correct_path / 'Attachment'
        path_two = correct_path / 'FileStorage/Cache'
        self.getPathFileNum(now, day, path_one, path_two)
    if fileCheck:
        path_one = correct_path / 'Files'
        path_two = correct_path / 'FileStorage/File'
        self.getPathFileNum(now, day, path_one, path_two)
    if picCheck:
        path_one = correct_path / 'Image/Image'
        path_two = correct_path / 'FileStorage/Image'
        self.getPathFileNum(now, day, path_one, path_two)
    if videoCheck:
        path_one = correct_path / 'Video'
        path_two = correct_path / 'FileStorage/Video'
        self.getPathFileNum(now, day, path_one, path_two)

def __init__(self):
    current_date = datetime.datetime.today().strftime('%Y%m%d')
    # log_name = 'logs\\' + current_date + '_test_etl.log'
    file_path = Path("C:/Radni/File_from_site/logs/" + current_date + ".txt")
    win_file_name = PureWindowsPath(file_path)
    # test_db = NewDatabaseManager('DESKTOP-QP2G51N\SQLEXPRESS', 'Examples', log_name)
    test_db = NewDatabaseManager(
        'sqlservertestmladen.database.windows.net', 'dbtestmladen', win_file_name)
    sql_data = """INSERT INTO dbo.table1
        (CAP_ACC,UNIT,SCP,STATION,LINENAME,DIVISION,[DATE],[TIME],[DESC],ENTRIES,EXITS)
        VALUES ( ? , ? , ? , ? , ? , ?, ?, ?, ?, ?, ?)"""
    with open('C:/Radni/File_from_site/txt_file_36.txt', 'r') as f:
        reader = csv.reader(f)
        # columns = next(reader)
        # query = open(file[4], 'r')
        # query = query.read()
        for row in reader:
            # test_db.insert_sql_data(query, row)
            test_db.insert_sql_data(sql_data, row)

def get_pregenerated_events(bpod_trials, settings):
    num = settings.get("PRELOADED_SESSION_NUM", None)
    if num is None:
        num = settings.get("PREGENERATED_SESSION_NUM", None)
    if num is None:
        fn = settings.get('SESSION_LOADED_FILE_PATH', '')
        fn = PureWindowsPath(fn).name
        num = ''.join([d for d in fn if d.isdigit()])
        if num == '':
            raise ValueError("Can't extract left probability behaviour.")
    # Load the pregenerated file
    ntrials = len(bpod_trials)
    sessions_folder = Path(raw.__file__).parent.joinpath(
        "extractors", "ephys_sessions")
    fname = f"session_{num}_ephys_pcqs.npy"
    pcqsp = np.load(sessions_folder.joinpath(fname))
    pos = pcqsp[:, 0]
    con = pcqsp[:, 1]
    pos = pos[:ntrials]
    con = con[:ntrials]
    contrastRight = con.copy()
    contrastLeft = con.copy()
    contrastRight[pos < 0] = np.nan
    contrastLeft[pos > 0] = np.nan
    qui = pcqsp[:, 2]
    qui = qui[:ntrials]
    phase = pcqsp[:, 3]
    phase = phase[:ntrials]
    pLeft = pcqsp[:, 4]
    pLeft = pLeft[:ntrials]
    phase_path = sessions_folder.joinpath(f"session_{num}_stim_phase.npy")
    is_patched_version = parse_version(
        settings.get('IBLRIG_VERSION_TAG', 0)) > parse_version('6.4.0')
    if phase_path.exists() and is_patched_version:
        phase = np.load(phase_path)[:ntrials]
    return {'position': pos, 'quiescence': qui, 'phase': phase, 'probabilityLeft': pLeft,
            'contrastRight': contrastRight, 'contrastLeft': contrastLeft}

def _compress(root_data_folder, command, flag_pattern, dry=False, max_sessions=None):
    # runs a command of the form command = "ls -1 {file_name}.avi"
    c = 0
    for flag_file in Path(root_data_folder).rglob(flag_pattern):
        ses_path = flag_file.parent
        files2compress = flags.read_flag_file(flag_file)
        for f2c in files2compress:
            cfile = ses_path.joinpath(PureWindowsPath(f2c))
            c += 1
            if max_sessions and c > max_sessions:
                return
            print(cfile)
            if dry:
                continue
            if not cfile.exists():
                logger.error('NON-EXISTING RAW FILE: ' + str(cfile))
            # run the compression command redirecting output
            cfile.parent.joinpath(cfile.stem)
            # if the output file already exists, overwrite it
            outfile = cfile.parent / (cfile.stem + '.mp4')
            if outfile.exists():
                outfile.unlink()
            command2run = command.format(file_name=cfile.parent.joinpath(cfile.stem))
            process = subprocess.Popen(command2run, shell=True,
                                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            info, error = process.communicate()
            if process.returncode != 0:
                logger.error('COMPRESSION FAILED FOR ' + str(cfile))
                flags.excise_flag_file(flag_file, removed_files=f2c)
                with open(cfile.parent.joinpath('extract.error'), 'w+') as fid:
                    fid.write(command2run)
                    fid.write(error.decode())
                continue
            # if the command was successful delete the original file
            cfile.unlink()
            # then remove the file from the compress flag file
            flags.excise_flag_file(flag_file, removed_files=f2c)
            # and add the file to register_me.flag
            flags.write_flag_file(ses_path.joinpath('register_me.flag'),
                                  file_list=cfile.stem)

def upload(username):
    user = User.query.filter_by(username=username).first_or_404()
    if request.method == 'POST':
        if 'photo' not in request.files:
            flash('No file part')
            return redirect(url_for('user', username=username))
        file = request.files['photo']
        if file.filename == '':
            flash("No selected file")
            return redirect(url_for('user', username=username))
        if file and allowed_file(file.filename):
            # TODO: logic to delete old photos using an OS-neutral path
            # if user.profile_photo:
            #     delete_old = os.path.normpath(user.profile_photo)
            #     flash(str(delete_old))
            #     os.remove(user.profile_photo)
            # save requested img to server
            filename = Path(secure_filename(file.filename)).as_posix()
            filename_2 = photos.save(request.files['photo'])
            # establish OS-neutral file path to save to DB
            pure_path = Path(PureWindowsPath('\\static\\img\\'))
            user.profile_photo = os.path.join(pure_path.as_posix(), filename)
            # commit filepath string to db
            db.session.commit()
            flash("New Avatar Uploaded Successfully!")
            return redirect(url_for('user', username=username))
    return render_template('upload.html', username=username)

def find_pairs(root_data_folder):
    """Find all passive sessions that need transfer and where to."""
    root_data_folder = Path(root_data_folder)
    settings_files = list(root_data_folder.rglob("_iblrig_taskSettings.raw.json"))
    n_settings_files = len(settings_files)
    if n_settings_files == 0:
        log.warning(f"Found {n_settings_files} sessions")
    else:
        log.info(f"Found {n_settings_files} sessions")
    # Load the corresponding ephys session path from the settings file if it exists
    pairs = []
    for sf in settings_files:
        # Get session path from settings file
        source_spath = session_path(sf)
        if source_spath is None:
            continue
        # Find the root_data_path for session
        subjects_folder_path = Path(*Path(source_spath).parts[:-3])
        # Load reference to corresponding ephys session (ces) which comes in windows format
        ces = load_settings_file(sf, key="CORRESPONDING_EPHYS_SESSION")
        # if CORRESPONDING_EPHYS_SESSION does not exist, it's not a passive session
        if ces is None:
            continue
        # Convert windows path to corresponding session name (csn) in native Path format
        csn = Path(*PureWindowsPath(ces).parts[-3:])
        target_spath = str(subjects_folder_path / csn)
        pairs.append((source_spath, target_spath))
    # Remove sessions that are already transferred, i.e. source and destination files are ==
    from_to_pairs = [(x, y) for x, y in pairs if x != y]
    n_pairs = len(from_to_pairs)
    if n_pairs == 0:
        log.warning(f"Found {n_pairs} passive sessions to move")
    else:
        log.info(f"Found {n_pairs} passive sessions to move")
    return from_to_pairs

def get_filtered_tag_ids(self, halo_map):
    tag_index = halo_map.tag_index.STEPTREE
    tag_index_ids = set()
    tag_ids = self.detokenize_tag_ids(halo_map)
    for tag_id in tag_ids:
        if isinstance(tag_id, int):
            tag_index_ids.add(tag_id)
            continue
        tag_path = PureWindowsPath(tag_id)
        tag_class = tag_path.suffix
        if len(tag_path.parts) > 1:
            # tag path has multiple parts. use the whole
            # path as the tag_path, minus the extension.
            tag_path = str(tag_path.with_suffix(""))
        else:
            # only one piece in the tag path. use the only
            # piece (minus the extension) as the tag_path.
            tag_path = tag_path.stem
        if set(tag_path) == set("*"):
            # tag path is all asterisks. This means we're only
            # matching based on extension, so match all tag paths
            tag_path = ""
        exact = tag_path and tag_class
        tag_index_ids.update(
            get_filtered_tag_index_ids(tag_index, tag_path,
                                       tag_class.lstrip("."), exact))
    # make sure all the tag_ids are valid
    tag_id_range = range(len(tag_index))
    for i in tuple(tag_index_ids):
        if i not in tag_id_range:
            tag_index_ids.remove(i)
    return list(tag_index_ids)

def get_host_info(host: str, user: str, path: str, port: int) -> Tuple[OS, PurePath]:
    client = paramiko.client.SSHClient()
    client.load_system_host_keys()
    client.connect(hostname=host, username=user, port=port)
    stdin, stdout, stderr = client.exec_command("uname -a")
    fout: str = stdout.readline().lower()
    ferr: str = stderr.readline().lower()
    if "linux" in fout:
        return OS.LINUX, PurePosixPath(path)
    elif "darwin" in fout:
        return OS.MACOS, PurePosixPath(path)
    elif "not recognized" in ferr:
        return OS.WINDOWS, PureWindowsPath(path)
    else:
        logging.error(
            "Found unsupported platform on remote host, assuming Linux and hoping for the best."
        )
        return OS.LINUX, PurePosixPath(path)

def write_path_template(self):
    rootp = self.reg_root
    if self.path_semantics == 'posix':
        ret = PurePosixPath(self._write_path_template)
    elif self.path_semantics == 'windows':
        ret = PureWindowsPath(self._write_path_template)
    elif self.path_semantics is None:
        # We are forced to guess which path semantics to use.
        # Guess that the AD driver is running on the same OS as this client.
        ret = PurePath(self._write_path_template)
    else:
        # This should never happen, but just for the sake of future-proofing...
        raise ValueError(f"Cannot handle path_semantics={self.path_semantics}")
    if self._read_path_template is None and rootp not in ret.parents:
        if not ret.is_absolute():
            ret = rootp / ret
        else:
            raise ValueError(
                ('root: {!r} is not consistent with '
                 'read_path_template: {!r}').format(rootp, ret))
    return _ensure_trailing_slash(str(ret), path_semantics=self.path_semantics)

def load_material_data(self, material_name):
    material = self.all_materials.get_material(material_name)
    data_filepath = Path(PureWindowsPath(material.path))
    df = pandas.read_csv(project_dir / data_filepath)
    freq_dict_key = [key for key in df.keys() if "freq" in key][0]
    eps_mat_r_key = [key for key in df.keys() if "epsilon_r" in key][0]
    eps_mat_i_key = [key for key in df.keys() if "epsilon_i" in key][0]
    frequencies = np.array(df[freq_dict_key])
    data_slice = np.where((frequencies > self.frequency_range[0]) &
                          (frequencies < self.frequency_range[1]))
    data_slice = data_slice[0][::int(1 // self.frequency_resolution_multiplier)]
    eps_mat_r = np.array(df[eps_mat_r_key])[data_slice]
    eps_mat_i = np.array(df[eps_mat_i_key])[data_slice]
    eps_mat1 = (eps_mat_r + eps_mat_i * 1j).reshape(len(data_slice), 1)
    return eps_mat1, frequencies[data_slice].reshape(len(data_slice), 1)

def _iter_dx10_files(self) -> Generator[ArchiveFile, None, None]:
    """Iterates over the parsed data for DX10 archives and yields instances of `ArchiveFile`.

    Raises:
        ValueError: If a filename cannot be determined for a specific file record

    Yields:
        :class:`.ArchiveFile`: A file contained within the archive
    """
    filename_offset = 0
    for file_container in self.container.files:
        filepath_content = self.content[(
            self.container.header.names_offset + filename_offset):]
        filepath = PascalString(Int16ul, "utf8").parse(filepath_content)
        filename_offset += len(filepath) + 2
        (dds_header, dx10_header) = self._build_dds_headers(file_container)
        if dds_header:
            dds_content = b"DDS "
            dds_content += dds_header
            if dx10_header:
                dds_content += dx10_header
            for tex_chunk in file_container.chunks:
                if tex_chunk.packed_size > 0:
                    dds_content += Compressed(GreedyBytes, "zlib").parse(
                        self.content[tex_chunk.offset:(
                            tex_chunk.offset + tex_chunk.packed_size)])
                else:
                    dds_content += self.content[tex_chunk.offset:(
                        tex_chunk.offset + tex_chunk.unpacked_size)]
            yield ArchiveFile(filepath=PureWindowsPath(filepath), data=dds_content)

def test_convert_posix_to_nt(self):
    # test only on a Windows host
    if not is_nt_host:
        self.skipTest('NT host only')
    rootfs = PureWindowsPath(r'../examples/rootfs/x86_linux')
    expected = str(realpath(rootfs) / 'test')
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', '/test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', '/../test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', '/../../test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', '/../xxxx/../test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', 'test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', '../test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', '../../test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/', '../xxxx/../test'))
    expected = str(realpath(rootfs) / 'proc' / 'test')
    self.assertEqual(expected, posix_to_native(str(rootfs), '/proc', 'test'))
    self.assertEqual(expected, posix_to_native(str(rootfs), '/proc/sys', '../test'))
    self.assertEqual(
        expected, posix_to_native(str(rootfs), '/proc/sys/net', '../../test'))
    self.assertEqual(
        expected, posix_to_native(str(rootfs), '/proc/sys', '../xxxx/../test'))

def win2lin(self):
    # path
    path = self.ui.path_field.toPlainText()
    # print(path)
    filename = PureWindowsPath(path)
    # convert to posix
    path_on_linux = PurePosixPath(filename)
    # get drive_letter
    drive_letter = str(path_on_linux)[:2]
    # print(drive_letter)
    # remove drive_letter
    path_on_linux = str(path_on_linux)[3:]
    # replace spaces
    if self.ui.convert_space_box.checkState() == 2:
        path_on_linux = path_on_linux.replace(" ", "%20")
    # add prefix
    if self.ui.check_prefix_dict.checkState() == 0:
        path_on_linux = self.ui.prefix_box.currentText() + path_on_linux
        self.ui.label_custom_prefix.setEnabled(True)
    else:
        lin_prefix = config.prefix_dict[drive_letter]
        path_on_linux = lin_prefix + path_on_linux
        self.ui.label_custom_prefix.setEnabled(False)
    # print(path_on_linux)
    # put in new path field
    self.ui.converted_path_field.setPlainText(path_on_linux)
    # adjust win path letter
    winpath = self.ui.path_field.toPlainText()
    winpath = winpath[:2]
    self.ui.win_prefix_box.setCurrentText(winpath)

def parse_attachment(self):
    attachments = []
    self.parse_has_attachment()
    if self._has_attachment:
        self.parse_attachment_type()
        attach_id = 0
        for part in self.email.walk():
            if part['content-disposition'] is not None:
                # e.g. text/html; CHARSET=ISO-8859-1; name=Horreur!.html
                content_type = part['content-type']
                _type = content_type.split(';')[0].split('/')[0].lower()
                if _type.lower() == 'null':
                    # some emails have a content-type without a type; impossible to open
                    continue
                content_ext = content_type.split(';')[0].split('/')[1].lower()
                file_ext = find_ext(content_ext)
                if file_ext is None:
                    # skip files that are not in doc_type_ext
                    continue
                raw_content = part.get_payload(decode=True)
                output_path = PureWindowsPath(
                    os.getcwd() + '\\output\\attachment_email_decrypteur')
                output = save_attachment(output_path, raw_content, self.email,
                                         attach_id, file_ext, save=False,
                                         get_output=True)
                attachments.append(output)
                attach_id += 1
    self._attachments = attachments

def un_install_client_package_impl(self):
    """Uninstall client package"""
    uninstaller = "Uninstall.exe"
    tmp_uninstaller = Path("c:/tmp") / uninstaller
    uninstaller = self.cfg.install_prefix / uninstaller
    if uninstaller.exists():
        # copy out the uninstaller as the windows facility would do:
        shutil.copyfile(uninstaller, tmp_uninstaller)
        cmd = [
            tmp_uninstaller,
            "/S",
            "_?=" + str(PureWindowsPath(self.cfg.install_prefix)),
        ]
        logging.info("running windows package uninstaller")
        logging.info(str(cmd))
        uninstall = psutil.Popen(cmd)
        try:
            uninstall.wait(600)
        except psutil.TimeoutExpired as exc:
            print("uninstall timed out, taking screenshot, re-raising!")
            filename = "windows_uninstall_client_package_screenshot.png"
            with mss() as sct:
                sct.shot(output=filename)
            attach(
                filename,
                name="Screenshot ({fn})".format(fn=filename),
                attachment_type=AttachmentType.PNG,
            )
            uninstall.kill()
            raise Exception("uninstall failed to complete on time") from exc
    if self.cfg.log_dir.exists():
        shutil.rmtree(self.cfg.log_dir)
    if tmp_uninstaller.exists():
        tmp_uninstaller.unlink()

def un_install_server_package_for_upgrade(self):
    """hook to uninstall old package for upgrade"""
    # once we modify it, the uninstaller will leave it there...
    if self.get_arangod_conf().exists():
        self.get_arangod_conf().unlink()
    uninstaller = "Uninstall.exe"
    tmp_uninstaller = Path("c:/tmp") / uninstaller
    uninstaller = self.cfg.install_prefix / uninstaller
    if uninstaller.exists():
        # copy out the uninstaller as the windows facility would do:
        shutil.copyfile(uninstaller, tmp_uninstaller)
        cmd = [
            tmp_uninstaller,
            "/PURGE_DB=0",
            "/S",
            "_?=" + str(PureWindowsPath(self.cfg.install_prefix)),
        ]
        logging.info("running windows package uninstaller")
        logging.info(str(cmd))
        uninstall = psutil.Popen(cmd)
        try:
            uninstall.wait(600)
        except psutil.TimeoutExpired as exc:
            print("upgrade uninstall timed out, taking screenshot, re-raising!")
            filename = "windows_upgrade_screenshot.png"
            with mss() as sct:
                sct.shot(output=filename)
            attach(
                filename,
                name="Screenshot ({fn})".format(fn=filename),
                attachment_type=AttachmentType.PNG,
            )
            uninstall.kill()
            raise Exception("upgrade uninstall failed to complete on time") from exc

def wsl_path(filein):
    """Convert a Windows path to a Windows Subsystem for Linux path

    Converts a Windows file name path to a Windows Subsystem for Linux path
    so that OPM Flow can be run on WSL.

    Parameters
    ----------
    filein : str
        Windows file name.

    Returns
    -------
    fileout : str
        WSL file name.
    """
    if sg.running_windows():
        filein = str(filein)[0].lower() + str(filein)[2:]
        fileout = Path('//mnt//') / Path(filein)
        fileout = PureWindowsPath(fileout).as_posix()
    else:
        fileout = filein
    return fileout

def test_trace_uuid_no_stream_class_id_no_stream_id(self):
    res = bt2.QueryExecutor(
        self._fs,
        "babeltrace.trace-infos",
        {"inputs": [os.path.join(test_ctf_traces_path, "succeed", "succeed1")]},
    ).query()
    os_stream_path = PurePosixPath(
        '/tests/data/ctf-traces/succeed/succeed1/dummystream')
    if os.environ['BT_OS_TYPE'] == 'mingw':
        os_stream_path = PureWindowsPath(os_stream_path)
    self.assertEqual(len(res), 1)
    trace = res[0]
    streams = sorted(trace["stream-infos"], key=sort_predictably)
    self.assertEqual(len(streams), 1)
    self.assertRegex(
        str(streams[0]["port-name"]),
        r"^2a6422d0-6cee-11e0-8c08-cb07d7b3a564 \| .*"
        + re.escape(str(os_stream_path))
        + r"$",
    )

def shelve_master_directory(master_directory, verbosity, rules):
    """Find and store the locations of git repos."""
    if master_directory:
        save_master(master_directory)
        show_verbose_output(verbosity, "Master directory set to ",
                            master_directory, "Now Shelving")
        i = len(list(INDEX_SHELF.keys())) + 1
        folder_paths = [
            x for x in Path(master_directory).iterdir() if x.is_dir()
        ]
        # log folders
        for f in folder_paths:
            show_verbose_output(verbosity, f)
        for folder_name in folder_paths:
            path = Path(master_directory) / folder_name
            if enforce_exclusion(folder_name, verbosity):
                continue
            if match_rule(rules, path, verbosity):
                continue
            directory_absolute_path = Path(path).resolve()
            if is_git_repo(directory_absolute_path):
                if sys.platform == 'win32':
                    name = PureWindowsPath(directory_absolute_path).parts[-1]
                if sys.platform == 'linux':
                    name = PurePath(directory_absolute_path).parts[-1]
                show_verbose_output(verbosity, directory_absolute_path,
                                    " is a git repository *** shelving\n")
                NAME_SHELF[name] = directory_absolute_path
                INDEX_SHELF[str(i)] = name
                i += 1

def test_convert_source_path():
    """Test source path gets converted to a location that's mounted into the Docker VM
    on Windows and allows running `sgr engine add` with `--inject-source` to bind mount
    Splitgraph source code into the engine."""
    assert _convert_source_path(
        "/c/Users/username/splitgraph") == "/c/Users/username/splitgraph"

    # a lot of patching here because we're not actually running on Win but this is what
    # I observed happens there.
    path = PureWindowsPath("C:\\Projects\\Splitgraph")
    with patch.object(PureWindowsPath, "as_posix", return_value="C:/Projects/Splitgraph"):
        with patch("splitgraph.commandline.engine.logging") as log:
            # Check user is warned if the directory might not get bind mounted on Docker VM.
            with patch(
                "splitgraph.commandline.engine.Path",
                return_value=path,
            ):
                assert _convert_source_path(
                    "C:\\Projects\\Splitgraph") == "/c/Projects/Splitgraph"
                assert log.warning.call_count == 1

def __init__(self, recipesRoot, uuid, scan):
    self.uuid = uuid
    self.isRoot = scan.isRoot
    self.packagePath = scan.stack
    self.workspacePath = recipesRoot.joinpath(
        PureWindowsPath(scan.workspacePath))
    self.headers = [
        recipesRoot.joinpath(PureWindowsPath(i)) for i in scan.headers
    ]
    self.sources = [
        recipesRoot.joinpath(PureWindowsPath(i)) for i in scan.sources
    ]
    self.resources = [
        recipesRoot.joinpath(PureWindowsPath(i)) for i in scan.resources
    ]
    self.incPaths = [
        recipesRoot.joinpath(PureWindowsPath(i)) for i in scan.incPaths
    ]
    self.dependencies = scan.dependencies
    self.runTargets = [
        recipesRoot.joinpath(PureWindowsPath(i)) for i in scan.runTargets
    ]

def resolve_gomod(app_source_path, request, dep_replacements=None, git_dir_path=None):
    """
    Resolve and fetch gomod dependencies for given app source archive.

    :param str app_source_path: the full path to the application source code
    :param dict request: the Cachito request this is for
    :param list dep_replacements: dependency replacements with the keys "name" and "version";
        this results in a series of `go mod edit -replace` commands
    :param str git_dir_path: the full path to the application's git repository
    :return: a dict containing the Go module itself ("module" key), the list of dictionaries
        representing the dependencies ("module_deps" key), the top package level dependency
        ("pkg" key), and a list of dictionaries representing the package level dependencies
        ("pkg_deps" key)
    :rtype: dict
    :raises CachitoError: if fetching dependencies fails
    """
    if git_dir_path is None:
        git_dir_path = app_source_path
    if not dep_replacements:
        dep_replacements = []

    worker_config = get_worker_config()
    with GoCacheTemporaryDirectory(prefix="cachito-") as temp_dir:
        env = {
            "GOPATH": temp_dir,
            "GO111MODULE": "on",
            "GOCACHE": temp_dir,
            "GOPROXY": worker_config.cachito_athens_url,
            "PATH": os.environ.get("PATH", ""),
            "GOMODCACHE": "{}/pkg/mod".format(temp_dir),
        }
        run_params = {"env": env, "cwd": app_source_path}

        # Collect all the dependency names that are being replaced to later verify if they
        # were all used
        replaced_dep_names = set()
        for dep_replacement in dep_replacements:
            name = dep_replacement["name"]
            replaced_dep_names.add(name)
            new_name = dep_replacement.get("new_name", name)
            version = dep_replacement["version"]
            log.info("Applying the gomod replacement %s => %s@%s", name, new_name, version)
            run_gomod_cmd(
                ("go", "mod", "edit", "-replace", f"{name}={new_name}@{version}"), run_params)

        # Vendor dependencies if the gomod-vendor flag is set
        flags = request.get("flags", [])
        if "gomod-vendor" in flags:
            log.info("Vendoring the gomod dependencies")
            run_gomod_cmd(("go", "mod", "vendor"), run_params)
        elif worker_config.cachito_gomod_strict_vendor and os.path.isdir(
                os.path.join(app_source_path, "vendor")):
            raise CachitoError(
                'The "gomod-vendor" flag must be set when your repository has vendored'
                " dependencies.")
        else:
            log.info("Downloading the gomod dependencies")
            run_gomod_cmd(("go", "mod", "download"), run_params)

        if dep_replacements:
            run_gomod_cmd(("go", "mod", "tidy"), run_params)

        # module level dependencies
        output_format = "{{.Path}} {{.Version}} {{.Replace}}"
        go_list_output = run_gomod_cmd(
            ("go", "list", "-mod", "readonly", "-m", "-f", output_format, "all"), run_params)

        module_level_deps = []
        module_name = None
        # Keep track of which dependency replacements were actually applied to verify they
        # were all used later
        used_replaced_dep_names = set()
        go_module_name_error = "The Go module name could not be determined"
        for line in go_list_output.splitlines():
            # If there is no "replace" directive used on the dependency, then the last column
            # will be "<nil>"
            parts = [part for part in line.split(" ") if part not in ("", "<nil>")]
            if len(parts) == 1:
                # This is the application itself, not a dependency
                if module_name is not None:
                    log.error(
                        'go list produced two lines which look like module names: "%s" and "%s"',
                        module_name,
                        parts[0],
                    )
                    raise CachitoError(go_module_name_error)
                module_name = parts[0]
                continue

            replaces = None
            if len(parts) == 3:
                # If a Go module uses a "replace" directive to a local path, it will be shown as:
                # k8s.io/metrics v0.0.0 ./staging/src/k8s.io/metrics
                # In this case, just take the left side.
                parts = parts[0:2]
            elif len(parts) == 4:
                # If a Go module uses a "replace" directive, then it will be in the format:
                # github.com/pkg/errors v0.8.0 github.com/pkg/errors v0.8.1
                # In this case, just take the right side since that is the actual
                # dependency being used
                old_name, old_version = parts[0], parts[1]
                # Only keep track of user provided replaces. There could be existing "replace"
                # directives in the go.mod file, but they are an implementation detail specific
                # to Go and they don't need to be recorded in Cachito.
                if old_name in replaced_dep_names:
                    used_replaced_dep_names.add(old_name)
                    replaces = {"type": "gomod", "name": old_name, "version": old_version}
                parts = parts[2:]

            if len(parts) == 2:
                module_level_deps.append({
                    "name": parts[0],
                    "replaces": replaces,
                    "type": "gomod",
                    "version": parts[1],
                })
            else:
                log.warning("Unexpected go module output: %s", line)

        unused_dep_replacements = replaced_dep_names - used_replaced_dep_names
        if unused_dep_replacements:
            raise CachitoError(
                "The following gomod dependency replacements don't apply: "
                f'{", ".join(unused_dep_replacements)}')

        if not module_name:
            # This should never occur, but it's here as a precaution
            raise CachitoError(go_module_name_error)

        # NOTE: If there are multiple go modules in a single git repo, they will
        # all be versioned identically.
        module_version = get_golang_version(
            module_name, git_dir_path, request["ref"], update_tags=True)
        module = {"name": module_name, "type": "gomod", "version": module_version}

        bundle_dir = RequestBundleDir(request["id"])
        if "gomod-vendor" in flags:
            # Create an empty gomod cache in the bundle directory so that any Cachito
            # user does not have to guard against this directory not existing
            bundle_dir.gomod_download_dir.mkdir(exist_ok=True, parents=True)
        else:
            # Add the gomod cache to the bundle the user will later download
            tmp_download_cache_dir = os.path.join(
                temp_dir, RequestBundleDir.go_mod_cache_download_part)
            if not os.path.exists(tmp_download_cache_dir):
                os.makedirs(tmp_download_cache_dir, exist_ok=True)
            log.debug(
                "Adding dependencies from %s to %s",
                tmp_download_cache_dir,
                bundle_dir.gomod_download_dir,
            )
            _merge_bundle_dirs(tmp_download_cache_dir, str(bundle_dir.gomod_download_dir))

        log.info("Retrieving the list of package level dependencies")
        list_pkgs_cmd = ("go", "list", "-find", "./...")
        go_list_pkgs_output = run_gomod_cmd(list_pkgs_cmd, run_params)
        packages = []
        processed_pkg_deps = set()
        for package in go_list_pkgs_output.splitlines():
            if package in processed_pkg_deps:
                # Go searches for packages in directories through a top-down approach. If a
                # toplevel package is already listed as a dependency, we do not list it here,
                # since its dependencies would also be listed in the parent package
                log.debug(
                    "Package %s is already listed as a package dependency. Skipping...",
                    package)
                continue

            list_deps_cmd = (
                "go",
                "list",
                "-deps",
                "-f",
                "{{if not .Standard}}{{.ImportPath}} {{.Module}}{{end}}",
                package,
            )
            go_list_deps_output = run_gomod_cmd(list_deps_cmd, run_params)

            pkg_level_deps = []
            for line in go_list_deps_output.splitlines():
                name, version = _parse_name_and_version(line)
                # If the line did not contain a version, we'll use the module version
                version = version or module_version
                if version.startswith("."):
                    raise CachitoError(
                        f"Local gomod dependencies are not yet supported: {version}")
                elif version.startswith("/") or PureWindowsPath(version).root:
                    # This will disallow paths starting with '/', '\' or '<drive letter>:\'
                    raise CachitoError(
                        f"Absolute paths to gomod dependencies are not supported: {version}")

                pkg = {
                    "name": name,
                    "type": "go-package",
                    "version": version,
                }
                processed_pkg_deps.add(name)
                pkg_level_deps.append(pkg)

            # The last item on `go list -deps` is the main package being evaluated
            pkg = pkg_level_deps.pop()
            packages.append({"pkg": pkg, "pkg_deps": pkg_level_deps})

    return {"module": module, "module_deps": module_level_deps, "packages": packages}

from pathlib import PureWindowsPath, PurePosixPath

import pytest

from scanpy.readwrite import _slugify


@pytest.mark.parametrize('path', [
    PureWindowsPath(r'C:\foo\bar'),
    PureWindowsPath(r'.\C\foo\bar'),
    PureWindowsPath(r'C\foo\bar'),
    PurePosixPath('/C/foo/bar'),
    PurePosixPath('./C/foo/bar'),
    PurePosixPath('C/foo/bar'),
])
def test_slugify(path):
    assert _slugify(path) == 'C-foo-bar'

def _convert_paths_to_absolute_posix(
        project_path: Path, conf_dictionary: Dict[str, Any]) -> Dict[str, Any]:
    """Turn all relative paths inside ``conf_dictionary`` into absolute paths by appending
    them to ``project_path`` and convert absolute Windows paths to POSIX format. This is a
    hack to make sure that we don't have to change user's working directory for logging and
    datasets to work. It is important for non-standard workflows such as IPython notebook
    where users don't go through `kedro run` or `run.py` entrypoints.

    Example:
    ::
        >>> conf = _convert_paths_to_absolute_posix(
        >>>     project_path=Path("/path/to/my/project"),
        >>>     conf_dictionary={
        >>>         "handlers": {
        >>>             "info_file_handler": {
        >>>                 "filename": "logs/info.log"
        >>>             }
        >>>         }
        >>>     }
        >>> )
        >>> print(conf['handlers']['info_file_handler']['filename'])
        "/path/to/my/project/logs/info.log"

    Args:
        project_path: The root directory to prepend to relative path to make absolute path.
        conf_dictionary: The configuration containing paths to expand.

    Returns:
        A dictionary containing only absolute paths.

    Raises:
        ValueError: If the provided ``project_path`` is not an absolute path.
    """
    if not project_path.is_absolute():
        raise ValueError(
            f"project_path must be an absolute path. Received: {project_path}")

    # only check a few conf keys that are known to specify a path string as value
    conf_keys_with_filepath = ("filename", "filepath", "path")

    for conf_key, conf_value in conf_dictionary.items():
        # if the conf_value is another dictionary, absolutify its paths first.
        if isinstance(conf_value, dict):
            conf_dictionary[conf_key] = _convert_paths_to_absolute_posix(
                project_path, conf_value)
            continue

        # if the conf_value is not a dictionary nor a string, skip
        if not isinstance(conf_value, str):
            continue

        # if the conf_value is a string but the conf_key isn't one associated with filepath, skip
        if conf_key not in conf_keys_with_filepath:
            continue

        if _is_relative_path(conf_value):
            # Absolute local path should be in POSIX format
            conf_value_absolute_path = (project_path / conf_value).as_posix()
            conf_dictionary[conf_key] = conf_value_absolute_path
        elif PureWindowsPath(conf_value).drive:
            # Convert absolute Windows path to POSIX format
            conf_dictionary[conf_key] = PureWindowsPath(conf_value).as_posix()

    return conf_dictionary