def levels_list(self, path):
    """Recursively scan *path* and collect Unreal ``.umap`` level files.

    :param path: directory to scan (plain string or path-like)
    :return: a dict mapping folder names to lists of level entries, or
             ``{'No Data': 'Error Path'}`` when *path* is not a directory
    """
    folders = {}
    # NOTE(review): this single list is shared by every key written into
    # 'folders' below (original behavior, preserved).
    levels = []
    if isdir(path):
        obj_path = PureWindowsPath(path)
        tree = listdir(str(obj_path))
        # Order entries by the position of '.umap' in the name
        # (entries without it, where find() == -1, sort first).
        tree.sort(key=lambda s: s.find('.umap'))
        for item in tree:
            abs_path = obj_path.joinpath(item)
            if isdir(abs_path):
                key = obj_path.stem
                sub_levels = self.levels_list(abs_path)
                # isinstance() instead of type() ==; truthiness instead of len().
                if sub_levels and isinstance(sub_levels, dict):
                    levels.append(sub_levels)
                    folders[key] = levels
            elif '.umap' in item:
                # Strip everything up to and including the 'Content' root so
                # the stored path is relative to the project's content dir.
                relative_path = re.sub(r"^.*Content", "", str(obj_path))
                levels.append(join(relative_path, item))
                key = basename(dirname(abs_path))
                folders[key] = levels
    else:
        folders = {'No Data': 'Error Path'}
    return folders
def iter_files(self) -> Generator[ArchiveFile, None, None]:
    """Iterates over the parsed data and yields instances of
    :class:`.ArchiveFile`.

    Yields:
        :class:`.ArchiveFile`: A file contained within the archive
    """
    file_index = 0
    # Struct used when a file follows the archive-wide compression default.
    default_struct = self.uncompressed_file_struct
    if self.container.header.archive_flags.files_compressed:
        default_struct = self.compressed_file_struct

    for directory_block in self.container.directory_blocks:
        # get directory path from directory block (drop trailing terminator)
        directory_path = PureWindowsPath(directory_block.name[:-1])
        for file_record in directory_block.file_records:
            # BUG FIX: choose the struct per file instead of mutating a
            # loop-wide variable — previously, once one record matched the
            # compressed mask, every later file was also parsed as
            # compressed regardless of its own flag bit.
            file_struct = default_struct
            # The mask bit inverts the archive default for this file.
            if file_record.size > 0 and (
                    self.container.header.archive_flags.files_compressed !=
                    bool(file_record.size & self.COMPRESSED_MASK)):
                file_struct = self.compressed_file_struct

            file_container = file_struct.parse(
                self.content[file_record.offset:(
                    file_record.offset + (file_record.size & self.SIZE_MASK))])

            yield ArchiveFile(
                filepath=directory_path.joinpath(
                    self.container.file_names[file_index]),
                data=file_container.data,
            )
            file_index += 1
def _grant_jenkins_master_ssh_access_to_jenkins_windows_slave(self, slave_host_connection ):
    """Give the jenkins-master container ssh access to a windows slave machine.

    Reads the master container's public key, bakes it (plus user/container
    names) into a script template, uploads the script to the slave's ``.ssh``
    directory, runs it remotely and finally registers the slave's host key on
    the master.

    :param slave_host_connection: connection object for the windows slave;
        must expose ``.info`` (host/user/password), ``.sftp_client`` and
        ``.run_command`` — assumed from usage, verify against caller.
    """
    master_config = self.config.jenkins_master_host_config.container_conf
    master_container = master_config.container_name
    print(("----- Grant {0} ssh access to {1}").format(master_container, slave_host_connection.info.host_name))
    # configure the script for adding an authorized key to the bitwise ssh server on the
    # windows machine
    authorized_keys_script = 'updateAuthorizedKeys.bat'
    full_authorized_keys_script = _SCRIPT_DIR.joinpath(authorized_keys_script)
    master_connection = self._get_jenkins_master_host_connection()
    public_key_file_master = config_data.JENKINS_HOME_JENKINS_MASTER_CONTAINER.joinpath('.ssh/' + _get_public_key_filename(master_container) )
    # Read the public key out of the running master container (first output line).
    public_key = dockerutil.run_command_in_container(
        master_connection,
        master_config,
        'cat {0}'.format(public_key_file_master)
    )[0]
    # Fill the '@...@' placeholders of the .in template to produce the script.
    configure_file(str(full_authorized_keys_script) + '.in', full_authorized_keys_script, {
        '@PUBLIC_KEY@' : public_key,
        '@JENKINS_MASTER_CONTAINER@' : master_container,
        '@SLAVE_MACHINE_USER@' : slave_host_connection.info.user_name,
    })
    # copy the script to the windows slave machine
    ssh_dir = PureWindowsPath('C:/Users/' + slave_host_connection.info.user_name + '/.ssh')
    full_script_path_on_slave = ssh_dir.joinpath(full_authorized_keys_script.name)
    slave_host_connection.sftp_client.put(str(full_authorized_keys_script), str(full_script_path_on_slave))
    # call the script
    try:
        # NOTE(review): the user password is passed on the remote command line,
        # which may be visible in process listings on the slave.
        call_script_command = '{0} {1}'.format(full_script_path_on_slave, slave_host_connection.info.user_password)
        slave_host_connection.run_command(call_script_command, print_command=True)
    except Exception as err:
        print(
            "Error: Updating the authorized ssh keys on " + slave_host_connection.info.host_name + " failed. Was the password correct?")
        raise err
    # clean up the generated script because of the included password
    os.remove(str(full_authorized_keys_script))
    # Wait a little until the bitvise ssh server is ready to accept the key file.
    # This can possibly fail on slower machines.
    time.sleep(1)
    # Add the slave to the known hosts
    try:
        _accept_remote_container_host_key(
            master_connection,
            master_config,
            slave_host_connection.info.host_name,
            22,
            slave_host_connection.info.user_name
        )
    except Exception as err:
        # This is not really clean but I can not think of a better solution now.
        print("When this call fails, it is possible that the waiting time before using the ssh connection to the Bitvise SSH server is too short.")
        raise err
def get_wwise_mac_path(self, path: str) -> str:
    """Convert *path* to the Windows-style form expected by Wwise for Mac.

    Args:
        path (str): Any path in any format

    Returns:
        str: The path with separators normalized to backslashes
    """
    # PureWindowsPath normalizes '/' to '\' without touching the filesystem.
    # The original wrapped *path* in a pointless f-string and called the
    # no-op joinpath() with no arguments; both removed — behavior unchanged.
    return str(PureWindowsPath(path))
import numpy as np
import pandas as pd
import requests as r
import matplotlib.pyplot as plt
import seaborn
from bs4 import BeautifulSoup
import sqlite3

#-------------------------------------------------------------------------
# read the file containing the data
# NOTE(review): 'PureWindowsPath' and 'os' are not imported in this chunk;
# they must come from an earlier part of the file — verify.
working_dir = PureWindowsPath(os.getcwd())
data_dir = PureWindowsPath(working_dir.joinpath('Data'))
data_file = PureWindowsPath(data_dir.joinpath('module_2_data_1.xls'))
data_result_name = 'result.csv'

# read the first two columns of the second worksheet of the excel file
if 0:
    # disabled variant: read the whole default sheet
    df = pd.read_excel(data_file.as_posix())
df = pd.read_excel(data_file.as_posix(), sheet_name='Sheet2', usecols=[0, 1])

#-------------------------------------------------------------------------
# web scraping
# extraction of the text contained in the h2 tags
#-------------------------------------------------------------------------
import os
from pathlib import PureWindowsPath
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

#---- load the data
try:
    # Payment-address correction workbook on the client network share.
    xmlFileName = 'O:\\Dupont\\24 BenAdmin-Param\\05 Paiement\\SEPA\\5_adresse_paiement_correction_is_iban.xlsx'
    currentDir = PureWindowsPath(os.getcwd())
    sampleDir = PureWindowsPath(currentDir.joinpath('WorldsTallestMountains'))
    dataFile = PureWindowsPath(sampleDir.joinpath('Mountains.csv'))

    data = pd.DataFrame()
    data = pd.read_csv(dataFile.as_posix())
    dataxml = pd.read_excel(xmlFileName)

    # clean up the data: index by mountain name, drop unused columns and
    # two rows excluded from the analysis
    data.set_index('Mountain', inplace=True)
    data.drop(['Rank', 'Height (ft)', 'Coordinates', 'Parent mountain'], axis=1, inplace=True)
    data.drop(['Mount Everest / Sagarmatha / Chomolungma', 'Muztagh Ata'], axis=0, inplace=True)
    # NOTE(review): the matching 'except' clause lies outside this chunk.
# Input / output file names used by the payment analysis.
liste_45_file_name = 'liste_45.xlsx'
result_merge_file_name = 'merge.csv'
result_dta_file_name = 'dta.csv'
result_comptes_file_name = 'comptes.csv'
result_file_name = 'RESULT.csv'

#--------------------------------------------------------------------------
# file access paths
#--------------------------------------------------------------------------
local = False
if not local:
    # Data lives on the client network share.
    client_root_dir = PureWindowsPath('O:\\')
    data_dir = PureWindowsPath(client_root_dir.joinpath(
        'Lausanne', '24 BenAdmin-Param', '05 Paiement', 'SEPA', 'Contrôles'))
else:
    # Data lives next to the script, under ./Data.
    working_dir = PureWindowsPath(os.getcwd())
    data_dir = PureWindowsPath(working_dir.joinpath('Data'))

#--------------------------------------------------------------------------
def helper_get_file(file_name):
    """Return *file_name* resolved inside ``data_dir`` as a PureWindowsPath."""
    return PureWindowsPath(data_dir.joinpath(file_name).as_posix())

#--------------------------------------------------------------------------
# analysis of the payment elements
#--------------------------------------------------------------------------
print("\nListing blobs...")

# List the blobs in the container
blob_list = container_client.list_blobs()
for blob in blob_list:
    print("\t" + blob.name)

local_path = "./Downloads/Azure-Test"

# Connect to the container
container = ContainerClient.from_connection_string(connect_str, container_name="container_name")

new = None
# Get our list of blobs, and compare date modified values so we can find
# the report which ran this month (i.e. the most recently modified blob).
blob_list = container.list_blobs()
for blob in blob_list:
    # BUG FIX: the original kept the blob with the SMALLEST last_modified
    # (oldest) while printing "Latest blob"; keep the newest instead.
    # Also '== None' replaced with 'is None'.
    if new is None or blob.last_modified > new.last_modified:
        new = blob

# Create the local filepath we are downloading the blob to.
# NOTE(review): if the container is empty, 'new' stays None and the lines
# below raise AttributeError — verify that is acceptable for callers.
download_file_path = PureWindowsPath(local_path, new.name)

print("Latest blob is: " + new.name)
# BUG FIX: str() is required — concatenating str + PureWindowsPath raised
# TypeError. The no-op download_file_path.joinpath() call was removed.
print("\nDownloading blob to \n\t" + str(download_file_path))

with open(download_file_path, "wb") as download_file:
    download_file.write(new.download_blob().readall())
import os
from pathlib import PureWindowsPath
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import re as regexp

# Load the RSUPP quality-control extract from the client network share.
try:
    clientRootDir = PureWindowsPath('O:\\')
    workingDir = PureWindowsPath(os.getcwd())
    clientDir = PureWindowsPath(
        clientRootDir.joinpath('Lausanne', '24 BenAdmin-Param', '01 Analyse règlement',
                               'RGL_2018', 'Implementation', 'RSUPP'))
    dataFileOrigine = PureWindowsPath(clientDir.joinpath('data_rsupxx_QC.csv'))

    data = pd.read_csv(dataFileOrigine, sep=';', encoding="ascii")
    # NOTE(review): 'ibanData' is not defined anywhere in this chunk; if it is
    # undefined at runtime this raises NameError, which the ValueError handler
    # below does NOT catch — verify against the rest of the file.
    data.set_index(ibanData.NPERSO, inplace=True)
except ValueError as e:
    # NOTE(review): the caught exception 'e' is discarded; only a generic
    # marker is printed.
    print("ERROR")
# NOTE(review): this section continues a stacked-bar construction whose
# setup ('a2', 'x_pos', 'ci', 'c', 'df_hap_10', 'title') lies outside this
# chunk — presumably a loop over happiness-score components; verify.
a2.bar(x_pos, ci, bottom=c)
# accumulate the running total so the next component stacks on top
c = c + ci

a2.legend(labels=df_hap_10.columns[3:10], loc="upper right", bbox_to_anchor=(1.2, 1))
a2.set_xlabel('Country')
a2.set_ylabel('Happiness Score')
a2.set_title(title)
plt.show()

#-------------------------------------------------------------------------
# read the file containing the World data
working_dir = PureWindowsPath(os.getcwd())
data_dir = PureWindowsPath(working_dir.joinpath('Data'))
data_file = PureWindowsPath(data_dir.joinpath('data.csv'))

df = pd.read_csv(data_file.as_posix())
df.info()
# NOTE(review): the three bare expressions below have no effect outside a
# REPL/notebook; they only display in interactive use.
df.columns
df.dtypes
df.count()
# drop rows with missing values, then re-count to see how many were removed
df.dropna(inplace=True)
df.count()

# select the numeric-typed columns, producing a copy
df_num_col = df.select_dtypes(include=['float64', 'int64']).copy()

# produce a series of the sum / max / min of the values taken per column