def process_module(module):
    """Return the build metadata for *module*, or None if it is not one.

    A directory is a Zephyr module when it either carries a parseable
    zephyr/module.yml, or ships both zephyr/CMakeLists.txt and
    zephyr/Kconfig.  Exits the process when the module.yml "build"
    section fails schema validation.
    """
    root = PurePath(module)
    meta_file = Path(root.joinpath('zephyr/module.yml'))

    if meta_file.is_file():
        with meta_file.open('r') as fobj:
            meta = yaml.safe_load(fobj.read())
        try:
            pykwalify.core.Core(
                source_data=meta, schema_data=schema).validate()
        except pykwalify.errors.SchemaError as err:
            sys.exit('ERROR: Malformed "build" section in file: {}\n{}'
                     .format(meta_file.as_posix(), err))
        return meta

    # No metadata file: fall back to the conventional file layout.
    has_cmake = Path(root.joinpath('zephyr/CMakeLists.txt')).is_file()
    has_kconfig = Path(root.joinpath('zephyr/Kconfig')).is_file()
    if has_cmake and has_kconfig:
        return {'build': {'cmake': 'zephyr', 'kconfig': 'zephyr/Kconfig'}}

    return None
示例#2
0
    def setup_class(cls):
        """
        Setting up the objects to test functionality.

        Note:
        User running the tests must be a member of group 'family'.
        """
        # Scratch directory shared by every test in the class.
        cls.folder = Path(tempfile.mkdtemp())

        # file1.txt: group 'family', read-only permission bits.
        file_one = cls.folder / "file1.txt"
        with file_one.open(mode="w") as handle:
            handle.write("file1")
        chown(file_one, group="family")
        file_one.chmod(0o40400)

        # file2.txt: plain file with default ownership.
        file_two = cls.folder / "file2.txt"
        with file_two.open(mode="w") as handle:
            handle.write("file2")

        # A sub-directory owned by group 'family'.
        sub_dir = cls.folder / "folder"
        sub_dir.mkdir()
        chown(sub_dir, group="family")
        sub_dir.chmod(0o40750)

        # file3.txt lives inside the sub-directory.
        file_three = cls.folder / "folder" / "file3.txt"
        with file_three.open(mode="w") as handle:
            handle.write("file3")

        cls.group = cls.folder.group()
        cls.owner = cls.folder.owner()
        cls.o1 = file_one
        cls.o2 = sub_dir
示例#3
0
    def generate_client_library(self):
        """Generate client-side gRPC stubs for the configured service.

        Protos are fetched from IPFS into a temporary directory and
        compiled into <protodir>/<org_id>/<service_id>/<language>.
        """
        # Resolve the output root; a relative protodir is anchored at cwd.
        protodir = self.args.protodir
        if os.path.isabs(protodir):
            libraries_root = PurePath(protodir)
        else:
            libraries_root = PurePath(os.getcwd()).joinpath(protodir)

        os.makedirs(libraries_root, exist_ok=True)

        language = self.args.language
        org_id = self.args.org_id
        service_id = self.args.service_id

        # Per-service destination for the generated library.
        library_dir_path = libraries_root.joinpath(org_id, service_id, language)

        metadata = self._get_service_metadata_from_registry()
        model_ipfs_hash = metadata["model_ipfs_hash"]

        with TemporaryDirectory() as temp_dir:
            proto_temp_dir_path = PurePath(temp_dir).joinpath(
                org_id, service_id, language)
            safe_extract_proto_from_ipfs(
                self._get_ipfs_client(), model_ipfs_hash, proto_temp_dir_path)
            # Compile the protos while the temporary directory still exists.
            compile_proto(Path(proto_temp_dir_path), library_dir_path,
                          target_language=self.args.language)

        self._printout(
            'client libraries for service with id "{}" in org with id "{}" '
            'generated at {}'.format(service_id, org_id, library_dir_path))
示例#4
0
def copy_local_files_to_host(connection,
                             source_dir,
                             context_dir,
                             text_files,
                             binary_files=None):
    """Copy container build-context files from the local machine to the host.

    context_dir is the directory on the host that is used as a build context for the container.
    source_dir is the absolute directory on the machine executing the script that contains the files
    required for building the container.

    :param connection: remote connection exposing an ``sftp_client``.
    :param source_dir: local directory containing the files (str or path).
    :param context_dir: host-side path object supporting ``joinpath``.
    :param text_files: iterable of relative paths copied as text.
    :param binary_files: iterable of relative paths copied as binary;
        defaults to no files.
    """
    # BUG FIX: the original used a mutable default argument ([]); use
    # None as the sentinel to avoid the shared-mutable-default pitfall.
    if binary_files is None:
        binary_files = ()
    source_dir = PurePath(source_dir)

    # Text files may need newline/encoding translation on the way over.
    for file_path in text_files:
        copy_textfile_from_local_to_linux(connection,
                                          source_dir.joinpath(file_path),
                                          context_dir.joinpath(file_path))

    # Binary files are transferred verbatim over SFTP.
    for file_path in binary_files:
        copy_file_from_local_to_remote(connection.sftp_client,
                                       source_dir.joinpath(file_path),
                                       context_dir.joinpath(file_path))
示例#5
0
def getCR(filename, tallydic, cell, matnum, volume):
    """Compute the conversion ratio (CR) from MCNP tally output.

    CR = (capture in fertile 90232/92234/92238/94240 minus capture in
    91233) divided by (capture plus fission in the fissile nuclides).

    :param filename: output file name, resolved against the module-level ``path``.
    :param tallydic: mapping of tally number -> nuclide ZAID string.
    :param cell: cell number passed to the density reader.
    :param matnum: material number passed to the density reader.
    :param volume: cell volume used to scale reaction rates.
    :return: the conversion ratio as a float.
    """
    nuclidelist = [
        '90232', '91233', '94239', '94240', '94241', '92233', '92234', '92235',
        '92238'
    ]
    # Resolve the tally file once instead of on every loop iteration.
    tallyfile = PurePath.joinpath(path, filename)
    captureratedic = {}
    fissratedic = {}
    atomdensitydic = {}
    for nuclide in nuclidelist:
        atomdensitydic[nuclide] = mtr.getNuclideDensity(
            tallyfile, cell, matnum, nuclide)
    for tallynum, nuclide in tallydic.items():
        # FM reaction numbers: 102 = (n,gamma) capture, -6 = fission.
        capturetally = readFmtally(tallyfile, tallynum, '102')
        fisstally = readFmtally(tallyfile, tallynum, '-6')
        # BUG FIX: the original referenced the undefined name 'volum'
        # (NameError at runtime); the parameter is 'volume'.
        captureratedic[nuclide] = volume * atomdensitydic[nuclide] * capturetally
        fissratedic[nuclide] = volume * atomdensitydic[nuclide] * fisstally
    cr = (captureratedic['90232'] + captureratedic['92234'] +
          captureratedic['92238'] + captureratedic['94240'] -
          captureratedic['91233']) / (
              captureratedic['92233'] + captureratedic['92235'] +
              captureratedic['94239'] + captureratedic['94241'] +
              fissratedic['92233'] + fissratedic['92235'] +
              fissratedic['94239'] + fissratedic['94241'])
    return cr
def rel_into_rel_to_proj(relative_to_project_path, path):
    """Rebase *path* (which may begin with '..' parts) onto
    *relative_to_project_path*.

    Counts the '..' components of *path*, drops that many components,
    and prefixes the remainder with the portion of
    *relative_to_project_path* that is not consumed by the climbs (its
    final component is always dropped, as it names a file).

    :return: the combined path as a string, or None when *path* climbs
        higher than *relative_to_project_path* allows.
    """
    path_parts = PurePath(path).parts
    anchor_parts = PurePath(relative_to_project_path).parts

    # How many levels *path* climbs upward.
    # TODO This does not account for symbolic links 'example/./path',
    # and maybe some other things, not sure if it will be a problem.
    climbs = sum(1 for part in path_parts if part == "..")

    # Not enough anchor components left to absorb the climbs.
    if climbs > len(anchor_parts) - 1:
        return None

    # BUG FIX: the original looped on 'while counter is not 0', an
    # identity comparison against an int literal (a SyntaxWarning and
    # implementation-dependent); plain slicing replaces the loop.
    result = PurePath('.')
    for part in anchor_parts[:len(anchor_parts) - 1 - climbs]:
        result = result.joinpath(part)
    # Append everything of *path* past its leading '..' components.
    for part in path_parts[climbs:]:
        result = result.joinpath(part)
    return str(result)
def try_to_compile_vcon(libpath):
    """ Workaround for compiling the Vcontacts library.

        The reason for using this instead of Extension directly in the setup.py script is that Extension works on
        Unix-like systems but is looking for some type of Cython/Python.h bindings when compiled on Windows (to enable
        importing the extension as a Python module), whereas the Vcontacts library is native C that is used with ctypes
        and does not need to be importable. This function is a simple hack that compiles then library upon the first
        execution of Vcontacts.

        :param libpath: directory containing the Vcontacts C sources; the
            compiled library is written there as vconlib.dll / vconlib.so.
        :raises ValueError: on an unrecognized operating system.
    """
    import shutil  # local import: only needed for the Windows copy step

    libpath = PurePath(libpath)
    op_system = platform.system()
    if op_system == "Windows":
        # cl.exe /LD builds a DLL in the current working directory.
        subprocess.call([
            'cl.exe', '/LD',
            str(libpath.joinpath('Vcontacts-v1.2_for_python_windows.c'))
        ])
        # BUG FIX: the original shelled out to 'cp', which does not exist
        # on a stock Windows install; copy with the standard library.
        shutil.copy('Vcontacts-v1.2_for_python_windows.dll',
                    str(libpath.joinpath('vconlib.dll')))
    elif op_system == "Darwin" or op_system == "Linux":
        subprocess.call([
            'gcc', '-shared', '-o',
            str(libpath.joinpath('vconlib.so')), '-fPIC',
            str(libpath.joinpath('Vcontacts-v1.2_for_python.c'))
        ])
    else:
        raise ValueError(
            "Unrecognized operating system: {0}".format(op_system))
示例#8
0
def main():
    """Execute the three exploration methods on the dummy FPSelect example."""
    for data_directory in DATA_DIRECTORIES:
        data_path = PurePath(data_directory)
        logger.debug(f'Considering the data path {data_path}.')

        # Load the fingerprint dataset, failing fast when it is absent.
        dataset_path = data_path.joinpath(FINGERPRINT_DATASET_CSV)
        if not path.isfile(dataset_path):
            raise ValueError(f'No fingerprint dataset is at {dataset_path}.')
        dataset = FingerprintDatasetFromCSVFile(dataset_path)
        logger.info(f'Considering the dataset {dataset}.')

        # Per-attribute average fingerprint size and instability analyses.
        memory_results = read_csv_analysis_as_dict(
            data_path.joinpath(MEMORY_COST_CSV),
            dataset.candidate_attributes)
        instability_results = read_csv_analysis_as_dict(
            data_path.joinpath(INSTABILITY_COST_CSV),
            dataset.candidate_attributes)

        # Sweep every weighting of the usability costs...
        for usability_cost_weights in USABILITY_COST_WEIGHTS:
            usability_cost_measure = MemoryInstability(memory_results,
                                                       instability_results,
                                                       usability_cost_weights)
            logger.info('Considering the usability cost measure '
                        f'{usability_cost_measure}.')

            # ...against every attacker submission budget.
            for attacker_submissions in ATTACKER_SUBMISSIONS:
                execute_level_1(data_path, dataset, usability_cost_measure,
                                usability_cost_weights, attacker_submissions)
示例#9
0
def getThoriumPowerFraction(path, filename, cell, matnum, volume):
    """Return the per-nuclide fission energy production from MCNP tallies.

    The energy release per fission Q (MeV) is estimated with the
    empirical relation Q = 0.00129927 * Z^2 * sqrt(A) + 33.12.
    """
    mtr = McnpTallyReader()
    # Resolve the tally file once up front.
    tallyfile = PurePath.joinpath(path, filename)
    nuclidelist = [
        '90232', '91233', '94239', '94240', '94241', '92233', '92234', '92235',
        '92238'
    ]
    # Tally number -> nuclide ZAID it scores.
    tallydic = {
        '1003': '94239',
        '1005': '94241',
        '1018': '92233',
        '1020': '92235'
    }
    atomdensitydic = {
        nuclide: mtr.getNuclideDensity(tallyfile, cell, matnum, nuclide)
        for nuclide in nuclidelist
    }
    fission_energy_dic = defaultdict(lambda: 0)
    for tallynum, nuclide in tallydic.items():
        proton_number = float(nuclide[0:2])
        mass_number = float(nuclide[2:])
        # Empirical energy release per fission (MeV).
        q_value = 0.00129927 * proton_number**2 * mass_number**0.5 + 33.12
        fisstally = readFmtally(tallyfile, tallynum, '-6')
        fission_energy_dic[nuclide] = (
            volume * atomdensitydic[nuclide] * fisstally * q_value)
    tot_energy = sum(fission_energy_dic.values())
    print(tot_energy)
    return fission_energy_dic
示例#10
0
def get_command_list():
    """Build ffmpeg re-encode commands for every .mp4 under the cwd's folders.

    Mirrors each sub-folder under ./optimized and returns one ffmpeg
    libx265 command string per video found.
    """
    commands = []
    cwd = '.'
    # Collect every directory name seen anywhere under cwd.  NOTE(review):
    # nested directory names are later joined directly onto cwd, so a
    # deeply nested folder name simply yields an empty walk below.
    folder_names = []
    for root, dirs, files in os.walk(cwd):
        folder_names.extend(dirs)
    # print(folder_names)
    out_root = Path("./optimized")
    out_root.mkdir(parents=True, exist_ok=True)
    for folder in folder_names:
        if folder == 'optimized':
            continue
        print("Looking in folder ", folder)
        out_dir = Path(PurePath.joinpath(out_root, folder))
        out_dir.mkdir(parents=True, exist_ok=True)
        for root, dirs, files in os.walk(os.path.join(cwd, folder)):
            for name in files:
                if name.endswith(".mp4"):
                    source = os.path.join(root, name)
                    target = PurePath.joinpath(out_dir, name)
                    # NOTE(review): paths are interpolated into a shell
                    # string; quoting breaks on names containing "'".
                    commands.append(
                        """ffmpeg -i '%s' -vcodec libx265 -crf 28 '%s'"""
                        % (source, target))
    return commands
示例#11
0
def test_events_with_file_open_access_nowrite():
    """Reading an existing file must not generate any watch events."""

    # Create a temporary directory to work in.
    path_to_watch = PurePath(tempfile.mkdtemp())

    # Create file to read from.
    with open(path_to_watch.joinpath("testfile"), "w") as fobj:
        fobj.write("stuff")

    # Fire up builder.assist thread.
    ctx = start_watching(str(path_to_watch))

    # Events generated from this read should be ignored.
    with open(path_to_watch.joinpath("testfile"), "r") as fobj:
        data = fobj.read()
    assert data == "stuff"

    # Poll for ~3 seconds to give the VFS time to trigger the event.
    event_count = 0
    for _ in range(30):
        sleep(0.1)
        # Idiom fix: truth-test the flag rather than comparing '== True'.
        if ctx.is_event():
            event_count += 1

    assert event_count == 0
示例#12
0
def create_sub_folders(path_to_boxes, foldername_short):
    """
    To run this code :
    sys.path
    sys.path.insert(0, '/Users/ifeademolu-odeneye/Documents/GitHub/computation_hist
    /computation_hist') - replace with your file path
    import sort_pdfs
    sort_pdfs.create_sub_folders(sort_pdfs.path_to_boxes, "rockefeller")

    takes in box, foldername_short and uses django_database to:
    1. sub directory for each document - named doc1, doc2 etc
    2. a sub directory for each page in document- named pg1, pg2 etc
    :return:
    """

    # Fetch the folder once instead of re-querying the ORM three times.
    folder = models.Folder.objects.get(name=foldername_short)
    box = str(folder.box)  # note: this is a string

    # BUG FIX: the original called Path.mkdir unbound on the result of
    # PurePath.joinpath, which fails whenever path_to_boxes is not a
    # concrete Path.  Build a real Path and let exist_ok replace the
    # manual os.path.exists checks.
    root = Path(path_to_boxes, box, foldername_short)
    root.mkdir(parents=True, exist_ok=True)

    for doc in folder.document_set.all():
        doc_dir = root.joinpath("doc_" + str(doc.id))
        doc_dir.mkdir(exist_ok=True)
        for page in range(1, doc.number_of_pages + 1):
            doc_dir.joinpath("page_" + str(page)).mkdir(exist_ok=True)
示例#13
0
    def _init_font_family(self):
        """Register the desired font with :py:mod:`reportlab`

        This ensures that `<i></i>` and `<b></b>` as cell content work well.
        """
        # Fonts live in a "fonts" folder next to this file.
        fonts_dir = PurePath(__file__).parent.joinpath("fonts")

        # Register the four faces, then bind them into one family so
        # reportlab resolves <b>/<i> markup to the right font file.
        faces = (
            ("normal_font", self._font.normal),
            ("bold_font", self._font.bold),
            ("italic_font", self._font.italic),
            ("bolditalic_font", self._font.bolditalic),
        )
        for face_name, font_file in faces:
            registerFont(TTFont(face_name, fonts_dir.joinpath(font_file)))
        registerFontFamily(
            "custom_font",
            normal="normal_font",
            bold="bold_font",
            italic="italic_font",
            boldItalic="bolditalic_font",
        )
示例#14
0
File: app.py — Project: slooppe/bbrecon
def key_configure():
    """Prompt for a Bug Bounty Recon API key and store it in ~/.bbrecon/token."""
    config_dir = Path.home() / ".bbrecon"
    token_file = config_dir / "token"
    config_dir.mkdir(parents=True, exist_ok=True)
    typer.echo("You can get a free API key from https://bugbountyrecon.com/")
    token = typer.prompt("Enter your API key")
    # print() appends the trailing newline for us.
    with open(token_file, "w+") as f:
        print(token, file=f)
示例#15
0
def generate_relative_mounts(pvc_param, files):
    """
    Maps a list of files as mounts, relative to the base volume mount.
    For example, given the pvc mount:
    {
        'name': 'my_pvc',
        'mountPath': '/galaxy/database/jobs',
        'subPath': 'data',
        'readOnly': False
    }

    and files: ['/galaxy/database/jobs/01/input.txt', '/galaxy/database/jobs/01/working']

    returns each file as a relative mount as follows:
    [
        {
          'name': 'my_pvc',
          'mountPath': '/galaxy/database/jobs/01/input.txt',
          'subPath': 'data/01/input.txt',
          'readOnly': False
        },
        {
          'name': 'my_pvc',
          'mountPath': '/galaxy/database/jobs/01/working',
          'subPath': 'data/01/working',
          'readOnly': False
        }
    ]

    :param pvc_param: the pvc claim dict
    :param files: a list of file or folder names
    :return: A list of volume mounts
    """
    if not pvc_param:
        return
    claim = parse_pvc_param_line(pvc_param)
    base_subpath = PurePath(claim.get('subPath', ""))
    base_mount = PurePath(claim["mountPath"])
    mounts = []
    for entry in files:
        mount_path = PurePath(str(entry))
        if base_mount not in mount_path.parents:
            # force relative directory, needed for the job working directory in particular
            if mount_path.is_absolute():
                mount_path = base_mount.joinpath(mount_path.relative_to("/"))
            else:
                mount_path = base_mount.joinpath(mount_path)
        subpath = base_subpath.joinpath(mount_path.relative_to(base_mount))
        mounts.append({
            'name': claim['name'],
            'mountPath': str(mount_path),
            'subPath': str(subpath),
            'readOnly': claim["readOnly"]
        })
    return mounts
def main(args):
    """Continuously collect DPM data into date-structured HDF5 files.

    Loops until a collection window produces an empty HDF5 file, which
    signals that there is no further data to fetch.

    :param args: argv-style list; args[1] (optional) overrides the
        outputs directory.
    """
    logger = init_logging('DEBUG')

    drf_request_list = 'linac_logger_drf_requests.txt'
    # BUG FIX: the original called Path.resolve('.') — resolve() invoked
    # unbound on a plain str, which raises AttributeError.  Build a Path
    # first, then resolve it.
    outputs_directory = Path('.').resolve()

    if len(args) > 1:
        outputs_directory = Path(args[1]).resolve()

    # Download latest device request list if it doesn't exist
    # This means that the file must be deleted to get a newer version
    latest_version = get_latest_device_list_version(logger)

    # This always overwrites the file at DRF_REQUESTS_LIST
    get_latest_device_list(drf_request_list, logger)

    while True:
        # get_start_time always returns
        start_time, duration = get_start_time(outputs_directory, logger)
        end_time = start_time + duration

        structured_outputs_directory = create_structured_path(
            outputs_directory, start_time, logger)
        iso_datetime_duration = name_output_file(start_time, logger, duration)
        request_list_version = latest_version.replace('.', '_')
        output_filename = f'{iso_datetime_duration}-{request_list_version}.h5'
        # BUG FIX: PurePath.joinpath('.', ...) called joinpath unbound on
        # a str self and raised; construct proper Path objects instead.
        temp_path_and_filename = Path(output_filename)
        output_path_and_filename = Path(structured_outputs_directory).joinpath(
            output_filename)

        logger.debug('Output path and filename is: %s',
                     output_path_and_filename)
        logger.debug('Calling dpm_data.main...')
        # Begin data request, writing to a local file first.
        dpm_data.main([
            '-s',
            str(start_time), '-e',
            str(end_time), '-f', drf_request_list, '-o', temp_path_and_filename
        ])

        # An empty HDF5 file means there is no more data to collect.
        empty_file = False
        with pd.HDFStore(temp_path_and_filename, 'r') as hdf:
            if len(hdf.keys()) == 0:
                empty_file = True
                logger.debug(('%s is an empty HDF5 file. '
                              'Removing %s and stopping data collection.'),
                             temp_path_and_filename, temp_path_and_filename)

        if empty_file:
            remove(temp_path_and_filename)
            break
        # Ensure that the folders exist
        if not Path(structured_outputs_directory).exists():
            makedirs(structured_outputs_directory, exist_ok=True)

        # Move local closed file to final destination
        shutil.move(temp_path_and_filename, output_path_and_filename)
示例#17
0
def copy_launcher_scripts_to_userdata():
    """Copy the bundled launcher scripts into the user scripts folder."""
    source_base = PurePath(get_addon_install_path()).joinpath(
        'resources', 'scripts')
    if this.os_win:
        # Windows needs both the AHK source and the compiled launcher.
        for script_name in ('playnite-launcher.ahk', 'playnite-launcher.exe'):
            copy_file(source_base.joinpath(script_name),
                      PurePath(scripts_path).joinpath(script_name))
示例#18
0
def CreateModule(module):
    """ Create the default terraform directory and files for a new module"""
    moduleDirectory = PurePath.joinpath(Path.cwd(), module)
    Path.mkdir(moduleDirectory)
    borders = ("#" + ('-' * 80))

    def _banner(text):
        # Three-line commented banner used by most of the stub files.
        return "{0}\n{1}\n{2}\n".format(borders, text, borders)

    # One lazy content builder per stub file; evaluated as each file is
    # written, exactly like the original if/elif chain.
    builders = {
        "inputs.tf": lambda: _banner(
            "# MODULE INPUTS (TF equiv of arguments, params, whatever)"),
        "main.tf": lambda: (
            "terraform {{\n  required_version = \"{0}\"\n}}\n{1}\n{2}\n{3}\n"
            .format(TERRAFORM_VERSION_SUPPORTED, borders,
                    "# {0}".format(module), borders)),
        "variables.tf": lambda: _banner("# LOCAL VARIABLES"),
        "outputs.tf": lambda: _banner("# MODULE OUTPUTS"),
        "README": lambda: _banner("Module for {0}".format(module)),
        "README.md": lambda: (
            "{0}\n{1}\n>This is an autogenerated module stub.  "
            "Change me to something helpful!\n"
            .format("## Module for {0}".format(module), "---")),
    }

    for tfFile in [
            "outputs.tf", "main.tf", "inputs.tf", "variables.tf", "README",
            "README.md"
    ]:
        with Path.open(PurePath.joinpath(moduleDirectory, tfFile),
                       "w") as theFile:
            theFile.write(builders[tfFile]())
            print("\tcreated {0}...".format(tfFile))
示例#19
0
def get_dir_content(directory):
    """
    Return all files and directories under *directory* as paths relative
    to the given directory.
    """
    entries = []
    for current_root, subdirs, filenames in os.walk(str(directory)):
        base = PurePath(current_root).relative_to(directory)
        # Directories first, then files, mirroring the walk order.
        entries.extend(base.joinpath(name) for name in subdirs)
        entries.extend(base.joinpath(name) for name in filenames)
    return entries
示例#20
0
def clearGatedFiles():
    """Delete every file in the gated-data and heatmap-data directories.

    Failures (e.g. a missing directory) are printed and otherwise
    ignored — deliberately best-effort.
    """
    try:
        for name in os.listdir(gateddatadir):
            os.remove(PurePath.joinpath(gateddatadir, name))

        for name in os.listdir(heatmapdatadir):
            os.remove(PurePath.joinpath(heatmapdatadir, name))

    except Exception as e:
        print(e)
示例#21
0
def get_anki_model(config):
    """Load an Anki model definition from a JSON configuration file.

    The JSON file references a stylesheet and per-template qfmt/afmt
    files relative to its own directory.

    :param config: path to the model configuration JSON.
    :return: a populated ``Model``.
    :raises Exception: when the configuration, stylesheet, or any
        template file is missing.
    """

    def _read_text(file_path, error_message):
        # Shared read-or-raise helper for the stylesheet and templates.
        if not file_path.exists():
            raise Exception(error_message)
        with file_path.open(mode="r", encoding="utf-8") as f:
            return f.read()

    cfg_path = Path(config)
    if not cfg_path.exists():
        raise Exception(
            "Anki model configuration does not exist: {cfg_path}".format(
                cfg_path=cfg_path))

    with cfg_path.open(mode="r", encoding="utf-8") as f:
        cfg = json.load(f)

    model_id = cfg['id']
    name = cfg['name']
    fields = [{'name': f} for f in cfg['fields']]
    model_type = Model.CLOZE if cfg['type'] == 'cloze' else Model.FRONT_BACK

    stylesheet = cfg_path.parent / cfg['styles']
    css = _read_text(
        stylesheet,
        "Anki model stylesheet does not exist: {stylesheet}".format(
            stylesheet=stylesheet))

    templates = []
    for t in cfg['templates']:
        n = t['name']

        q = cfg_path.parent / t['qfmt']
        qfmt = _read_text(
            q,
            "Missing qfmt template in Anki model '{name}', card '{card}': {qfmt}"
            .format(name=name, card=n, qfmt=q))

        a = cfg_path.parent / t['afmt']
        afmt = _read_text(
            a,
            "Missing afmt template in Anki model '{name}', card '{card}': {afmt}"
            .format(name=name, card=n, afmt=a))
        templates.append({'name': n, 'qfmt': qfmt, 'afmt': afmt})

    return Model(model_id=model_id,
                 name=name,
                 fields=fields,
                 templates=templates,
                 css=css,
                 model_type=model_type)
示例#22
0
def test_delete_script(tmp_path: pathlib.PurePath):
    """_get_extra_dirs must keep current versions and flag stale ones."""

    # Directory layout to materialize under tmp_path.
    existing_dirs = [
        'extracted',  # DownloadManager directories should be preserved.
        'dataset/config/1.0.0',
        'my_dataset/other_dir',
        'my_dataset/1.0.0/other',
        'my_dataset/1.1.0',
        'my_dataset/1.2.0/diff',
        'my_dataset/1.2.0/other',
        'my_dataset/1.3.0',
        'my_other_dataset/config/1.2.0',
        'my_other_dataset/config/1.3.0',
        'my_other_dataset/other_config/1.2.0',
        'my_other_dataset/other_config/1.3.0',
        'my_other_dataset/yet_other_config',
        'my_other_dataset/config_deprecated/1.2.0',
        'old_dataset',
    ]
    for rel_dir in existing_dirs:
        tmp_path.joinpath(rel_dir).mkdir(parents=True)

    # Full names the tool currently knows about.
    current = [
        'non_generated_dataset0/1.0.0',
        'non_generated_dataset1/config/1.0.0',
        'dataset/config/1.0.0',
        'my_dataset/1.2.0',
        'my_dataset/1.3.0',
        'my_other_dataset/config/1.3.0',
        'my_other_dataset/other_config/1.3.0',
        'another_dataset/config/1.2.0',
        'another_dataset/other_config/1.1.0',
    ]
    dirs_to_keep, dirs_to_delete = delete_old_versions._get_extra_dirs(
        data_dir=tmp_path,
        current_full_names=current,
    )

    # Only directories matching a current full name survive.
    assert dirs_to_keep == _norm_path(tmp_path, [
        'dataset/config/1.0.0',
        'my_dataset/1.2.0',
        'my_dataset/1.3.0',
        'my_other_dataset/config/1.3.0',
        'my_other_dataset/other_config/1.3.0',
    ])
    # Everything else — old versions, stray configs — is flagged.
    assert dirs_to_delete == _norm_path(tmp_path, [
        'my_dataset/1.0.0',
        'my_dataset/1.1.0',
        'my_dataset/other_dir',
        'my_other_dataset/config/1.2.0',
        'my_other_dataset/config_deprecated',
        'my_other_dataset/other_config/1.2.0',
        'my_other_dataset/yet_other_config',
        'old_dataset',
    ])
示例#23
0
def test_path(
    path_ref: pathlib.PurePath,
    force: bool = True,
    haz_bin: str = "HAZ",
    root_ref: str = "",
    root_test: str = "",
    rtol: float = 1e-3,
) -> bool:
    """Run HAZ on the inputs under *path_ref* and compare its outputs.

    Returns True when every comparable output file matches the
    reference within *rtol*.
    """
    print(path_ref)
    rel = path_ref.relative_to(root_ref)
    path_test = pathlib.Path(root_test, rel)

    # (Re)generate the test outputs when missing or when forced.
    if not path_test.exists() or force:
        try:
            shutil.rmtree(path_test)
            # Wait for my slow computer :-/
            time.sleep(1)
        except FileNotFoundError:
            pass

        # Copy inputs over, then run HAZ while timing the calculation.
        shutil.copytree(path_ref.joinpath("Input"), path_test)
        start = datetime.datetime.now()
        run_haz(path_test, haz_bin)
        time_diff = datetime.datetime.now() - start

        print("Calculation time: {} {}".format(rel, time_diff))

    # Reader dispatch keyed by output extension.
    readers = {".out3": io_tools.read_out3, ".out4": io_tools.read_out4}

    ok = True
    for fpath_test in path_test.iterdir():
        fpath_ref = path_ref.joinpath("Output", fpath_test.name)
        if not fpath_ref.exists():
            continue

        reader = readers.get(fpath_test.suffix)
        if reader is None:
            continue
        expected = reader(str(fpath_ref))
        actual = reader(str(fpath_test))

        # Check for errors
        errors = check_value(actual, expected, rtol, atol=1e-08)
        ok &= not errors
        if errors:
            print("Errors in: %s" % fpath_test)
            print_errors("%s: " % fpath_test, errors)
    return ok
示例#24
0
def main():
    """ Main function
    1. Get all list.json file from himalaya_download_dir
    2. Update and rename each file in album_path
    3. Rename album_path
    :return:
    """

    logging.basicConfig(format='[%(asctime)s] [%(levelname)s]: %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO,
                        handlers=[logging.FileHandler('logfile.log', 'w', 'utf-8')])

    himalaya_download_dir, target_dir, keep_original, verbose = load_config()

    # Silence stdout unless verbose output was requested.
    if not verbose:
        sys.stdout = open(os.devnull, mode='w', encoding='utf-8')

    # 1. Get all list.json file from himalaya_download_dir
    json_file_list = get_json_list(himalaya_download_dir)
    os.makedirs(target_dir, exist_ok=True)
    for jfile in json_file_list:
        tracks = parse_list_json(jfile)
        if not tracks:
            continue
        first_track = list(tracks.values())[0]
        album_path = PurePath(himalaya_download_dir).joinpath(
            str(first_track.get('albumId')))
        if not Path(album_path).is_dir():
            logger.warning('Source folder {0} does not exist.'.format(album_path))
            continue
        album_path_tgt = PurePath(target_dir).joinpath(
            str(first_track.get('album')))
        os.makedirs(album_path_tgt, exist_ok=True)
        # 2. Copy/Move each file in album_path to album_path_tgt
        for file_id3 in tracks.values():
            available = os.listdir(album_path)
            filename = str(file_id3.get('filename'))
            matches = [fn for fn in available if PurePath(fn).stem == filename]
            if not matches:
                logger.warning('File {0} does not exist, skip it.'.format(filename))
                continue
            src_fp = album_path.joinpath(matches[0])
            tgt_fp = album_path_tgt.joinpath(matches[0])
            copy_file_to_tgt(src_fp, tgt_fp)
            # 3. Update ID3/MP4 for tgt_fp, and rename it accordingly
            update_id3(tgt_fp, file_id3)
            rename_with_id3(tgt_fp)
        # 4. Delete original files
        if not keep_original:
            shutil.rmtree(album_path)
示例#25
0
def main(args):
    """Concatenate .npy point arrays and write the combined result.

    Stacks every array in ``args.files`` row-wise, optionally scales
    each column (``args.scale``, comma-separated floats) and/or reorders
    columns (``args.axis``, comma-separated indices), then writes
    ``args.out`` as .las, .csv, or .npy depending on the flags.

    :raises ValueError: when no input files were given.
    """
    print("Joining numpy arrays")
    # Load everything first and stack once — the original re-stacked on
    # every iteration, which is O(n^2) in total rows.
    arrays = [np.load(file) for file in args.files]
    if not arrays:
        raise ValueError("No input files given")
    combined_data = arrays[0] if len(arrays) == 1 else np.vstack(arrays)

    # Per-column scaling (in place, so the original dtype is preserved).
    if args.scale:
        multipliers = [float(s) for s in args.scale.split(',')]
        assert len(multipliers) == combined_data.shape[1]
        for axis_index, mult in enumerate(multipliers):
            combined_data[:, axis_index] = combined_data[:, axis_index] * mult

    # Column reordering.
    if args.axis:
        order = [int(s) for s in args.axis.split(',')]
        assert len(order) == combined_data.shape[1]
        combined_data = combined_data[:, order]

    print("Final size is : {}".format(combined_data.shape))
    if args.las:
        out = PurePath(args.out)
        new_filename = str(PurePath.joinpath(out.parent, out.stem)) + '.las'
        make_las_file(combined_data, new_filename)
    elif args.csv:
        out = PurePath(args.out)
        new_filename = str(PurePath.joinpath(out.parent, out.stem)) + '.csv'
        np.savetxt(new_filename,
                   combined_data,
                   fmt='%.4f',
                   delimiter=',',
                   header=args.csv_header,
                   comments='')
    else:
        np.save(args.out, combined_data)
示例#26
0
def convert_uxf_to_other_format(filename, convertion_type):
    '''
  Uses the umlet command line to convert an umlet drawing into another format.

  Example:
    # to write './_static/n_ergotic_mongol_1.pdf'
    convert_uxf_to_other_format(
      './_static/n_ergotic_mongol_1.uxf', 'pdf')

  '''
    # umlet throws X11 java errors from Linux (hours of wasted time)
    # so I use the windows version instead
    cmd_string = \
      r"cmd.exe /C '{} -action=convert -format={} -filename={}'". \
        format(Windows_Path_To_UMLet, convertion_type, filename)

    p = subprocess.Popen(cmd_string,
                         stdout=subprocess.PIPE,
                         stdin=subprocess.PIPE,
                         shell=True)
    p.communicate()
    p.wait()
    path = PurePath.joinpath(PurePosixPath(os.getcwd()),
                             PurePosixPath(filename))

    basename = Path(filename).resolve().stem
    basepath = str(path.parents[0])
    try:
        # Old versions of umlet mislabel the output as "<stem>.uxf.<fmt>";
        # rename it to the expected "<stem>.<fmt>".
        shutil.move(
            basepath + '/' + basename + ".uxf.{}".format(convertion_type),
            basepath + '/' + basename + '.{}'.format(convertion_type))
    except OSError:
        # Nothing to rename: a newer umlet already wrote the right name.
        # (Was a bare `except: pass`, which also swallowed KeyboardInterrupt
        # and unrelated bugs; shutil errors are all OSError subclasses.)
        pass
示例#27
0
def main():
    """Entry point for the CI runner: parse the command line, load the
    requested build configuration and execute it, exiting with the
    build's status code."""
    this_dir = PurePath(os.path.realpath(__file__)).parent

    # By default look for the configuration file next to this script.
    fallback_config = Path(this_dir / "build-configurations.yml")

    arg_parser = argparse.ArgumentParser(description="Run a CI build")
    arg_parser.add_argument("build", help="The name of the build to run")
    arg_parser.add_argument(
        "--config",
        "-c",
        help="Path to the builds configuration file (default to {})".format(
            str(fallback_config)))

    known, passthrough = arg_parser.parse_known_args()

    # Fall back to the default location when no --config was given; the
    # BuildConfiguration constructor checks that the file exists.
    chosen_config = Path(known.config) if known.config else fallback_config
    configuration = BuildConfiguration(this_dir, chosen_config, known.build)

    # TeamCity gets its own build flavour for service-message reporting.
    if is_running_under_teamcity():
        runner = TeamcityBuild(configuration)
    else:
        runner = UserBuild(configuration)

    sys.exit(runner.run(passthrough)[0])
示例#28
0
def get_output_file(mkdir=True):
    """Return the path of a timestamped .jpg file under a dated directory.

    The path has the form ``<args.basedir>/<YYYY-MM-DD>/<YYYYMMDD>_<MMMMM>.jpg``
    where ``MMMMM`` is the zero-padded minute of the day, so successive
    captures sort lexicographically.

    Parameters:
        mkdir: when True, create the dated directory if it does not exist.
    """
    now = time.localtime()
    # Wrap basedir in PurePath so both str and PurePath values work
    # (the unbound PurePath.joinpath(...) call failed on plain strings).
    directory = PurePath(args.basedir).joinpath(time.strftime("%Y-%m-%d", now))
    if mkdir:
        Path(directory).mkdir(parents=True, exist_ok=True)
    minutes = now.tm_hour * 60 + now.tm_min
    return f'{directory}/{time.strftime("%Y%m%d", now)}_{str(minutes).rjust(5, "0")}.jpg'
示例#29
0
    def build_demo_image(self):
        """
        Build an image for the demo container. Use the dockerfile located at this folder.

        The command to build a container is:
        podman build -f Dockerfile -t httpdemo
        """
        # Resolve the Dockerfile that sits next to this module.
        dockerfile_dir = str(Path(__file__).parent / "Dockerfile")
        # Pass the command as an argument list (shell=False): no shell
        # parsing, so a Dockerfile path containing spaces or shell
        # metacharacters cannot break or alter the command.
        command = ["podman", "build", "-f", dockerfile_dir, "-t", "httpdemo"]

        try:
            p = subprocess.run(
                command,
                text=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                check=True,
            )
            print(p.stdout)
        except subprocess.CalledProcessError as e:
            # Build failed: keep the best-effort behaviour (print, don't
            # raise) but also surface podman's own diagnostics.
            print(e)
            print(e.stderr)
        except OSError as e:
            # podman binary missing or not executable.
            print(e)
示例#30
0


# -- Working with pure paths (PurePath) ----------------------------------
from pathlib import PurePath
from pprint import pprint  # used by every demo line below; was not imported

p = PurePath('/hoge/fuga/piyo.txt')
pprint(p.drive)
pprint(p.root)
pprint(p.anchor)
pprint(list(p.parents))
pprint(p.parent)
pprint(p.name)
pprint(p.suffix)
pprint(p.stem)
pprint(p.is_absolute())
pprint(p.joinpath('foo', 'bar', 'baz'))
pprint(p.match('piyo.*'))


# -- Working with concrete paths (Path) ----------------------------------
from pathlib import Path
p = Path.cwd() / 'newfile.txt'
pprint(p)
pprint(p.exists())
# Use a context manager so the handle is closed even on error
# (the original bound the file object and never closed it).
with p.open('w+') as f:
    pprint(p.exists())
pprint(p.resolve())


# -- Directory traversal --------------------------------------------------
示例#31
0
# -- time and locale --

TIME_ZONE = 'Asia/Jerusalem'

USE_TZ = True

LANGUAGE_CODE = 'ru-RU'

USE_I18N = False
USE_L10N = False

# -- paths and urls --

# Project root: two directory levels above this settings module.
OUR_ROOT = PurePath(__file__).parents[2]

MEDIA_ROOT = (OUR_ROOT / 'media').as_posix()

MEDIA_URL = '/media/'

STATIC_ROOT = (OUR_ROOT / '_pub').as_posix()

STATIC_URL = '/pub/'

STATICFILES_DIRS = ((OUR_ROOT / 'pub').as_posix(),)

TEMPLATE_DIRS = ((OUR_ROOT / 'templates').as_posix(),)

ROOT_URLCONF = 'rarjpeg.urls'

# -- auth and other things --
示例#32
-1
    def generate_client_library(self):
        """Generate gRPC client stubs for a service registered on-chain.

        Resolves the output base directory (``--protodir`` or a
        ./client_libraries default), fetches the service's .proto sources
        from IPFS using the model hash found in the registry metadata, and
        compiles them into
        ``<base>/<language>/<Registry address>/<org_id>/<service_id>``.
        """
        cur_dir_path = PurePath(os.getcwd())

        # Default base dir: ./client_libraries under the cwd, created on demand.
        if not self.args.protodir:
            client_libraries_base_dir_path = cur_dir_path.joinpath("client_libraries")
            if not os.path.exists(client_libraries_base_dir_path):
                os.makedirs(client_libraries_base_dir_path)
        else:
            # An explicit --protodir must already exist; relative paths are
            # resolved against the current working directory.
            if os.path.isabs(self.args.protodir):
                client_libraries_base_dir_path = PurePath(self.args.protodir)
            else: 
                client_libraries_base_dir_path = cur_dir_path.joinpath(self.args.protodir)

            if not os.path.isdir(client_libraries_base_dir_path):
                self._error("directory {} does not exist. Please make sure that the specified path exists".format(client_libraries_base_dir_path))

        # Create service client libraries path
        library_language = self.args.language
        library_org_id = self.args.org_id
        library_service_id = self.args.service_id

        library_dir_path = client_libraries_base_dir_path.joinpath(library_language, get_contract_address(self, "Registry"), library_org_id, library_service_id)

        # The registry metadata's model_ipfs_hash locates the proto sources.
        metadata = self._get_service_metadata_from_registry()
        model_ipfs_hash = metadata["model_ipfs_hash"]

        with TemporaryDirectory() as temp_dir: 
            temp_dir_path = PurePath(temp_dir)
            proto_temp_dir_path = temp_dir_path.joinpath(library_language, library_org_id, library_service_id)
            safe_extract_proto_from_ipfs(self._get_ipfs_client(), model_ipfs_hash, proto_temp_dir_path)

        # Compile proto files (note: despite this comment's indentation the
        # call below is still inside the TemporaryDirectory block, so the
        # extracted sources are compiled before the directory is cleaned up)
            compile_proto(Path(proto_temp_dir_path), library_dir_path)

        self._printout('client libraries for service with id "{}" in org with id "{}" generated at {}'.format(library_service_id, library_org_id, library_dir_path))
示例#33
-1
def convert_uxf_to_other_format(filename, convertion_type):
    """Convert an umlet .uxf drawing into another format via the UMLet CLI.

    Example:
        # to write './_static/n_ergotic_mongol_1.pdf'
        convert_uxf_to_other_format('./_static/n_ergotic_mongol_1.uxf', 'pdf')
    """
    # umlet throws X11 java errors when run from Linux, so the Windows
    # binary is invoked through cmd.exe instead.
    command = r"cmd.exe /C '{} -action=convert -format={} -filename={}'". \
        format(Windows_Path_To_UMLet, convertion_type, filename)

    proc = subprocess.Popen(command,
                            stdout=subprocess.PIPE,
                            stdin=subprocess.PIPE,
                            shell=True)
    proc.communicate()
    proc.wait()

    full_path = PurePath.joinpath(PurePosixPath(os.getcwd()),
                                  PurePosixPath(filename))
    out_dir = str(full_path.parents[0])
    stem = Path(filename).resolve().stem

    # Old UMLet versions mislabel the output as "<stem>.uxf.<fmt>";
    # rename it to the expected "<stem>.<fmt>".
    shutil.move(out_dir + '/' + stem + ".uxf.{}".format(convertion_type),
                out_dir + '/' + stem + '.{}'.format(convertion_type))
示例#34
-1
File: config.py  Project: bladams/keg
    def config_file_paths(self):
        """Return candidate config-file paths, ordered from most global
        (site config directories) to most local (the app's own directory)."""
        app_name = self.app_import_name
        config_fname = '{}-config.py'.format(app_name)

        search_dirs = []
        if appdirs.system == 'win32':
            # On Windows derive an "/etc"-style tree from the system drive.
            system_drive = PurePath(self.dirs.site_config_dir).drive
            etc_dir = PurePath(system_drive, '/', 'etc')
            search_dirs.append(self.dirs.site_config_dir)
            search_dirs.append(str(etc_dir.joinpath(app_name)))
            search_dirs.append(str(etc_dir))
        else:
            # site_config_dir may be a colon-separated list of directories.
            search_dirs += self.dirs.site_config_dir.split(':')
            search_dirs.append('/etc/{}'.format(app_name))
            search_dirs.append('/etc')
        search_dirs.append(self.dirs.user_config_dir)
        search_dirs.append(osp.dirname(self.app_root_path))

        return [osp.join(d, config_fname) for d in search_dirs]
示例#35
-1
 def getMirrorPath(self, filename):
     '''Return the location of *filename* inside the mirror directory.

     Joins self.mirrorDir with the path of *filename* relative to the
     source tree (computed by self.getRelativePath on the absolute path).
     Assumes self.mirrorDir is a PurePath and *filename* supports
     .absolute() (i.e. is a pathlib.Path) -- TODO confirm with callers.
     '''
     # PurePath.joinpath is called unbound: equivalent to
     # self.mirrorDir.joinpath(...) when mirrorDir is a PurePath instance.
     return PurePath.joinpath(self.mirrorDir, self.getRelativePath(filename.absolute()))