Example #1
    def save_yaml(self):
        """Saves yaml

        Raises:
            PluginError: Error
        """
        top_fields = ['title', 'url']

        def sort_key(item: typing.Tuple[str, typing.Any]) -> typing.Tuple[int, str]:
            # Sort important fields first, then the rest of the fields alphabetically
            try:
                return top_fields.index(item[0]), ''
            except ValueError:
                return len(top_fields), item[0]

        out = []
        for entry in self.entries:
            filtered_entry = json.coerce(self.filter_keys(entry))
            out.append(OrderedDict(sorted(filtered_entry.items(), key=sort_key)))

        try:
            # By default we try to write strings natively to the file, for nicer manual reading/writing
            out_bytes = dump_yaml(
                out, default_flow_style=False, encoding=self.encoding, allow_unicode=True
            )
        except UnicodeEncodeError:
            # If strings are not representable in the specified file encoding, let yaml use backslash escapes
            out_bytes = dump_yaml(out, default_flow_style=False, encoding=self.encoding)

        try:
            with open(self.filename, 'wb') as outfile:
                outfile.write(out_bytes)
        except Exception as e:
            raise PluginError(f'Error writing data to `{self.filename}`: {e}')
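The try/except around dump_yaml in Example #1 is the interesting part: with allow_unicode=True PyYAML writes non-ASCII characters natively and raises UnicodeEncodeError if the target codec cannot represent them, so a second dump without allow_unicode falls back to backslash escapes. A minimal standalone sketch of that idea, assuming dump_yaml is PyYAML's yaml.dump as aliased in Example #31:

import yaml


def dump_with_fallback(data, encoding):
    try:
        # Prefer native characters for nicer manual reading/writing.
        return yaml.dump(data, default_flow_style=False,
                         encoding=encoding, allow_unicode=True)
    except UnicodeEncodeError:
        # Not representable in this codec: let PyYAML emit backslash escapes.
        return yaml.dump(data, default_flow_style=False, encoding=encoding)


print(dump_with_fallback({'title': 'naïve café'}, 'ascii').decode('ascii'))
# prints something like: title: "na\xEFve caf\xE9"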
Example #2
def write_yaml(metadata, yml_path):

    # open YAML file for writing; binary mode, because dump(..., encoding=...) emits bytes
    with open(yml_path, 'wb') as yml_file:

        # write metadata to YAML file
        dump_yaml(metadata,
                  yml_file,
                  default_flow_style=False,
                  encoding='utf-8',
                  allow_unicode=True)
Example #3
    def to_yaml(self, destination):
        """
        Save a dictionnary into a YAML file.

        Argument:
            - destination: str
                A path to a file where we're going to write the converted dict into a JSON format.
        """

        with open(destination, "w") as file:
            dump_yaml(self.main_dictionnary, file, indent=4)
Example #4
 def safe_config(self, config_to_save):
     if config_to_save == 'network':
         config = self.config
         file_config = self.network_file
     elif config_to_save == 'replication':
         config = self.replication_config
         file_config = self.replication_network_file
     else:
         return False
     with file_config.open('w', encoding='utf-8') as stream:
         dump_yaml(config, stream)
Example #5
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--fields',
        type=argparse.FileType('r'),
        help='Two-column TSV: Field name and description')
    args = parser.parse_args()

    field_list = []
    for row in csv.reader(args.fields, dialect='excel-tab'):
        if len(row) == 2:
            field_list.append({
                'name': row[0],
                'description': row[1]
            })
    field_list[0] = {
        # Rebuild dict, so 'heading' is first.
        'heading': 'Level 2',
        **field_list[0]
    }
    level_1_overrides = [
        {
            'name': name,
            'constraints': {
                'enum': ['TODO']
            }
        }
        for name in ['assay_category', 'assay_type', 'analyte_class']
    ]
    print(dump_yaml({
        'doc_url': 'TODO',
        'fields': level_1_overrides + field_list
    }, sort_keys=False))

    return 0
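Example #5 relies on sort_keys=False, which newer PyYAML versions support, so the dumped mapping keeps the insertion order built above (overrides first, then fields, with the rebuilt 'heading' key leading its dict). A small illustration with made-up field data:

import yaml

record = {'description': 'Name of the assay', 'heading': 'Level 2'}
# Rebuild the dict so 'heading' is first, as in Example #5.
record = {'heading': record.pop('heading'), **record}
print(yaml.dump(record, sort_keys=False))
# heading: Level 2
# description: Name of the assay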
Example #6
    def hack_config_for_multi_region(self, ssh_options, seeds):
        instances = self.get_instances()
        downloaded_file = os.path.join("/tmp", "cassandra.yaml.downloaded")
        for instance in instances:
            with settings(host_string=instance.public_dns_name, warn_only=True):
                # download config file
                print "downloading config from %s" % instance.public_dns_name
                get("/etc/cassandra/cassandra.yaml", downloaded_file)

                print "modifying config from %s" % instance.public_dns_name
                yaml = parse_yaml(urllib.urlopen(downloaded_file))
                yaml['seed_provider'][0]['parameters'][0]['seeds'] = seeds
                yaml['listen_address'] = str(instance.public_dns_name)
                yaml['rpc_address'] = str(instance.public_dns_name)
                yaml['broadcast_address'] = socket.gethostbyname(str(instance.public_dns_name))
                yaml['endpoint_snitch'] = 'org.apache.cassandra.locator.Ec2MultiRegionSnitch'
                
                print "saving config from %s" % instance.public_dns_name
                fd, temp_file = tempfile.mkstemp(prefix='cassandra.yaml_', text=True)
                os.write(fd, dump_yaml(yaml))
                os.close(fd)

                #upload config file
                print "uploading new config to %s" % instance.public_dns_name
                put(temp_file, "/etc/cassandra/cassandra.yaml", use_sudo=use_sudo())

                os.unlink(temp_file)
                os.unlink(downloaded_file)
Example #7
    def _save_yaml(self, config):
        console_message = 'Path to main config ? [{:}] '.format(self.PlatformSetup.MAIN_CONFIG_PATH)
        main_config_path = raw_input(console_message) or self.PlatformSetup.MAIN_CONFIG_PATH
        self._create_directory(dirname(main_config_path))

        with open(main_config_path, 'w') as fd:
            fd.write(dump_yaml(config, default_flow_style=False, indent=4, line_break='\n\n'))
Example #8
def _validation_error_to_string(error, indent):
    schema_string = ''.join([
        f'\n{indent}{line}'
        for line in dump_yaml(error.schema[error.validator]).split('\n')
    ])
    fail_message = f'''
fails this "{error.validator}" check:
{schema_string}
    '''

    error_type = type(error.instance)

    if error_type == str:
        return f'''This string:
{indent}{error.instance}{fail_message}
        '''

    if error_type == dict:
        return f'''This item:
{_to_dir_listing([error.instance], indent)}{fail_message}
        '''

    if error_type == list:
        return f'''This directory:
{_to_dir_listing(error.instance, indent)}{fail_message}
        '''

    raise Exception(f'Unrecognized type "{error_type}"')
Example #9
    def hack_config_for_multi_region(self, ssh_options, seeds):
        instances = self.get_instances()
        downloaded_file = "cassandra.yaml.downloaded"
        for instance in instances:

            # download config file
            print "downloading config from %s" % instance.public_dns_name
            scp_command = 'scp %s root@%s:/usr/local/apache-cassandra/conf/cassandra.yaml %s' % (xstr(ssh_options), instance.public_dns_name, downloaded_file)
            subprocess.call(scp_command, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)

            print "modifying config from %s" % instance.public_dns_name
            yaml = parse_yaml(urllib.urlopen(downloaded_file))
            yaml['seed_provider'][0]['parameters'][0]['seeds'] = seeds
            yaml['listen_address'] = str(instance.public_dns_name)
            yaml['rpc_address'] = str(instance.public_dns_name)
            yaml['broadcast_address'] = socket.gethostbyname(str(instance.public_dns_name))
            yaml['endpoint_snitch'] = 'org.apache.cassandra.locator.Ec2MultiRegionSnitch'
            
            print "saving config from %s" % instance.public_dns_name
            fd, temp_file = tempfile.mkstemp(prefix='cassandra.yaml_', text=True)
            os.write(fd, dump_yaml(yaml))
            os.close(fd)

            #upload config file
            print "uploading new config to %s" % instance.public_dns_name
            scp_command = 'scp %s %s root@%s:/usr/local/apache-cassandra/conf/cassandra.yaml' % (xstr(ssh_options), temp_file, instance.public_dns_name)
            subprocess.check_call(scp_command, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)

            os.unlink(temp_file)
            os.unlink(downloaded_file)
Example #10
    def _modify_config_file(self,
                            instance,
                            config_file,
                            seed_ips,
                            token,
                            set_tokens=True,
                            auto_bootstrap=False):
        # YAML (0.7.x+)
        if config_file.endswith(".yaml"):
            remote_file = "cassandra.yaml"

            yaml = parse_yaml(urllib.urlopen(config_file))
            yaml['seed_provider'][0]['parameters'][0]['seeds'] = ",".join(
                seed_ips)
            if set_tokens is True:
                yaml['initial_token'] = token
            if auto_bootstrap:
                yaml['auto_bootstrap'] = 'true'
            yaml['data_file_directories'] = ['/mnt/cassandra-data']
            yaml['commitlog_directory'] = '/mnt/cassandra-logs'
            yaml['listen_address'] = str(instance.public_dns_name)
            yaml['rpc_address'] = str(instance.public_dns_name)

            fd, temp_file = tempfile.mkstemp(prefix='cassandra.yaml_',
                                             text=True)
            os.write(fd, dump_yaml(yaml))
            os.close(fd)
        else:
            raise Exception(
                "Configuration file must be yaml (implies Cassandra 0.7.x or greater)"
            )

        return temp_file, remote_file
Example #11
    def save_game(self):
        """
        saves the game to a file in the ./saves/ directory
        files are named DDMMYY-HR_MIN_SEC.yaml (24-hour clock for HR)
        """
        # Generate the save filename from the date / time
        filename = "./saves/" + strftime("%d%m%y-%H_%M_%S") + ".yaml"
        # generate the data to save from the data the game has about the current game :)
        save_data = {
            "puck": {
                "pos": self.puck.pos,
                "vel": self.puck.vel,
                "color": self.puck.color
            },
            "player1": {
                "pos": self.player1.pos,
                "score": self.player1.score,
                "color": self.player1.color
            },
            "player2": {
                "pos": self.player2.pos,
                "score": self.player2.score,
                "color": self.player2.color
            },
            "fullscreen": self.window.fullscreen,
            "show_fps": self.show_fps,
            "options": self.options
        }
        # Save the data to the generated file
        with open(filename, "w") as file_:
            file_.write(dump_yaml(save_data))

        # set the current save variable
        self.current_save = filename.split("/")[2]
Example #12
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('type', help='What type to generate')
    parser.add_argument('target',
                        type=dir_path,
                        help='Directory to write output to')
    args = parser.parse_args()

    schema_versions = dict_schema_versions()
    versions = sorted(schema_versions[args.type])
    assert versions, f'No versions for {args.type}'
    max_version = max(versions)

    is_assay = get_is_assay(args.type)
    if is_assay:
        table_schemas = {v: get_table_schema(args.type, v) for v in versions}
        directory_schema = get_directory_schema(args.type)
    else:
        table_schemas = {v: get_other_schema(args.type, v) for v in versions}
        directory_schema = {}

    # README.md:
    with open(Path(args.target) / 'README.md', 'w') as f:
        url = f'https://hubmapconsortium.github.io/ingest-validation-tools/{args.type}/'
        f.write(f'Moved to [github pages]({url}).')

    # index.md:
    with open(Path(args.target) / 'index.md', 'w') as f:
        f.write(
            generate_readme_md(table_schemas,
                               directory_schema,
                               args.type,
                               is_assay=is_assay))

    # YAML:
    for v in versions:
        schema = table_schemas[v]
        first_field = schema['fields'][0]
        if first_field['name'] == 'version':
            assert first_field['constraints']['enum'] == [v], \
                f'Wrong version constraint in {args.type}-v{v}.yaml'
        assert schema['fields'][0]
        with open(Path(args.target) / f'v{v}.yaml', 'w') as f:
            f.write('# Generated YAML: PRs should not start here!\n' +
                    dump_yaml(schema))

    # Data entry templates:
    with open(
            Path(args.target) / get_tsv_name(args.type, is_assay=is_assay),
            'w') as f:
        max_schema = table_schemas[max_version]
        f.write(generate_template_tsv(max_schema))
    create_xlsx(max_schema,
                Path(args.target) /
                get_xlsx_name(args.type, is_assay=is_assay),
                idempotent=True,
                sheet_name='Export as TSV')
Example #13
def main():
    mapping = {}
    for assay_type in list_types():
        try:
            schema = get_table_schema(assay_type)
        except Exception as e:
            print(f'Processing: {assay_type}\n{e}', file=sys.stderr)
            return 1
        _add_field_descriptions_to_mapping(schema['fields'], mapping)
    sample_schema = get_sample_schema()
    _add_field_descriptions_to_mapping(sample_schema['fields'], mapping)
    print(dump_yaml(mapping))
    return 0
Example #14
    def to_yaml(self, destination, flow_style=False):
        """
        Save a dictionnary into a YAML file.

        :param str destination:
            A path to a file where we're going to write the
            converted dict into a JSON format.
        """

        with open(destination, "w") as file:
            # We open the file we are going to write.
            # Note: We always overwrite the destination.

            # We save the current dictionnary into a json format.
            dump_yaml(
                self.main_dictionnary,
                file,
                encoding="utf-8",
                allow_unicode=True,
                indent=4,
                default_flow_style=flow_style,
            )
Example #15
def main():
    parser = argparse.ArgumentParser(
        description='Outputs a YAML dict listing fields and their definitions.'
    )
    parser.parse_args()

    mapping = {}
    for schema_version in list_schema_versions():
        schema_name = schema_version.schema_name
        get_schema = get_table_schema if get_is_assay(
            schema_name) else get_other_schema
        schema = get_schema(schema_version.schema_name, schema_version.version)
        _add_field_descriptions_to_mapping(schema['fields'], mapping)
    print(dump_yaml(mapping))
    return 0
Example #16
    def _configure_cassandra_instance(self, instance, seed_ips, token, set_tokens=True, auto_bootstrap=False):
        self.logger.debug("Configuring %s..." % instance.id)
        yaml_file = os.path.join("/tmp", "cassandra.yaml")
        cassandra_home = self.get_cassandra_home(instance)

        self.logger.debug("Local cassandra.yaml file: %s" % yaml_file)
        with settings(host_string=instance.public_dns_name, warn_only=True): #, hide("everything"):

            cassandra_data = os.path.join("/mnt", "cassandra-data")
            cassandra_logs = os.path.join("/mnt", "cassandra-logs")

            # create directories and log files
            exec_command("mkdir -p %s" % cassandra_data)
            exec_command("mkdir -p %s" % cassandra_logs)

            # set permissions
            exec_command("chown -R cassandra:cassandra %s %s" % (cassandra_data, cassandra_logs))

            try:
                # get yaml file
                get(os.path.join(cassandra_home, "conf", "cassandra.yaml"), "/tmp")

                # modify it
                f = open(yaml_file)
                yaml = parse_yaml(f)
                f.close()

                yaml['seed_provider'][0]['parameters'][0]['seeds'] = ",".join(seed_ips)
                if set_tokens is True :
                    yaml['initial_token'] = token
                if auto_bootstrap :
                    yaml['auto_bootstrap'] = 'true'
                yaml['data_file_directories'] = [cassandra_data]
                yaml['commitlog_directory'] = cassandra_logs
                yaml['listen_address'] = str(instance.private_dns_name)
                yaml['rpc_address'] = str(instance.public_dns_name)

                f = open(yaml_file, "w")
                f.write(dump_yaml(yaml))
                f.close()

                # put modified yaml file
                put(yaml_file, os.path.join(cassandra_home, "conf", "cassandra.yaml"), use_sudo=use_sudo())
            except SystemExit, e:
                raise
                pass
Example #17
def main():
    parser = argparse.ArgumentParser(
        description='Translate a directory of TSVs into YAML.')
    parser.add_argument('--definitions',
                        type=_dir_path,
                        required=True,
                        help='Definitions directory, containing TSVs')

    args = parser.parse_args()
    path = Path(args.definitions)

    output = {}
    output['fields'] = read_fields(path / 'fields.tsv')
    output['enums'] = read_enums(path / 'enums')

    print(dump_yaml(output))
    return 0
Example #18
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('type', help='What type to generate')
    parser.add_argument('target',
                        type=dir_path,
                        help='Directory to write output to')
    args = parser.parse_args()

    schema_versions = dict_schema_versions()
    versions = sorted(schema_versions[args.type])
    max_version = max(versions)

    is_assay = get_is_assay(args.type)
    if is_assay:
        table_schemas = {v: get_table_schema(args.type, v) for v in versions}
        directory_schema = get_directory_schema(args.type)
    else:
        table_schemas = {v: get_other_schema(args.type, v) for v in versions}
        directory_schema = {}

    # README:
    with open(Path(args.target) / 'README.md', 'w') as f:
        f.write(
            generate_readme_md(table_schemas,
                               directory_schema,
                               args.type,
                               is_assay=is_assay))

    # YAML:
    for v in versions:
        with open(Path(args.target) / f'v{v}.yaml', 'w') as f:
            f.write('# Generated YAML: PRs should not start here!\n' +
                    dump_yaml(table_schemas[v]))

    # Data entry templates:
    with open(
            Path(args.target) / get_tsv_name(args.type, is_assay=is_assay),
            'w') as f:
        max_schema = table_schemas[max_version]
        f.write(generate_template_tsv(max_schema))
    create_xlsx(max_schema,
                Path(args.target) /
                get_xlsx_name(args.type, is_assay=is_assay),
                idempotent=True,
                sheet_name='Export as TSV')
Example #19
 def apply_options(self):
     """
     Applies options from either the save or the temp game options file
     """
     options_menu = self.menus["options"]
     # Sets the options variable to a new Options instance
     self.options = Options(player1_color=options_menu.get_element_by_id(
         "player1_color").get_color(),
                            player2_color=options_menu.get_element_by_id(
                                "player2_color").get_color(),
                            puck_color=options_menu.get_element_by_id(
                                "puck_color").get_color(),
                            difficulty=options_menu.get_element_by_id(
                                "difficulty", "slider").get_value(),
                            is_fullscreen=self.window.fullscreen)
     # Applies the new options
     self.options.apply_settings(self)
     with open("./resources/.temp_options.yaml", "w") as file_:
         # Writes the new options to the temp file
         file_.write(dump_yaml({"options": self.options}))
Example #20
def main():
    parser = argparse.ArgumentParser(
        description='Translate definitions as YAML into JSON Schemas.')
    parser.add_argument('--definitions',
                        type=argparse.FileType('r'),
                        required=True,
                        help='Definitions YAML')
    parser.add_argument('--schemas',
                        type=_dir_path,
                        required=True,
                        help='Output directory for JSON Schema')

    args = parser.parse_args()
    definitions = load_yaml(args.definitions.read())

    for entity_type in ['donor', 'sample', 'dataset', 'collection']:
        path = args.schemas / f'{entity_type}.schema.yaml'
        path.write_text(dump_yaml(make_schema(entity_type, definitions)))

    return 0
Example #21
    def toggle_fullscreen(self):
        """
        toggles the fullscreen-ness of the game, obviously
        """
        # save the current options to the temp options file
        self.apply_options()

        # open the temporary file for the player, puck positions
        with open("./resources/.temp.yaml", "w") as file_:
            file_.write(
                dump_yaml({
                    # correct height for the players
                    "player1":
                    self.player1.pos.y / self.window.height,
                    "player2":
                    self.player2.pos.y / self.window.height,
                    # rationalise the position
                    "puck":
                    Vector2D(x=self.puck.pos.x / self.window.width,
                             y=self.puck.pos.y / self.window.height)
                }))
        # toggle the window's fullscreen attribute
        self.window.set_fullscreen(not self.window.fullscreen)
        # reload the menus
        self.load_menus()
        # reload the options
        self.load_options()
        # reset the players (fix the x pos)
        [player.reset() for player in self.players]

        # open the puck, player temp file
        with open("./resources/.temp.yaml", "r") as file_:
            # load the data from yaml
            yaml_data = load_yaml(file_.read())
            # set the player1, 2 y positions / derationalise them
            self.player1.pos.y = yaml_data["player1"] * self.window.height
            self.player2.pos.y = yaml_data["player2"] * self.window.height

            # set the puck pos, derationalise
            self.puck.pos = yaml_data["puck"] * (self.window.width,
                                                 self.window.height)
Example #22
    def hack_config_for_multi_region(self, ssh_options, seeds):
        instances = self.get_instances()
        downloaded_file = "cassandra.yaml.downloaded"
        for instance in instances:

            # download config file
            print "downloading config from %s" % instance.public_dns_name
            scp_command = 'scp %s root@%s:/usr/local/apache-cassandra/conf/cassandra.yaml %s' % (
                xstr(ssh_options), instance.public_dns_name, downloaded_file)
            subprocess.call(scp_command,
                            shell=True,
                            stderr=subprocess.PIPE,
                            stdout=subprocess.PIPE)

            print "modifying config from %s" % instance.public_dns_name
            yaml = parse_yaml(urllib.urlopen(downloaded_file))
            yaml['seed_provider'][0]['parameters'][0]['seeds'] = seeds
            yaml['listen_address'] = str(instance.public_dns_name)
            yaml['rpc_address'] = str(instance.public_dns_name)
            yaml['broadcast_address'] = socket.gethostbyname(
                str(instance.public_dns_name))
            yaml[
                'endpoint_snitch'] = 'org.apache.cassandra.locator.Ec2MultiRegionSnitch'

            print "saving config from %s" % instance.public_dns_name
            fd, temp_file = tempfile.mkstemp(prefix='cassandra.yaml_',
                                             text=True)
            os.write(fd, dump_yaml(yaml))
            os.close(fd)

            #upload config file
            print "uploading new config to %s" % instance.public_dns_name
            scp_command = 'scp %s %s root@%s:/usr/local/apache-cassandra/conf/cassandra.yaml' % (
                xstr(ssh_options), temp_file, instance.public_dns_name)
            subprocess.check_call(scp_command,
                                  shell=True,
                                  stderr=subprocess.PIPE,
                                  stdout=subprocess.PIPE)

            os.unlink(temp_file)
            os.unlink(downloaded_file)
Example #23
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'type',
        choices=list_types(),
        help='What type to generate')
    parser.add_argument(
        'target',
        type=dir_path,
        help='Directory to write output to')
    args = parser.parse_args()

    table_schema = get_table_schema(args.type)
    directory_schemas = get_directory_schemas(args.type)

    with open(Path(args.target) / get_tsv_name(args.type), 'w') as f:
        f.write(generate_template_tsv(table_schema))
    with open(Path(args.target) / 'README.md', 'w') as f:
        f.write(generate_readme_md(table_schema, directory_schemas, args.type))
    with open(Path(args.target) / 'unified.yaml', 'w') as f:
        f.write(
            f'# NOTE: Do not edit this; It is generated by {__file__}.\n\n'
            + dump_yaml(table_schema))
Example #24
    def _modify_config_file(self, instance, config_file, seed_ips, token, set_tokens=True, auto_bootstrap=False):
        # YAML (0.7.x+)
        if config_file.endswith(".yaml"):
            remote_file = "cassandra.yaml"

            yaml = parse_yaml(urllib.urlopen(config_file))
            yaml['seed_provider'][0]['parameters'][0]['seeds'] = ",".join(seed_ips)
            if set_tokens is True :
                yaml['initial_token'] = token
            if auto_bootstrap :
                yaml['auto_bootstrap'] = 'true'
            yaml['data_file_directories'] = ['/mnt/cassandra-data']
            yaml['commitlog_directory'] = '/mnt/cassandra-logs'
            yaml['listen_address'] = str(instance.public_dns_name)
            yaml['rpc_address'] = str(instance.public_dns_name)
            
            fd, temp_file = tempfile.mkstemp(prefix='cassandra.yaml_', text=True)
            os.write(fd, dump_yaml(yaml))
            os.close(fd)
        else:
            raise Exception("Configuration file must be yaml (implies Cassandra 0.7.x or greater)") 

        return temp_file, remote_file
Example #25
    def write(self, path=None, erase=False):
        """Save the current session in a folder that will have the name of the
        ORIGIN object (self.name).

        The ORIGIN.load(folder, newname=None) method will be used to load a
        session. The parameter newname will let the user to load a session but
        continue in a new one.

        Parameters
        ----------
        path : str
            Path where the folder (self.name) will be stored.
        erase : bool
            Remove the folder if it exists.

        """
        self._loginfo("Writing...")

        # adapt session if path changes
        if path is not None and path != self.path:
            if not os.path.exists(path):
                raise ValueError(f"path does not exist: {path}")
            self.path = path
            outpath = os.path.join(path, self.name)
            # copy outpath to the new path
            shutil.copytree(self.outpath, outpath)
            self.outpath = outpath
            self._setup_logfile(self.logger)

        if erase:
            shutil.rmtree(self.outpath)
        os.makedirs(self.outpath, exist_ok=True)

        # PSF
        if isinstance(self.PSF, list):
            for i, psf in enumerate(self.PSF):
                cube = Cube(data=psf, mask=np.ma.nomask, copy=False)
                cube.write(os.path.join(self.outpath,
                                        "cube_psf_%02d.fits" % i))
        else:
            cube = Cube(data=self.PSF, mask=np.ma.nomask, copy=False)
            cube.write(os.path.join(self.outpath, "cube_psf.fits"))

        if self.wfields is not None:
            for i, wfield in enumerate(self.wfields):
                im = Image(data=wfield, mask=np.ma.nomask)
                im.write(os.path.join(self.outpath, "wfield_%02d.fits" % i))

        if self.ima_white is not None:
            self.ima_white.write("%s/ima_white.fits" % self.outpath)

        for step in self.steps.values():
            step.dump(self.outpath)

        # parameters in .yaml
        with open(f"{self.outpath}/{self.name}.yaml", "w") as stream:
            dump_yaml(self.param, stream)

        # step3 - saving this manually for now
        if self.nbAreas is not None:
            if self.testO2 is not None:
                for area in range(1, self.nbAreas + 1):
                    np.savetxt("%s/testO2_%d.txt" % (self.outpath, area),
                               self.testO2[area - 1])
            if self.histO2 is not None:
                for area in range(1, self.nbAreas + 1):
                    np.savetxt("%s/histO2_%d.txt" % (self.outpath, area),
                               self.histO2[area - 1])
            if self.binO2 is not None:
                for area in range(1, self.nbAreas + 1):
                    np.savetxt("%s/binO2_%d.txt" % (self.outpath, area),
                               self.binO2[area - 1])

        self._loginfo("Current session saved in %s", self.outpath)
Example #26
 def __str__(self):
     return "---\n" + dump_yaml(self.__config__)
Example #27
    n, *_ = tags.shape
    ints = np.fromiter(reader['fel_intensity'], 'double')
    period = next(reader['bg_period.each_file'])
    mask = ((np.fromiter(
        (img.sum()
         for img in tqdm(imgs(), total=n)), 'double') == 0) | ~good_int(ints))
    isbg = (np.mod(tags, period) == 0) & ~mask
    issig = (np.mod(tags, period) != 0) & ~mask
    isdiff = issig
    bgimg = reduce_imgs(imgs(), isbg)
    sigimg = reduce_imgs(imgs(), issig)
    diffimg = sigimg - bgimg

# write
with open('{}config.yaml'.format(prefix), 'w') as f:
    f.write(dump_yaml(config, default_flow_style=False))

# with H5File('{}reduced.h5'.format(prefix), 'w') as f:  # h5py bug
try:
    f = H5File('{}reduced.h5'.format(prefix), 'w')
except OSError:
    f = H5File('{}reduced.h5'.format(prefix), 'w')
for name in ('bg', 'sig', 'diff'):
    img = eval('{}img'.format(name))
    n = eval('is{}.sum()'.format(name))
    f['{}img'.format(name)] = img
    f['n{}'.format(name)] = n
    plt.figure(figsize=(8, 8))
    plt.imshow(img)
    plt.title('reduced {} imgs (n={})'.format(name, n))
    plt.savefig('{}{}img.png'.format(prefix, name))
Example #28
def load_formated_yaml(string):
    loaded = load_yaml(string)
    if 'format' not in loaded:
        return loaded
    fmt = loaded.pop('format')
    return load_yaml(dump_yaml(loaded, default_flow_style=False).format(**fmt))
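Example #28 is a small templating trick: the top-level format mapping is popped off, the remaining document is re-serialized, str.format fills the {placeholders}, and the result is parsed again. A hypothetical round trip (the host key and URLs are invented for illustration; the function is restated with the Example #31 aliases so the snippet runs on its own):

from yaml import dump as dump_yaml, safe_load as load_yaml


def load_formated_yaml(string):
    loaded = load_yaml(string)
    if 'format' not in loaded:
        return loaded
    fmt = loaded.pop('format')
    return load_yaml(dump_yaml(loaded, default_flow_style=False).format(**fmt))


doc = """
format:
  host: example.com
url: http://{host}/api
backup_url: https://{host}/backup
"""
print(load_formated_yaml(doc))
# {'backup_url': 'https://example.com/backup', 'url': 'http://example.com/api'}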
Example #29
 def _init_temp_options(self):
     """
     Preps the file that stores unsaved options for writing
     """
     with open("./resources/.temp_options.yaml", "w") as file_:
         file_.write(dump_yaml({"options": Options()}))
Example #30
 def save_configuration(self, configuration, stream):
     dictionary = self._as_dictionary(configuration)
     dump_yaml(dictionary, stream, default_flow_style=False)
Example #31
#!/usr/bin/env python3

import sys
from pathlib import Path
import logging

from yaml import dump as dump_yaml, safe_load as load_yaml

from elasticsearch.addl_index_transformations.portal import transform


if __name__ == "__main__":
    paths = sys.argv[1:]
    if len(paths) == 0:
        print('Provide paths to JSON or YAML files as arguments')
        sys.exit(1)
    logging.basicConfig(level=logging.DEBUG)
    for path in paths:
        doc = load_yaml(Path(path).read_text())
        new_name = f'{path}.transformed.yaml'
        Path(new_name).write_text(dump_yaml(transform(doc)))
        print(f'Wrote {new_name}')
Example #32
    def _modify_config_file(self, instance, config_file, seed_ips, token):
        # XML (0.6.x)
        if config_file.endswith(".xml"):
            remote_file = "storage-conf.xml"

            xml = parse_xml(urllib.urlopen(config_file)).getroot()

            #  Seeds
            seeds = xml.find("Seeds")
            if seeds is not None:
                while seeds.getchildren():
                    seeds.remove(seeds.getchildren()[0])
            else:
                seeds = Element("Seeds")
                xml.append(seeds)

            for seed_ip in seed_ips:
                seed = Element("Seed")
                seed.text = seed_ip
                seeds.append(seed)

            # Initial token
            initial_token = xml.find("InitialToken")
            if initial_token is None:
                initial_token = Element("InitialToken")
                xml.append(initial_token)
            initial_token.text = token

            # Logs
            commit_log_directory = xml.find("CommitLogDirectory")
            if commit_log_directory is None:
                commit_log_directory = Element("CommitLogDirectory")
                xml.append(commit_log_directory)
            commit_log_directory.text = "/mnt/cassandra-logs"

            # Data
            data_file_directories = xml.find("DataFileDirectories")
            if data_file_directories is not None:
                while data_file_directories.getchildren():
                    data_file_directories.remove(
                        data_file_directories.getchildren()[0])
            else:
                data_file_directories = Element("DataFileDirectories")
                xml.append(data_file_directories)
            data_file_directory = Element("DataFileDirectory")
            data_file_directory.text = "/mnt/cassandra-data"
            data_file_directories.append(data_file_directory)

            # listen address
            listen_address = xml.find("ListenAddress")
            if listen_address is None:
                listen_address = Element("ListenAddress")
                xml.append(listen_address)
            listen_address.text = ""

            # thrift address
            thrift_address = xml.find("ThriftAddress")
            if thrift_address is None:
                thrift_address = Element("ThriftAddress")
                xml.append(thrift_address)
            thrift_address.text = ""

            fd, temp_file = tempfile.mkstemp(prefix='storage-conf.xml_',
                                             text=True)
            os.write(fd, dump_xml(xml))
            os.close(fd)

        # YAML (0.7.x)
        elif config_file.endswith(".yaml"):
            remote_file = "cassandra.yaml"

            yaml = parse_yaml(urllib.urlopen(config_file))
            yaml['seeds'] = seed_ips
            yaml['initial_token'] = token
            yaml['data_file_directories'] = ['/mnt/cassandra-data']
            yaml['commitlog_directory'] = '/mnt/cassandra-logs'
            yaml['listen_address'] = str(instance.private_dns_name)
            yaml['rpc_address'] = str(instance.public_dns_name)

            fd, temp_file = tempfile.mkstemp(prefix='cassandra.yaml_',
                                             text=True)
            os.write(fd, dump_yaml(yaml))
            os.close(fd)
        else:
            raise Exception("Configuration file must be one of xml or yaml")

        return temp_file, remote_file
Example #33
    def _modify_config_file(self, instance, config_file, seed_ips, token):
        # XML (0.6.x) 
        if config_file.endswith(".xml"):
            remote_file = "storage-conf.xml"

            xml = parse_xml(urllib.urlopen(config_file)).getroot()

            #  Seeds
            seeds = xml.find("Seeds")
            if seeds is not None:
                while seeds.getchildren():
                    seeds.remove(seeds.getchildren()[0])
            else:
                seeds = Element("Seeds")
                xml.append(seeds)

            for seed_ip in seed_ips:
                seed = Element("Seed")
                seed.text = seed_ip
                seeds.append(seed)

            # Initial token
            initial_token = xml.find("InitialToken")
            if initial_token is None:
                initial_token = Element("InitialToken")
                xml.append(initial_token)
            initial_token.text = token

            # Logs
            commit_log_directory = xml.find("CommitLogDirectory")
            if commit_log_directory is None:
                commit_log_directory = Element("CommitLogDirectory")
                xml.append(commit_log_directory)
            commit_log_directory.text = "/mnt/cassandra-logs"

            # Data 
            data_file_directories = xml.find("DataFileDirectories")
            if data_file_directories is not None:
                while data_file_directories.getchildren():
                    data_file_directories.remove(data_file_directories.getchildren()[0])
            else:
                data_file_directories = Element("DataFileDirectories")
                xml.append(data_file_directories)
            data_file_directory = Element("DataFileDirectory")
            data_file_directory.text = "/mnt/cassandra-data"
            data_file_directories.append(data_file_directory)


            # listen address
            listen_address = xml.find("ListenAddress")
            if listen_address is None:
                listen_address = Element("ListenAddress")
                xml.append(listen_address)
            listen_address.text = ""

            # thrift address
            thrift_address = xml.find("ThriftAddress")
            if thrift_address is None:
                thrift_address = Element("ThriftAddress")
                xml.append(thrift_address)
            thrift_address.text = ""

            fd, temp_file = tempfile.mkstemp(prefix='storage-conf.xml_', text=True)
            os.write(fd, dump_xml(xml))
            os.close(fd)
            
        # YAML (0.7.x)
        elif config_file.endswith(".yaml"):
            remote_file = "cassandra.yaml"

            yaml = parse_yaml(urllib.urlopen(config_file))
            yaml['seeds'] = seed_ips
            yaml['initial_token'] = token
            yaml['data_file_directories'] = ['/mnt/cassandra-data']
            yaml['commitlog_directory'] = '/mnt/cassandra-logs'
            yaml['listen_address'] = str(instance.private_dns_name)
            yaml['rpc_address'] = str(instance.public_dns_name)

            fd, temp_file = tempfile.mkstemp(prefix='cassandra.yaml_', text=True)
            os.write(fd, dump_yaml(yaml))
            os.close(fd)
        else:
            raise Exception("Configuration file must be one of xml or yaml") 

        return temp_file, remote_file
Example #34
 def save_test_reports(self, reports, stream):
     data = {"reports": [each.as_dictionary for each in reports]}
     dump_yaml(data, stream, default_flow_style=False, allow_unicode=True)