Example #1
def sanitise_report(report_file, semaphore):
    match = re.search("^" + re.escape(settings.reports_directory) + "(.*)", report_file)

    # read report file
    report = Report(report_file)
    report_header = report.header
    report_header["report_file"] = match.group(1)

    report_filename = os.path.split(report_file)[-1]
    report_filename_sanitised = os.path.join(settings.sanitised_directory, report_filename)

    if os.path.isfile(report_filename_sanitised):
        log.info("Sanitised report name already exists, overwriting: %s" % report_filename_sanitised)
    else:
        log.info("New report file: %s" % report_filename_sanitised)

    report_file_sanitised = open(report_filename_sanitised, "w")

    safe_dump(report_header, report_file_sanitised, explicit_start=True, explicit_end=True)

    safe_dump_all(report, report_file_sanitised, explicit_start=True, explicit_end=True, default_flow_style=False)

    log.info("Moving original unsanitised file %s to archive" % report_file)

    archive_report(report_file)

    report_file_sanitised.close()
    report.close()

    os.remove(report_file)

    semaphore.release()
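The pattern above writes one header document with safe_dump followed by the report entries with safe_dump_all, producing a single multi-document YAML file. A minimal standalone sketch of that pattern (file name and data are hypothetical):

import yaml

header = {"report_file": "example.yaml"}            # hypothetical header document
entries = [{"input": "a.org"}, {"input": "b.org"}]  # hypothetical entry documents

with open("sanitised.yaml", "w") as f:
    # explicit_start/explicit_end emit the '---' and '...' document markers
    yaml.safe_dump(header, f, explicit_start=True, explicit_end=True)
    yaml.safe_dump_all(entries, f, explicit_start=True, explicit_end=True,
                       default_flow_style=False)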
Example #2
def readpage(page):
	news=get_news(page)
	if len(news)>0:
		with open('meneame.yml', 'a') as outfile:
			yaml.safe_dump_all(news, outfile, default_flow_style=False,allow_unicode=True, default_style=None)
			outfile.write("---\n")
	else:
		print "SKIP"
Example #3
def main(archive_file):
    if not os.path.isfile(archive_file):
        print "Archive file does not exist: " + archive_file
        return 1

    # we only support tar.gz and .gz
    if archive_file.endswith("tar.gz"):
        archive_tar = tarfile.open(archive_file)
        for element in archive_tar:
            f = archive_tar.extractfile(element)
            fp, report_file = tempfile.mkstemp()
            while True:
                data = f.read()
                if not data:
                    break
                os.write(fp, data)
            f.close()
    elif archive_file.endswith(".gz"):
        fp, report_file = tempfile.mkstemp()
        f = gzip.open(archive_file, 'rb')
        while True:
            data = f.read()
            if not data:
                break
            os.write(fp, data)
        f.close()

    report = Report(report_file, action="sanitise")
    report_filename = generate_filename(report.header)
    report_filename_sanitised = os.path.join(settings.sanitised_directory,
                                             report_filename)
    report.header['report_file'] = "%s/%s" % (report.header['probe_cc'],
                                              report_filename)

    report_file_sanitised = open(report_filename_sanitised, "w")

    safe_dump(
        report.header,
        report_file_sanitised,
        explicit_start=True,
        explicit_end=True)
    safe_dump_all(
        report,
        report_file_sanitised,
        explicit_start=True,
        explicit_end=True,
        default_flow_style=False)
    delete_existing_report_entries(report.header)

    public_report_file = os.path.join(
        settings.public_directory, report.header['probe_cc'], report_filename)
    if os.path.isfile(public_report_file):
        os.remove(public_report_file)

    report_file_sanitised.close()
    os.remove(report_file)
Example #4
def read(id):
	url=STR+id
	response = requests.get(url)
	soup = bs4.BeautifulSoup(response.text,"lxml")
	body=soup.select("div.news-body")
	if not body or len(body)==0:
		print "ERROR: "+url
		return True
	a=body[0]
	cerrado=a.select("#a-va-"+id+" span.closed")
	if len(cerrado)==0:
		print "ABIERTA: "+url
		return False
	div=a.select("div.news-shakeit")
	if len(div)==0:
		print "SIN SHAKE: "+url
		return False
	status=div[0].attrs['class'][-1].split('-')[-1]
	h1=soup.find("h1")
	if not h1:
		print "SIN h1: "+url
		return False
	dates=[int(d.attrs['data-ts'].strip()) for d in a.select("div.news-submitted span.ts")]
	new={
		'id': int(id),
		'title': h1.get_text().strip(), 
		'url': h1.a.attrs["href"],
		'author': a.select("div.news-submitted a")[0].attrs["href"].split("/")[-1],
		'body': " ".join([t.string.strip() for t in a.find_all(text=True,recursive=False)]).strip(), 
		'sub': a.select("div.news-details span.tool a")[0].get_text().strip(),
		'story': get_meta(soup,"property","og:url"),
		'karma': int(a.select("#a-karma-"+id)[0].get_text().strip()),
		'sent': dates[0], 
		'status': status,
		'votes': {
			'users': int(a.select("#a-usu-"+id)[0].get_text().strip()),
			'anonymous': int(a.select("#a-ano-"+id)[0].get_text().strip()),
			'negative':int(a.select("#a-neg-"+id)[0].get_text().strip())
		}
	}
	if status == "published":
		new['comments']=dates[1]
	counter=a.select("span.comments-counter span.counter")
	if len(counter)>0:
		new['comments']=int(counter[0].get_text().strip())
	else:
		new['comments']=0
	tags=[t.get_text().strip() for t in a.select("span.news-tags a") if len(t.get_text().strip())>0]
	if len(tags)>0:
		new['tags']=tags

	with open(OUT, 'a') as outfile:
		yaml.safe_dump_all([new], outfile, default_flow_style=False,allow_unicode=True, default_style=None)
		outfile.write("---\n")

	return True
Example #5
def main():
    parser = optparse.OptionParser()
    parser.add_option("-H", "--hash", action="store",
                      help="verify that stdin hash given sha256 hash")
    options, args = parser.parse_args()
    if options.hash:
        gen = process_package_with_hash(sys.stdin, options.hash)
    else:
        gen = process_package(sys.stdin)
    yaml.safe_dump_all(gen, sys.stdout)
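Note that safe_dump_all accepts any iterable of documents, including a generator as used here, so documents can be serialized as they are produced instead of being collected into a list first. A small sketch (gen_docs is hypothetical):

import sys
import yaml

def gen_docs():
    for i in range(3):
        yield {"doc": i}  # documents yielded lazily

yaml.safe_dump_all(gen_docs(), sys.stdout)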
Example #6
def write(fname, *items):
  try:
    written = False
    with closing(_open_userfile(fname, 'w')) as f:
      if written:
        f.write(SEPARATOR)
      yaml.safe_dump_all(items, f)
  except Exception as e:
    print("Can't write filename", fname, e.message)
    if PROPAGATE_EXCEPTIONS:
      raise
Example #7
def make_adf(archive=None, canonical=False, out=None, pretty=False):
    """
    >>> meta = Meta('zip', 'aes-256-ctr', created='now')
    >>> archive = Archive('title', meta)
    >>> cert = Certificate(chr(255)*16)
    >>> make_adf([archive, cert], out=sys.stdout, pretty=True)
    !archive
    meta: !meta
      cipher: aes-256-ctr
      created: now
      format: zip
    title: title
    --- !certificate
    key: !!binary |
      /////////////////////w==
    >>> make_adf([archive, cert], out=sys.stdout)
    !archive
    meta: !meta {cipher: aes-256-ctr, created: now, format: zip}
    title: title
    --- !certificate
    key: !!binary |
      /////////////////////w==
    """

    if not hasattr(archive, '__getitem__'):
        archive = [archive]
    if pretty:
        out.write("--- ".join(map(pyaml.dump, archive)))
        return
    return yaml.safe_dump_all(archive, out, canonical=canonical)
Example #8
    def test_deserializer_validate_existing_assignments(self):
        assignment1 = coremodels.Assignment.objects.create(
            title='Existing1', text='text1')
        assignment2 = coremodels.Assignment.objects.create(
            title='Existing2', text='text2')
        duck1000tag = coremodels.Tag.objects.create(tag='duck1000')
        assignment1.tags.add(duck1000tag)
        assignment2.tags.add(duck1000tag)
        deserializer = multiassignment_serialize.Deserializer(yaml.safe_dump_all([
            {'id': assignment1.id, 'title': 'Updated1', 'text': 'updatedText1',
             'tags': ['oblig1']},
            {'title': 'New'},
            {'id': assignment2.id, 'title': 'Updated2', 'text': 'updatedText2',
             'tags': ['duck1000', 'oblig2', 'week3']},
        ]), course_tag='duck1000')

        assignments_by_tag = {}
        existing_assignments = deserializer._validate_existing_assignments(assignments_by_tag)
        self.assertEquals(len(existing_assignments), 2)
        self.assertEquals(
            set(assignments_by_tag.keys()),
            set(['duck1000', 'oblig1', 'oblig2', 'week3']))
        self.assertEquals(
            assignments_by_tag['duck1000'], [assignment2, assignment1])
        self.assertEquals(
            assignments_by_tag['oblig2'], [assignment2])
Example #9
    def CompileReport(self, analysis_mediator):
        """Compiles a report of the analysis.

        Args:
          analysis_mediator: The analysis mediator object (instance of
                             AnalysisMediator).

        Returns:
          The analysis report (instance of AnalysisReport).
        """
        report = event.AnalysisReport(self.NAME)

        if self._output_format == u"yaml":
            lines_of_text = []
            lines_of_text.append(yaml.safe_dump_all(self._service_collection.services))
        else:
            lines_of_text = [u"Listing Windows Services"]
            for service in self._service_collection.services:
                lines_of_text.append(self._FormatServiceText(service))
                # Separate services with a blank line.
                lines_of_text.append(u"")

        report.SetText(lines_of_text)

        return report
Example #10
def dump(arg):
    """Dump a sequence of dictionaries as YAML for editing.
    """
    return yaml.safe_dump_all(
        arg,
        allow_unicode=True,
        default_flow_style=False,
    )
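When no stream argument is passed, as in this helper, safe_dump_all returns the serialized text instead of writing it anywhere. For example:

import yaml

text = yaml.safe_dump_all([{"a": 1}, {"b": 2}], default_flow_style=False)
print(text)  # prints the two documents separated by a '---' line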
Example #11
def write_yaml(input, filename):
    with open(filename, "w") as f:
        if isinstance(input, list):
            f.write(yaml.safe_dump_all(input, default_flow_style=False))
        elif isinstance(input, dict):
            f.write(yaml.safe_dump(input, default_flow_style=False))
        else:
            raise Exception("cannot dump $s objects to yaml." % str(type(input)))
Example #12
def pass_insert_host_yaml(host, yaml_docs):
    value = yaml.safe_dump_all(
        yaml_docs,
        # use multiline "block" style instead of {a:b} ("flow" style)
        default_flow_style=False
    )

    pass_insert( host_to_pass_key(host), value )
Example #13
 def test_deserializer_validate_existing_assignments_invalid(self):
     assignment1 = coremodels.Assignment.objects.create(
         title='Existing1', text='text1')
     assignment1.tags.create(tag='duck1000')
     deserializer = multiassignment_serialize.Deserializer(yaml.safe_dump_all([
         {'id': assignment1.id},
     ]), course_tag='duck1000')
     with self.assertRaises(multiassignment_serialize.DeserializerValidationErrors):
         deserializer._validate_existing_assignments(assignments_by_tag={})
Example #14
def sanitise_report(report_file, semaphore):
    match = re.search("^" + re.escape(settings.reports_directory) + "(.*)",
                      report_file)

    # read report file
    report = Report(report_file)
    report_header = report.header
    report_header['report_file'] = match.group(1)

    report_filename = os.path.split(report_file)[-1]
    report_filename_sanitised = os.path.join(settings.sanitised_directory,
                                             report_filename)

    if os.path.isfile(report_filename_sanitised):
        log.info("Sanitised report name already exists, overwriting: %s" %
                 report_filename_sanitised)
    else:
        log.info("New report file: %s" % report_filename_sanitised)

    report_file_sanitised = open(report_filename_sanitised, 'w')

    safe_dump(report_header,
              report_file_sanitised,
              explicit_start=True,
              explicit_end=True)

    safe_dump_all(report,
                  report_file_sanitised,
                  explicit_start=True,
                  explicit_end=True,
                  default_flow_style=False)

    log.info("Moving original unsanitised file %s to archive" % report_file)

    archive_report(report_file)

    report_file_sanitised.close()
    report.close()

    os.remove(report_file)

    semaphore.release()
Example #15
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--pipeline_cfg',
        default=paths.flavour_cfg_path,
    )
    parser.add_argument(
        '--flavour-set',
        default='all',
    )
    parser.add_argument(
        '--cicd-cfg',
        default='default',
    )
    parser.add_argument(
        '--outfile',
        default='pipeline.yaml',
    )
    parsed = parser.parse_args()

    build_yaml = parsed.pipeline_cfg

    flavour_set = glci.util.flavour_set(
        flavour_set_name=parsed.flavour_set,
        build_yaml=build_yaml,
    )

    gardenlinux_flavours = tuple(flavour_set.flavours())
    outfile = parsed.outfile

    pipeline: dict = render_pipeline_dict(
        gardenlinux_flavours=gardenlinux_flavours,
        cicd_cfg_name=parsed.cicd_cfg,
    )

    with open(outfile, 'w') as f:
        pipeline_raw = dataclasses.asdict(pipeline)
        yaml.safe_dump_all((pipeline_raw, ), stream=f)

    print(
        f'dumped pipeline with {len(gardenlinux_flavours)} task(s) to {outfile}'
    )
Example #16
    def test_put_empty_bucket(self):
        rules = {'deckhand:create_cleartext_documents': '@'}
        self.policy.set_rules(rules)

        resp = self.app.simulate_put(
            '/api/v1.0/buckets/mop/documents',
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all([]))
        self.assertEqual(200, resp.status_code)
        created_documents = list(yaml.safe_load_all(resp.text))
        self.assertEmpty(created_documents)
Example #17
def create_configdocs(ctx, collection, filename, directory, append, replace,
                      recurse):
    if (append and replace):
        ctx.fail('Either append or replace may be selected but not both')
    if (not filename and not directory) or (filename and directory):
        ctx.fail('Please specify one or more filenames using '
                 '--filename="<filename>" OR one or more directories using '
                 '--directory="<directory>"')

    if append:
        create_buffer = 'append'
    elif replace:
        create_buffer = 'replace'
    else:
        create_buffer = None

    if directory:
        for dir in directory:
            if recurse:
                for path, dirs, files in os.walk(dir):
                    filename += tuple([
                        os.path.join(path, name) for name in files
                        if name.endswith('.yaml')
                    ])
            else:
                filename += tuple([
                    os.path.join(dir, each) for each in os.listdir(dir)
                    if each.endswith('.yaml')
                ])

        if not filename:
            # None or empty list should raise this error
            ctx.fail('The directory does not contain any YAML files. '
                     'Please enter one or more YAML files or a '
                     'directory that contains one or more YAML files.')

    docs = []
    for file in filename:
        with open(file, 'r') as stream:
            if file.endswith(".yaml"):
                try:
                    docs += list(yaml.safe_load_all(stream))
                except yaml.YAMLError as exc:
                    ctx.fail('YAML file {} is invalid because {}'.format(
                        file, exc))
            else:
                ctx.fail('The file {} is not a YAML file.  Please enter '
                         'only YAML files.'.format(file))

    data = yaml.safe_dump_all(docs)

    click.echo(
        CreateConfigdocs(ctx, collection, create_buffer, data,
                         filename).invoke_and_return_resp())
Example #18
def main(argv=sys.argv):

    parser = arg_parser()

    args = parser.parse_args()

    print("Reading %s" % args.infile)
    infile = open( args.infile )
    d, report = csv2dict(infile)


    print("Writing to %s" % args.outfile)
    outfile = open( args.outfile, 'w' )

    if args.fmt == 'json':
        import json
        outfile.write( json.dumps( d ) )
    elif args.fmt == 'yaml':
        import yaml
        yaml.safe_dump_all( d, outfile, default_flow_style=False )
Example #19
    def test_rendered_documents_fail_post_validation(self):
        """Validates that when fully rendered documents fail schema validation,
        a 400 is raised.

        For this scenario a DataSchema checks that the relevant document has
        a key in its data section, a key which is removed during the rendering
        process as the document uses a delete action. This triggers
        post-rendering validation failure.
        """
        rules = {'deckhand:list_cleartext_documents': '@',
                 'deckhand:list_encrypted_documents': '@',
                 'deckhand:create_cleartext_documents': '@'}
        self.policy.set_rules(rules)

        # Create a document for a bucket.
        documents_factory = factories.DocumentFactory(2, [1, 1])
        payload = documents_factory.gen_test({
            "_GLOBAL_DATA_1_": {"data": {"a": "b"}},
            "_SITE_DATA_1_": {"data": {"a": "b"}},
            "_SITE_ACTIONS_1_": {
                "actions": [{"method": "delete", "path": "."}]
            }
        }, site_abstract=False)

        data_schema_factory = factories.DataSchemaFactory()
        metadata_name = payload[-1]['schema']
        schema_to_use = {
            '$schema': 'http://json-schema.org/schema#',
            'type': 'object',
            'properties': {
                'a': {
                    'type': 'string'
                }
            },
            'required': ['a'],
            'additionalProperties': False
        }
        data_schema = data_schema_factory.gen_test(
            metadata_name, data=schema_to_use)
        payload.append(data_schema)

        resp = self.app.simulate_put(
            '/api/v1.0/buckets/mop/documents',
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all(payload))
        self.assertEqual(200, resp.status_code)
        revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][
            'revision']

        resp = self.app.simulate_get(
            '/api/v1.0/revisions/%s/rendered-documents' % revision_id,
            headers={'Content-Type': 'application/x-yaml'})

        self.assertEqual(400, resp.status_code)
Example #20
def generate_manifest(image_name: str, falcon_cid: str, output_file: str):
    "Generate and save manifest for falcon-container injector deployment"
    log.info("Generating manifest from: '%s'" % (image_name))
    resp = docker_client.containers.run(
        image_name, "--image=" + image_name + " -cid=" + falcon_cid +
        " -pullpolicy=Always")
    manifest = yaml.safe_dump_all(yaml.safe_load_all(resp))
    log.info("Writing manifest file to '%s'" % (output_file))
    f = open(output_file, "w")
    f.write(manifest)
    return manifest
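The safe_dump_all(yaml.safe_load_all(resp)) round trip above is a common way to normalize a multi-document manifest: parsing and re-serializing yields consistent indentation and quoting regardless of how the container formatted its output. A minimal sketch:

import yaml

raw = "a:   1\n---\nb: 2\n"
print(yaml.safe_dump_all(yaml.safe_load_all(raw)))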
Example #21
def infer_type_and_default(action, default, typ, required):
    """
    Infer the type string from the default and typ

    :param action: Name of the action
    :type action: ```Optional[str]```

    :param default: Initial default value
    :type default: ```Any```

    :param typ: The type of the argument
    :type typ: ```Optional[str]```

    :param required: Whether to require the argument
    :type required: ```bool```

    :returns: action (e.g., for `argparse.Action`), default, whether it's required, inferred type str
    :rtype: ```Tuple[str, Any, bool, str]```
    """
    if code_quoted(default):
        return _infer_type_and_default_from_quoted(action, default, required, typ)
    elif type(default).__name__ in simple_types:
        typ = type(default).__name__
    elif isinstance(default, AST):
        action, default, required, typ = _parse_default_from_ast(
            action, default, required, typ
        )
    elif hasattr(default, "__str__") and str(default) == "<required parameter>":
        # Special type that PyTorch uses & defines
        action, default, required, typ = None, None, True, default.__class__.__name__
    elif isinstance(default, (list, tuple)):
        action, default, required, typ = _infer_type_and_default_for_list_or_tuple(
            action, default, required
        )
    elif isinstance(default, dict):
        typ = "loads"
        try:
            default = dumps(default)
        except TypeError:
            # YAML is more permissive though less concise, but `loads` from yaml is used so this works
            default = safe_dump_all(default)
    elif default is None:
        if "Optional" not in (typ or iter(())) and typ not in frozenset(
            ("Any", "pickle.loads", "loads")
        ):
            typ = None
    elif isinstance(default, type) or isfunction(default) or isclass(default):
        typ, default, required = "pickle.loads", pickle.dumps(default), False
    else:
        raise NotImplementedError(
            "Parsing type {!s}, which contains {!r}".format(type(default), default)
        )

    return action, default, required, typ
Example #22
def dump(outfile, resources, included_kinds, app_name, app_uid,
         app_api_version):
    to_be_dumped = []
    for resource in resources:
        if included_kinds is None or resource["kind"] in included_kinds:
            log.info("Application '{:s}' owns '{:s}/{:s}'", app_name,
                     resource["kind"], resource["metadata"]["name"])
            resource = copy.deepcopy(resource)
            set_resource_ownership(app_uid=app_uid,
                                   app_name=app_name,
                                   app_api_version=app_api_version,
                                   resource=resource)
        else:
            log.info("Application '{:s}' doesn't own '{:s}/{:s}'", app_name,
                     resource["kind"], resource["metadata"]["name"])
        to_be_dumped.append(resource)
    yaml.safe_dump_all(to_be_dumped,
                       outfile,
                       default_flow_style=False,
                       indent=2)
Example #23
def export_challenges(out_file, dst_attachments, src_attachments, tarfile=None):
    from CTFd.models import Challenges, Flags, Tags, Files

    chals = Challenges.query.order_by(Challenges.value).all()
    chals_list = []

    for chal in chals:
        properties = {
            'name': chal.name,
            'value': chal.value,
            'description': chal.description,
            'category': chal.category,
        }
        flags_obj = Flags.query.filter_by(chal=chal.id)
        flags = []
        for flag_obj in flags_obj:
            flag = {}
            flag['flag'] = flag_obj.flag
            flag['type'] = flag_obj.type
            flags.append(flag)
        properties['flags'] = flags

        if chal.hidden:
            properties['hidden'] = bool(chal.hidden)
        tags = [tag.tag for tag in Tags.query.add_columns('tag').filter_by(chal=chal.id).all()]
        if tags:
            properties['tags'] = tags

        # These file locations will be partial paths in relation to the upload folder
        src_paths_rel = [file.location for file in Files.query.add_columns('location').filter_by(chal=chal.id).all()]

        file_map = {}
        file_list = []
        for src_path_rel in src_paths_rel:
            dirname, filename = os.path.split(src_path_rel)
            dst_dir = os.path.join(dst_attachments, dirname)
            src_path = os.path.join(src_attachments, src_path_rel)
            file_map[src_path] = os.path.join(dst_dir, filename)

            # Create path relative to the output file
            dst_dir_rel = os.path.relpath(dst_dir, start=os.path.dirname(out_file))
            file_list.append(os.path.join(dst_dir_rel, filename))

        if file_map:
            properties['files'] = file_list
            if tarfile:
                tar_files(file_map, tarfile)
            else:
                copy_files(file_map)

        print("Exporting", properties['name'])
        chals_list.append(properties)

    return yaml.safe_dump_all(chals_list, default_flow_style=False, allow_unicode=True, explicit_start=True)
Example #24
def p_yaml(width: int, indent: int) -> int:
    try:
        data = safe_load_all(stdin)
    except ScannerError as e:
        log.critical("%s", f"{ERROR}{linesep}{e}")
        return 1
    else:
        yaml = recur_sort(data)
        fold_pt = width // 2
        add_representer(str, _repr_str(fold_pt), Dumper=SafeDumper)
        safe_dump_all(
            yaml,
            stdout,
            sort_keys=False,
            allow_unicode=True,
            explicit_start=True,
            width=width,
            indent=indent,
        )
        return 0
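The add_representer(..., Dumper=SafeDumper) call above registers a custom string representer that the following safe_dump_all call picks up. A minimal sketch of that registration pattern (this simplified representer only switches multi-line strings to literal block style; the original _repr_str also folds long lines):

import sys
import yaml

def repr_str(dumper, data):
    if "\n" in data:
        # emit multi-line strings in '|' literal block style
        return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
    return dumper.represent_str(data)

yaml.add_representer(str, repr_str, Dumper=yaml.SafeDumper)
yaml.safe_dump_all([{"text": "line one\nline two"}], sys.stdout, explicit_start=True)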
Example #25
def main(datasets_folder, output):
    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        level=logging.INFO)
    datasets_folder_path = Path(datasets_folder[0]).resolve()
    output_path = Path(output).resolve()
    for dataset in os.listdir(datasets_folder_path):
        prev_path = Path(os.path.join(datasets_folder_path, dataset))
        path = Path(os.path.join(output_path, dataset))
        output_path_dataset = Path(os.path.join(output_path, dataset))
        logging.info("Moving %s to %s", prev_path, output_path)
        try:
            shutil.move(str(prev_path), str(output_path))
        except:
            logging.exception(
                "Error occurred while moving the dataset, skipping.")
            continue

        if path.is_dir():
            for file in os.listdir(path):
                if file.endswith(".xml"):
                    if file.startswith("MTD"):
                        path = Path(os.path.join(path, file))
        if path.suffix != '.xml':
            raise RuntimeError('want xml')

        logging.info("Processing %s", path)

        documents = prepare_dataset(path)

        if 'xml' in str(path):
            yaml_path = output_path_dataset.joinpath(path.parent.name +
                                                     '.yaml')
        else:
            yaml_path = output_path_dataset.joinpath(path.name + '.yaml')
        if documents:
            logging.info("Writing %s dataset(s) into %s", len(documents),
                         yaml_path)
            with open(yaml_path, 'w') as stream:
                yaml.safe_dump_all(documents, stream, sort_keys=False)
        else:
            logging.info("No datasets discovered. Bye!")
Example #26
    def test_create_delete_then_recreate_document_in_different_bucket(self):
        """Ordinarily creating a document with the same metadata.name/schema
        in a separate bucket raises an exception, but if we delete the document
        and re-create it in a different bucket this should be a success
        scenario.
        """
        rules = {'deckhand:create_cleartext_documents': '@'}
        self.policy.set_rules(rules)

        payload = factories.DocumentFactory(2, [1, 1]).gen_test({})
        bucket_name = test_utils.rand_name('bucket')
        alt_bucket_name = test_utils.rand_name('bucket')

        # Create the documents in the first bucket.
        resp = self.app.simulate_put(
            '/api/v1.0/buckets/%s/documents' % bucket_name,
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all(payload))
        self.assertEqual(200, resp.status_code)
        documents = list(yaml.safe_load_all(resp.text))
        self.assertEqual(3, len(documents))
        self.assertEqual([bucket_name] * 3,
                         [d['status']['bucket'] for d in documents])

        # Delete the documents from the first bucket.
        resp = self.app.simulate_put(
            '/api/v1.0/buckets/%s/documents' % bucket_name,
            headers={'Content-Type': 'application/x-yaml'},
            body=None)
        self.assertEqual(200, resp.status_code)

        # Re-create the documents in the second bucket.
        resp = self.app.simulate_put(
            '/api/v1.0/buckets/%s/documents' % alt_bucket_name,
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all(payload))
        self.assertEqual(200, resp.status_code)
        documents = list(yaml.safe_load_all(resp.text))
        self.assertEqual(3, len(documents))
        self.assertEqual([alt_bucket_name] * 3,
                         [d['status']['bucket'] for d in documents])
Example #27
 def _create_revision(self, payload=None):
     if not payload:
         documents_factory = factories.DocumentFactory(2, [1, 1])
         payload = documents_factory.gen_test({})
     resp = self.app.simulate_put(
         '/api/v1.0/buckets/mop/documents',
         headers={'Content-Type': 'application/x-yaml'},
         body=yaml.safe_dump_all(payload))
     self.assertEqual(200, resp.status_code)
     revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][
         'revision']
     return revision_id
Example #28
    def test_put_bucket_cleartext_documents_except_forbidden(self):
        rules = {'deckhand:create_cleartext_documents': 'rule:admin_api'}
        self.policy.set_rules(rules)

        documents_factory = factories.DocumentFactory(2, [1, 1])
        payload = documents_factory.gen_test({})

        resp = self.app.simulate_put(
            '/api/v1.0/buckets/mop/documents',
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all(payload))
        self.assertEqual(403, resp.status_code)
Example #29
    def decrypt_secrets(self):
        """Decrypt and unwrap pegleg managed encrypted secrets documents
        included in a site secrets file, and print the result to the standard
        out."""
        add_representer_ordered_dict()
        secrets = self.get_decrypted_secrets()

        return yaml.safe_dump_all(secrets,
                                  sort_keys=False,
                                  explicit_start=True,
                                  explicit_end=True,
                                  default_flow_style=False)
Example #30
    def test_rendered_documents_sorting_metadata_name(self):
        rules = {
            'deckhand:list_cleartext_documents': '@',
            'deckhand:list_encrypted_documents': '@',
            'deckhand:create_cleartext_documents': '@'
        }
        self.policy.set_rules(rules)

        documents_factory = factories.DocumentFactory(2, [1, 1])
        documents = documents_factory.gen_test({},
                                               global_abstract=False,
                                               region_abstract=False,
                                               site_abstract=False)
        expected_names = ['bar', 'baz', 'foo']
        for idx in range(len(documents)):
            documents[idx]['metadata']['name'] = expected_names[idx]

        resp = self.app.simulate_put(
            '/api/v1.0/buckets/mop/documents',
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all(documents))
        self.assertEqual(200, resp.status_code)
        revision_id = list(yaml.safe_load_all(
            resp.text))[0]['status']['revision']

        # Test ascending order.
        resp = self.app.simulate_get(
            '/api/v1.0/revisions/%s/rendered-documents' % revision_id,
            params={'sort': 'metadata.name'},
            params_csv=False,
            headers={'Content-Type': 'application/x-yaml'})
        self.assertEqual(200, resp.status_code)
        retrieved_documents = list(yaml.safe_load_all(resp.text))

        self.assertEqual(3, len(retrieved_documents))
        self.assertEqual(expected_names,
                         [d['metadata']['name'] for d in retrieved_documents])

        # Test descending order.
        resp = self.app.simulate_get(
            '/api/v1.0/revisions/%s/rendered-documents' % revision_id,
            params={
                'sort': 'metadata.name',
                'order': 'desc'
            },
            params_csv=False,
            headers={'Content-Type': 'application/x-yaml'})
        self.assertEqual(200, resp.status_code)
        retrieved_documents = list(yaml.safe_load_all(resp.text))

        self.assertEqual(3, len(retrieved_documents))
        self.assertEqual(list(reversed(expected_names)),
                         [d['metadata']['name'] for d in retrieved_documents])
Example #31
    def test_list_rendered_documents_exclude_abstract_documents(self):
        rules = {
            'deckhand:list_cleartext_documents': '@',
            'deckhand:list_encrypted_documents': '@',
            'deckhand:create_cleartext_documents': '@'
        }
        self.policy.set_rules(rules)

        # Create 2 docs: one concrete, one abstract.
        documents_factory = factories.DocumentFactory(2, [1, 1])
        payload = documents_factory.gen_test(
            {
                '_SITE_ACTIONS_1_': {
                    'actions': [{
                        'method': 'merge',
                        'path': '.'
                    }]
                }
            },
            global_abstract=False)
        concrete_doc = payload[1]

        resp = self.app.simulate_put(
            '/api/v1.0/buckets/mop/documents',
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all(payload))
        self.assertEqual(200, resp.status_code)
        revision_id = list(yaml.safe_load_all(
            resp.text))[0]['status']['revision']

        # Verify that the concrete document is returned, but not the abstract
        # one.
        resp = self.app.simulate_get(
            '/api/v1.0/revisions/%s/rendered-documents' % revision_id,
            headers={'Content-Type': 'application/x-yaml'})
        self.assertEqual(200, resp.status_code)
        rendered_documents = list(yaml.safe_load_all(resp.text))

        self.assertEqual(2, len(rendered_documents))
        rendered_documents = list(
            filter(
                lambda x: not x['schema'].startswith(
                    types.LAYERING_POLICY_SCHEMA), rendered_documents))

        is_abstract = rendered_documents[-1]['metadata']['layeringDefinition'][
            'abstract']
        self.assertFalse(is_abstract)
        for key, value in concrete_doc.items():
            if isinstance(value, dict):
                self.assertDictContainsSubset(value,
                                              rendered_documents[-1][key])
            else:
                self.assertEqual(value, rendered_documents[-1][key])
Example #32
    def _save_apis_to_file(json_data):
        directory = SquealySettings.get('YAML_PATH', join(settings.BASE_DIR, 'yaml'))

        if not os.path.exists(directory):
            os.makedirs(directory)

        file_name = SquealySettings.get('YAML_FILE_NAME', 'squealy-api.yaml')
        full_path = join(directory, file_name)

        with open(full_path, 'w+') as f:
            f.write(yaml.safe_dump_all(json_data, explicit_start=True))
        f.close()
Example #33
 def _create_revision(self):
     # Create a revision with any document (doesn't matter).
     secrets_factory = factories.DocumentSecretFactory()
     payload = [secrets_factory.gen_test('Certificate', 'cleartext')]
     resp = self.app.simulate_put(
         '/api/v1.0/buckets/mop/documents',
         headers={'Content-Type': 'application/x-yaml'},
         body=yaml.safe_dump_all(payload))
     self.assertEqual(200, resp.status_code)
     revision_id = list(yaml.safe_load_all(
         resp.text))[0]['status']['revision']
     return revision_id
Example #34
    def dump(self, stream):
        """
        Dump the generated settings file contents to the given stream.

        Arguments:
            stream (file):
                An open writable file object to dump settings into.
        """
        # only dump the secrets yaml document if it is populated
        docs_to_dump = [self.data]
        if self.secrets:
            docs_to_dump.append(self.secrets)

        yaml.safe_dump_all(
            docs_to_dump,
            stream=stream,
            default_flow_style=False,  # Represent objects using indented blocks
                                       # rather than inline enclosures.
            explicit_start=True,  # Begin the first document with '---', per
                                  # our usual settings file syntax.
        )
Example #35
    def test_put_bucket_encrypted_secret_except_forbidden(self):
        rules = {'deckhand:create_encrypted_documents': 'rule:admin_api'}
        self.policy.set_rules(rules)

        secrets_factory = factories.DocumentSecretFactory()
        payload = [secrets_factory.gen_test('Certificate', 'encrypted')]

        resp = self.app.simulate_put(
            '/api/v1.0/buckets/mop/documents',
            headers={'Content-Type': 'application/x-yaml'},
            body=yaml.safe_dump_all(payload))
        self.assertEqual(403, resp.status_code)
Example #36
    def apply_file(
            cls,
            kluster,
            config_path,
            namespace=None,  # pylint: disable=too-many-locals
            timeout=KUBECTL_TIMEOUT,
            environ=None,
            envsubst=True,
            modifiers: List[Callable] = None):
        if environ:
            environ_str = (' '.join(
                [f'{name}="{value}"'
                 for name, value in environ.items()])) + ' '
        else:
            environ_str = ''

        with NamedTemporaryFile(mode='tw') as temp_file:
            resulted_content = []
            if envsubst:
                data = LOCALRUNNER.run(f'{environ_str}envsubst<{config_path}',
                                       verbose=False).stdout
            else:
                with open(config_path, 'r') as config_file_stream:
                    data = config_file_stream.read()
            file_content = yaml.load_all(data)

            for doc in file_content:
                if modifiers:
                    for modifier in modifiers:
                        modifier(doc)
                resulted_content.append(doc)
            temp_file.write(yaml.safe_dump_all(resulted_content))
            temp_file.flush()

            @retrying(n=0,
                      sleep_time=5,
                      timeout=timeout,
                      allowed_exceptions=RuntimeError)
            def run_kubectl():
                try:
                    cls.kubectl(kluster,
                                "apply",
                                "-f",
                                temp_file.name,
                                namespace=namespace,
                                timeout=timeout)
                except invoke.exceptions.UnexpectedExit as exc:
                    if 'did you specify the right host or port' in exc.result.stderr:
                        raise RuntimeError(str(exc)) from None
                    raise

            run_kubectl()
Example #37
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--outfile', default='tasks.yaml')

    parsed = parser.parse_args()

    build_task_yaml_path = os.path.join(paths.own_dir, 'build-task.yaml.template')
    with open(build_task_yaml_path) as f:
        raw_build_task = yaml.safe_load(f)

    promote_task = tasks.promote_task(
        committish=NamedParam(name='committish'),
        gardenlinux_epoch=NamedParam(name='gardenlinux_epoch'),
        snapshot_timestamp=NamedParam(name='snapshot_timestamp'),
        cicd_cfg_name=NamedParam(name='cicd_cfg_name'),
        version=NamedParam(name='version'),
        promote_target=NamedParam(name='promote_target'),
        promote_mode=NamedParam(name='promote_mode'),
        flavourset=NamedParam(name='flavourset'),
    )

    raw_promote_task = dataclasses.asdict(promote_task)


    clone_step = steps.clone_step(
        committish=tkn.model.NamedParam(name='committish'),
        repo_dir=tkn.model.NamedParam(name='repodir'),
        git_url=tkn.model.NamedParam(name='giturl'),
    )

    clone_step_dict = dataclasses.asdict(clone_step)

    # hack: patch-in clone-step (avoid redundancy with other tasks)
    raw_build_task['spec']['steps'][0] = clone_step_dict

    with open(parsed.outfile, 'w') as f:
        yaml.safe_dump_all((raw_build_task, raw_promote_task), f)

    print(f'dumped tasks to {parsed.outfile}')
Example #38
def main():
    args = parser.parse_args()

    try:
        konfigured_resources = konfigenetes(args.input_file_paths,
                                            args.resource_file_paths,
                                            args.patch_file_paths,
                                            parse_var_values(args.var_values))
    except ValueError as e:
        print('Fatal Error:\n{}'.format(e))
        sys.exit(1)

    print(yaml.safe_dump_all(konfigured_resources, explicit_start=True))
Example #39
def create_new_app_yaml(source_file, app_name):
    with open(source_file, 'r') as stream:
        try:
            yaml_content = list(yaml.load_all(stream))
        except yaml.YAMLError as exc:
            raise HokusaiError("Cannot read source yaml file %s." %
                               source_file)

    for c in yaml_content:
        update_namespace(c, clean_string(app_name))

    new_namespace = OrderedDict([('apiVersion', 'v1'), ('kind', 'Namespace'),
                                 ('metadata', {
                                     'name': clean_string(app_name)
                                 })])
    yaml_content = [new_namespace] + yaml_content

    with open("hokusai/%s.yml" % app_name, 'w') as output:
        output.write(YAML_HEADER)
        yaml.safe_dump_all(yaml_content, output, default_flow_style=False)

    print_green("Created hokusai/%s.yml" % app_name)
Example #40
    def FormatArtifacts(self, artifacts):
        """Formats artifacts to desired output format.

        Args:
          artifacts: a list of ArtifactDefinitions.

        Returns:
          formatted string of artifact definition.
        """
        # TODO: improve output formatting of yaml
        artifact_definitions = [artifact.AsDict() for artifact in artifacts]
        yaml_data = yaml.safe_dump_all(artifact_definitions)
        return yaml_data
Example #41
def patch_manifest(cni_file, autodetection):
    """
    Patch the CNI manifest with the IP autodetection method provided
    """
    yaml.SafeDumper.org_represent_str = yaml.SafeDumper.represent_str

    def repr_str(dumper, data):
        if "\n" in data:
            return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
        return dumper.org_represent_str(data)

    yaml.add_representer(str, repr_str, Dumper=yaml.SafeDumper)

    try:
        with open(cni_file, "r", encoding="utf8") as f:
            to_remove = []
            docs = list(yaml.safe_load_all(f))
            for doc in docs:
                if not doc:
                    to_remove.append(doc)
                if doc and doc["kind"] == "DaemonSet" and doc["metadata"]["name"] == "calico-node":
                    # Reach for the containers
                    containers = doc["spec"]["template"]["spec"]["containers"]
                    for c in containers:
                        if c["name"] == "calico-node":
                            env = c["env"]
                            for variable in env:
                                if variable["name"] == "IP_AUTODETECTION_METHOD":
                                    variable["value"] = autodetection

            # remove empty yaml documents
            for d in to_remove:
                docs.remove(d)

        with open(cni_file, "w", encoding="utf8") as fout:
            yaml.safe_dump_all(docs, fout)

    except (yaml.YAMLError, TypeError) as e:
        print(e, file=sys.stderr)
Example #42
    def post(self, request):
        dashboards = json.loads(request.body)
        directory = SquealySettings.get('YAML_PATH', join(settings.BASE_DIR, 'yaml'))

        if not os.path.exists(directory):
            os.makedirs(directory)

        file_name = SquealySettings.get('dashboard_filename', 'squealy_dashboard.yaml')
        full_path = join(directory, file_name)
        with open(full_path, 'w+') as f:
            f.write(yaml.safe_dump_all(dashboards, explicit_start=True, default_flow_style=False))
        f.close()
        return Response({}, status.HTTP_200_OK)
Example #43
  def FormatArtifacts(self, artifacts):
    """Formats artifacts to desired output format.

    Args:
      artifacts (list[ArtifactDefinition]): artifact definitions.

    Returns:
      str: formatted string of artifact definition.
    """
    # TODO: improve output formatting of yaml
    artifact_definitions = [artifact.AsDict() for artifact in artifacts]
    yaml_data = yaml.safe_dump_all(artifact_definitions)
    return yaml_data
Example #44
def write_new_data(p, reachable, unreachable, check_data, statuses,
                   ref_nodelist):
    """Write reachable and unreachable node lists to YaML file.

    :param ref_nodelist: nodelist serving as a comparision reference on evaluating cluster health
    :param namespace p: contains all command-line parameters
    :param list reachable: list of reachable nodes
    :param list unreachable: list of unreachable nodes
    :param list check_data: pre-compiled node status report
    :param dict statuses: status list for unreachable nodes
    :return: nothing
    """
    logging.debug('write_new_data() started')
    filename = "%s/check_akka_cluster-%s.yml" % (STATUS_PATH_PREFIX,
                                                 p.state_file)
    try:
        with open(filename, 'w') as yaml_file:
            yaml.safe_dump_all([(reachable, unreachable, ref_nodelist)],
                               yaml_file)
    except IOError as E:
        logging.error(
            'Could not open %s: %s. Please check file access rights.' %
            (filename, str(E)))
    if check_data is None:
        node_list = prepare_node_report(reachable, unreachable, statuses)
    else:
        node_list = check_data
    filename = "%s/check_akka_cluster-%s.last" % (STATUS_PATH_PREFIX,
                                                  p.state_file)
    try:
        with open(filename, "w") as text_file:
            for i in node_list:
                text_file.writelines("%s\n" % i)
    except IOError as E:
        logging.error(
            'Could not open %s: %s. Please check file access rights.' %
            (filename, str(E)))
    logging.debug('write_new_data() finished')
Example #45
def main():
    parser = ArgumentParser(description=_PROG_HELP)
    schema_values_common.add_to_argument_parser(parser)
    parser.add_argument('--deployer_image', required=True)
    parser.add_argument('--deployer_entrypoint', default=None)
    args = parser.parse_args()

    schema = schema_values_common.load_schema(args)
    values = schema_values_common.load_values(args)
    manifests = process(schema,
                        values,
                        deployer_image=args.deployer_image,
                        deployer_entrypoint=args.deployer_entrypoint)
    print(yaml.safe_dump_all(manifests, default_flow_style=False, indent=2))
Example #46
 def test_deserializer_grouped_correctly(self):
     deserializer = multiassignment_serialize.Deserializer(yaml.safe_dump_all([
         {'id': 1, 'title': 'Existing1'},
         {'title': 'New'},
         {'id': 2, 'title': 'Existing2'},
     ]), course_tag='duck1000')
     self.assertEquals(
         deserializer.deserialized_assignments_with_id,
         {1: {'id': 1, 'title': 'Existing1'}, 2: {'id': 2, 'title': 'Existing2'}}
     )
     self.assertEquals(
         deserializer.deserialized_assignments_without_id,
         [{'title': 'New'}]
     )
Example #47
 def update_environments_yaml(self, key, val, provider='local'):
     """ updates environments.yaml base file """
     _env_yaml = os.path.expanduser("~/.juju/environments.yaml")
     if os.path.exists(_env_yaml):
         with open(_env_yaml) as f:
             _env_yaml_raw = f.read()
             env_yaml = yaml.load(_env_yaml_raw)
     else:
         raise ConfigException("~/.juju/environments.yaml unavailable, "
                               "is juju bootstrapped?")
     if key in env_yaml['environments'][provider]:
         env_yaml['environments'][provider][key] = val
     with open(_env_yaml, 'w') as f:
         _env_yaml_raw = yaml.safe_dump_all(env_yaml)
         f.write(_env_yaml_raw)
Example #48
File: yaml.py Project: google/grr
def DumpMany(objs):
  """Stringifies a sequence of Python objects to a multi-document YAML.

  Args:
    objs: An iterable of Python objects to convert to YAML.

  Returns:
    A multi-document YAML representation of the given objects.
  """
  precondition.AssertIterableType(objs, object)

  text = yaml.safe_dump_all(objs, default_flow_style=False, allow_unicode=True)

  if compatibility.PY2:
    text = text.decode("utf-8")

  return text
Example #49
def main(argv):
    o = RewriteOptions()
    try:
        o.parseOptions(argv[1:])
    except UsageError as e:
        raise SystemExit(e)

    docs = freeze(list(safe_load_all(stdin)))

    if o["git-tag"] is not None:
        tag = check_output(["git", "rev-parse", "--short", o["git-tag"]]).strip()
        docs = rewrite_tags(docs, tag)
    elif o["tag"] is not None:
        docs = rewrite_tags(docs, o["tag"])

    if o["no-volumes"]:
        docs = stub_all_volumes(docs)

    stdout.write(safe_dump_all(thaw(docs)))
Example #50
    def test_deserializer_sync(self):
        assignment1 = coremodels.Assignment.objects.create(
            title='Existing1', text='text1')
        assignment2 = coremodels.Assignment.objects.create(
            title='Existing2', text='text2')
        duck1000tag = coremodels.Tag.objects.create(tag='duck1000')
        assignment1.tags.add(duck1000tag)
        assignment2.tags.add(duck1000tag)
        deserializer = multiassignment_serialize.Deserializer(yaml.safe_dump_all([
            {'id': assignment1.id, 'title': 'Updated1', 'text': 'updatedText1',
             'tags': ['oblig1']},
            {'title': 'New1', 'text': 'newText1'},
            {'title': 'New2', 'text': 'newText2',
             'tags': ['duck1000', 'oblig1'],
             'solution': 'newSolution'},
            {'id': assignment2.id, 'title': 'Updated2', 'text': 'updatedText2',
             'tags': ['duck1000', 'oblig2'],
             'solution': 'updatedSolution'},
        ]), course_tag='duck1000')
        deserializer.sync()

        assignment1 = coremodels.Assignment.objects.get(id=assignment1.id)
        assignment2 = coremodels.Assignment.objects.get(id=assignment2.id)
        newassignment1 = coremodels.Assignment.objects.get(title='New1')
        newassignment2 = coremodels.Assignment.objects.get(title='New2')

        self.assertEquals(assignment1.title, 'Updated1')
        self.assertEquals(assignment1.solution, '')
        self.assertEquals(assignment2.title, 'Updated2')
        self.assertEquals(assignment2.solution, 'updatedSolution')
        self.assertEquals(
            set([tagobject.tag for tagobject in assignment1.tags.all()]),
            set(['duck1000', 'oblig1']))
        self.assertEquals(
            set([tagobject.tag for tagobject in assignment2.tags.all()]),
            set(['duck1000', 'oblig2']))
        self.assertEquals(
            set([tagobject.tag for tagobject in newassignment1.tags.all()]),
            set(['duck1000']))
        self.assertEquals(
            set([tagobject.tag for tagobject in newassignment2.tags.all()]),
            set(['duck1000', 'oblig1']))
Example #51
def serialize(assignments):
    """
    Serialize an iterable of assignments.

    Params:
        assignments (iterable): Iterable of :class:`trix.trix_core.models.Assignment` objects
    """
    serializable_assignments = []
    for assignment in assignments:
        serializable_assignment = YamlMapOrderedDict([
            ('id', assignment.id),
            ('title', assignment.title),
            ('tags', [tag.tag for tag in assignment.tags.all()]),
            ('hidden', assignment.hidden),
            ('text', MarkdownString(assignment.text)),
        ])
        if assignment.solution:
            serializable_assignment['solution'] = MarkdownString(assignment.solution)
        serializable_assignments.append(serializable_assignment)
    return yaml.safe_dump_all(serializable_assignments, allow_unicode=True)
Example #52
  def add_page(page):
    """Appends a page to the bottom of the sidebar.

    Args:
      page: Page to append

    """
    sidebar = Sidebar.load()

    if sidebar is None:
      sidebar = Sidebar(yaml="---\nheading: ''\n\n")

    sidebar_documents = list(yaml.load_all(sidebar.yaml))
    if sidebar_documents:
      last_document = sidebar_documents[-1]
      if not last_document.has_key('pages'):
        last_document['pages'] = []
      last_document['pages'].append({'id': page.key().id(),
                                     'title': page.title})

    sidebar.yaml = yaml.safe_dump_all(sidebar_documents)
    sidebar.put()
Example #53
  def CompileReport(self, mediator):
    """Compiles an analysis report.

    Args:
      mediator (AnalysisMediator): mediates interactions between analysis
          plugins and other components, such as storage and dfvfs.

    Returns:
      AnalysisReport: report.
    """
    # TODO: move YAML representation out of plugin and into serialization.
    lines_of_text = []
    if self._output_format == 'yaml':
      lines_of_text.append(
          yaml.safe_dump_all(self._service_collection.services))
    else:
      lines_of_text.append('Listing Windows Services')
      for service in self._service_collection.services:
        lines_of_text.append(self._FormatServiceText(service))
        lines_of_text.append('')

    lines_of_text.append('')
    report_text = '\n'.join(lines_of_text)
    return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
Example #54
  def CompileReport(self, analysis_mediator):
    """Compiles an analysis report.

    Args:
      analysis_mediator: The analysis mediator object (instance of
                         AnalysisMediator).

    Returns:
      The analysis report (instance of AnalysisReport).
    """
    # TODO: move YAML representation out of plugin and into serialization.
    lines_of_text = []
    if self._output_format == u'yaml':
      lines_of_text.append(
          yaml.safe_dump_all(self._service_collection.services))
    else:
      lines_of_text.append(u'Listing Windows Services')
      for service in self._service_collection.services:
        lines_of_text.append(self._FormatServiceText(service))
        lines_of_text.append(u'')

    lines_of_text.append(u'')
    report_text = u'\n'.join(lines_of_text)
    return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
Example #55
shell_fmt = "shell set -x; ../../../rp/rpki-rtr/rpki-rtr cronjob rcynic-data/authenticated && tar %svf rpki-rtr.tar *.[ai]x*.v*"
shell_first = shell_fmt % "c"
shell_next  = shell_fmt % "u"

sleeper = "sleep 30"

docs = [dict(name         = root,
             valid_for    = "1y",
             kids         = [kid.declare for kid in kids])]

docs.append([shell_first,
             sleeper])

gym = kids[50:70]

for kid in gym:
    docs.append([shell_next,
                 kid.del_routercert,
                 sleeper])

for kid in gym:
    docs.append([shell_next,
                 kid.add_routercert,
                 sleeper])

print '''\
# This configuration was generated by a script.  Edit at your own risk.
'''

print yaml.safe_dump_all(docs, default_flow_style = False, allow_unicode = False)
Example #56
#!/usr/bin/env python

import yaml

def get_tags(d):
	if d and 'tags' in d and d['tags']:
		tags=[t.strip() for t in d['tags'].split(",") if len(t.strip())>0]
		if len(tags)>0:
			return tags
	return None

open("meneame.tags.yml", 'w').close()
with open("meneame.yml", 'r') as stream:
    try:
        docs=yaml.load_all(stream)
	for d in docs:
		tags=get_tags(d)
		if tags:
			o={
				'published': d['published'],
				'tags': tags
			}
			with open('meneame.tags.yml', 'a') as outfile:
				yaml.safe_dump_all([o], outfile) #, default_flow_style=False,allow_unicode=True, default_style=None)
				outfile.write("---\n")
    except yaml.YAMLError as exc:
        print(exc)
Example #57
def toyaml(*records, **kw):
    if kw: records += (kw,)
    # SafeDumper will not emit python/foo tags for unicode or objects
    return yaml.safe_dump_all(records, **DEFAULT_OPTIONS)
Example #58
def dump_yaml(data):
    return yaml.safe_dump_all(data, default_flow_style=False, indent=3,
                         line_break=True) + '...'
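Appending '...' by hand marks the end of the final document; passing explicit_end=True to safe_dump_all achieves much the same thing, emitting a '...' terminator after every document:

import yaml

print(yaml.safe_dump_all([{"a": 1}, {"b": 2}], default_flow_style=False,
                         explicit_end=True))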
Example #59
def write_yaml(*records, **options):
    options = merge({}, DEFAULT_OPTIONS, options)
    return yaml.safe_dump_all(records, **options)
Example #60
def main(argv=None):
    """Command line interface for running grocery simulations.

    This function handles command line parsing, configuration setup, and
    launching simulations.

    """
    parser = argparse.ArgumentParser()
    version = get_version()
    parser.add_argument(
        '--version', '-V', action='version', version=version,
        help='Show version and exit.')
    parser.add_argument(
        '--named', '-n', metavar='GROUP', dest='named_configs',
        action='append', default=[],
        help='Use named configuration %(metavar)s.')
    parser.add_argument(
        '--set', '-s', nargs=2, metavar=('KEY', 'VALUE'),
        action='append', default=[], dest='config_overrides',
        help='Override configuration KEY with VALUE expression.')
    parser.add_argument(
        '--factor', '-f', nargs=2, metavar=('KEYS', 'VALUES'),
        action='append', default=[], dest='factors',
        help='Add factorial KEYS with VALUES list of value expressions.')
    parser.add_argument(
        '--config', '-c', metavar='YAML', type=argparse.FileType('r'),
        action='append', default=[], dest='config_files',
        help='Read configuration from YAML file.')
    parser.add_argument(
        '--print-config', action='store_true',
        help='Print configuration and exit.')
    parser.add_argument(
        '--print-named', action='store_true',
        help='Print named configuration groups and exit.')

    extra_argv = shlex.split(os.environ.get('SB_EXTRA', ''))

    if argv is None:
        argv = sys.argv[1:]

    args = parser.parse_args(extra_argv + argv)

    if len(args.config_files) > 1 and args.factors:
        parser.error('argument --factor/-f: not allowed with multiple '
                     '--config/-c arguments')

    if args.print_named:
        print_named(named, sys.stdout)
        parser.exit()

    configs = []

    try:
        if args.config_files:
            named_overrides = named.resolve(*args.named_configs)
            for config_file in args.config_files:
                config = named.resolve('default')
                apply_user_config(config, parse_config_file(config_file))
                config.update(named_overrides)
                apply_user_overrides(config, args.config_overrides)
                configs.append(config)
        else:
            config = named.resolve('default', *args.named_configs)
            apply_user_overrides(config, args.config_overrides)
            configs.append(config)

        factors = parse_user_factors(configs[0], args.factors)
    except ConfigError as e:
        parser.error(str(e))

    if args.print_config:
        yaml.safe_dump_all(configs, stream=sys.stdout)
        parser.exit()

    try:
        if len(configs) == 1:
            config = configs[0]
            if factors:
                results = simulate_factors(config, factors, Top, Environment)
                return check_errors(results)
            else:
                simulate(config, Top, Environment)
        else:
            results = simulate_many(configs, Top, Environment)
            return check_errors(results)
    except KeyboardInterrupt:
        print("\nInterrupted by user", file=sys.stderr)
        return 1