Example #1
    def __enter__(self):
        if isinstance(self.fn_or_stream, str):
            # Open the named file.
            try:
                self.stream = open(self.fn_or_stream, "r+")
            except FileNotFoundError:
                if not isinstance(self.default, (list, dict)):
                    # If there is no default and the file
                    # does not exist, re-raise the exception.
                    raise
                else:
                    # Create a new file holding the default,
                    # then seek back to the beginning so
                    # we can read it below.
                    self.stream = open(self.fn_or_stream, "w+")
                    rtyaml.dump(self.default, self.stream)
                    self.stream.seek(0)

            self.close_on_exit = True
        else:
            # Use the given stream.
            self.stream = self.fn_or_stream
        # Parse stream and return data.
        self.data = rtyaml.load(self.stream)
        return self.data
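This looks like the __enter__ half of rtyaml's edit() context manager (paired with the __exit__ shown in Example #12). Assuming that entry point, a minimal usage sketch follows; the file name and key are hypothetical:

import rtyaml

# Open (or create, since a default is supplied) the YAML file, mutate the
# parsed data in place, and let __exit__ write it back when the block ends.
with rtyaml.edit("settings.yaml", default={}) as data:
    data["retries"] = 3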
Example #2
def process(selection, template_file, template_path, output_path, logger):
    logger.print("Checking {}".format(template_file))
    try:
        with open(template_file) as fp:
            output_file = rewrite(template_file, template_path, output_path)
            output_file_p = Path(output_file)
            if not output_file_p.parent.is_dir():
                output_file_p.parent.mkdir(parents=True, exist_ok=True)

            if template_file.name == 'component.yaml':
                logger.print("  Copying {} to {}".format(
                    template_file, output_file))
                shutil.copy(template_file, output_file)
            else:
                object = rtyaml.load(fp)
                object = select_controls(object, selection)
                controls = sorted(control['control_key']
                                  for control in object['satisfies'])
                logger.print("  Writing controls to {}".format(output_file))
                for control in controls:
                    logger.print("    {}".format(control))
                with open(output_file, "w") as out:
                    rtyaml.dump(object, out)

    except Exception as e:
        print("Exception {} processing {}".format(e, template_file))
Example #3
File: govinfo.py Project: d0tN3t/congress
def update_sitemap(url, current_lastmod, how_we_got_here, options):
    """Updates the local cache of a sitemap file."""

    # Skip if the year or congress flags are set and this sitemap is
    # not for that year or Congress.
    if should_skip_sitemap(url, options):
        return []

    # For debugging, remember what URLs we are stepping through.
    how_we_got_here = how_we_got_here + [url]

    # Get the file paths to cache:
    # * the sitemap XML for future runs
    # * its <lastmod> date (which comes from the parent sitemap) so we know if we need to re-download it now
    # * the <lastmod> dates of the packages listed in this sitemap so we know if we need to re-download any package files
    cache_file = get_sitemap_cache_file(url)
    cache_file = os.path.join("govinfo/sitemap", cache_file, "sitemap.xml")
    lastmod_cache_file = cache_file.replace(".xml", "-lastmod.yaml")
    lastmod_cache_file = os.path.join(utils.cache_dir(), lastmod_cache_file)
    if not os.path.exists(lastmod_cache_file):
        lastmod_cache = { }
    else:
        with open(lastmod_cache_file) as f:
            lastmod_cache = rtyaml.load(f)

    try:
        return update_sitemap2(url, current_lastmod, how_we_got_here, options, lastmod_cache, cache_file)
    finally:
        # Write the updated last modified dates to disk so we know the next time whether
        # we need to fetch the files. If we didn't download anything, no need to write an
        # empty file.
        with utils.NoInterrupt():
            with open(lastmod_cache_file, "w") as f:
                rtyaml.dump(lastmod_cache, f)
Example #4
def yaml_dump(data, path):
    rtyaml.dump(data, open(path, "w"))

    # Store in a pickled file for fast access later.
    import cPickle as pickle, hashlib
    h = hashlib.sha1(open(path).read()).hexdigest()
    pickle.dump({ "hash": h, "data": data }, open(path+".pickle", "w"))
Example #5
File: import.py Project: GovReady/easyscap
def process_xccdf_group(xccdf, xccdf_path, outdir, rule_profiles, group_path, drop_id_prefix):
	# Process all of the rules here.
	rules = []
	for rule in xccdf.findall("{http://checklists.nist.gov/xccdf/1.2}Rule"):
		rules.append(process_rule(rule, rule_profiles, xccdf_path, group_path, outdir, drop_id_prefix))
	
	# Process all of the groups here
	groups = []
	for group in xccdf.findall("{http://checklists.nist.gov/xccdf/1.2}Group"):
		# a nice directory name for the group
		g = group.get('id')
		g = re.sub('^xccdf_org\.(.*)\.content_group_(.*)$', r'\1_\2', g)
		if drop_id_prefix and g.startswith(drop_id_prefix):
			g = g[len(drop_id_prefix):]
			child_drop_id_prefix = drop_id_prefix
		elif "_" in g:
			child_drop_id_prefix = g.split("_")[0] + "_"
		else:
			child_drop_id_prefix = None
		groups.append(g)

		process_xccdf_group(group, xccdf_path, outdir, rule_profiles, group_path + [g], child_drop_id_prefix)

	groupdict = collections.OrderedDict([
		("id", xccdf.get("id")),
		("title", xccdf.find("{http://checklists.nist.gov/xccdf/1.2}title").text),
		("description", pandoc(xccdf.find("{http://checklists.nist.gov/xccdf/1.2}description"), 'html', 'markdown')),
		("rules", rules),
		("subgroups", groups),
		])
	fn = os.path.join(*([outdir] + group_path + ['group.yaml']))
	os.makedirs(os.path.dirname(fn), exist_ok=True)
	with open(fn, "w") as f:
		rtyaml.dump(groupdict, f)
Example #6
    def save_as(self, base_dir):
        "Save an OpenControl repo in a new location"
        root = self.dict(exclude={"standards", "components", "systems"})
        root["certifications"] = []
        for cert in self.certifications:
            cert_storage = cert.storage_path(base_dir)
            cert_storage.parent.mkdir(parents=True, exist_ok=True)
            with cert_storage.open("w") as cert_file:
                cert_file.write(rtyaml.dump(cert.dict()))
                FILE_SIGNAL.send(self, operation="write", path=cert_storage)
                root["certifications"].append(str(cert.storage_path()))

        root["standards"] = []
        for std in self.standards.values():
            std_storage = std.storage_path(base_dir)
            std_storage.parent.mkdir(parents=True, exist_ok=True)
            with std_storage.open("w") as std_file:
                std_file.write(rtyaml.dump(std.dict()))
                FILE_SIGNAL.send(self, operation="write", path=std_storage)
                root["standards"].append(str(std.storage_path()))

        root["components"] = [str(c.storage_path()) for c in self.components]

        root_storage = self.storage_path(base_dir)
        with root_storage.open("w") as root_file:
            root_file.write(rtyaml.dump(root))
            FILE_SIGNAL.send(self, operation="write", path=root_storage)

        for c in self.components:
            component_path = c.storage_path(base_dir)
            component_path.parent.mkdir(parents=True, exist_ok=True)

            with component_path.open("w") as component_file:
                component_file.write(rtyaml.dump(c.dict()))
            FILE_SIGNAL.send(self, operation="write", path=component_path)
Example #7
def check_id_types(legislator, seen_ids, is_legislator, context):
    for key, value in legislator["id"].items():
        # Check that the id key is one we know about.
        if key not in id_types:
            error(context, rtyaml.dump({key: value}) + " is not a valid id.")

        # Check that the data type is correct.
        elif not isinstance(value, id_types[key]):
            error(context,
                  rtyaml.dump({key: value}) + " has an invalid data type.")

        else:
            # Check that the ID isn't duplicated across legislators.
            # Since some values are lists of IDs, check the elements.
            # Just make a list of ID occurrences here -- we'll check
            # uniqueness at the end.
            if not isinstance(value, list): value = [value]
            for v in value:
                seen_ids.setdefault((key, v), []).append(legislator)

    if is_legislator:
        # Check that every legislator has ids of the required types.
        for id_type in id_required:
            if id_type not in legislator["id"]:
                error(context, "Missing %s id." % id_type)
Example #8
def yaml_dump(data, path):
    rtyaml.dump(data, open(path, "w"))

    # Store in a pickled file for fast access later.
    import cPickle as pickle, hashlib
    h = hashlib.sha1(open(path).read()).hexdigest()
    pickle.dump({"hash": h, "data": data}, open(path + ".pickle", "w"))
Example #9
File: govinfo.py Project: syyunn/congress
def update_sitemap(url, current_lastmod, how_we_got_here, options):
    """Updates the local cache of a sitemap file."""

    # Skip if the year or congress flags are set and this sitemap is
    # not for that year or Congress.
    if should_skip_sitemap(url, options):
        return []

    # For debugging, remember what URLs we are stepping through.
    how_we_got_here = how_we_got_here + [url]

    # Get the file paths to cache:
    # * the sitemap XML for future runs
    # * its <lastmod> date (which comes from the parent sitemap) so we know if we need to re-download it now
    # * the <lastmod> dates of the packages listed in this sitemap so we know if we need to re-download any package files
    cache_file = get_sitemap_cache_file(url)
    cache_file = os.path.join("govinfo/sitemap", cache_file, "sitemap.xml")
    lastmod_cache_file = cache_file.replace(".xml", "-lastmod.yaml")
    lastmod_cache_file = os.path.join(utils.cache_dir(), lastmod_cache_file)
    if not os.path.exists(lastmod_cache_file):
        lastmod_cache = {}
    else:
        with open(lastmod_cache_file) as f:
            lastmod_cache = rtyaml.load(f)

    try:
        return update_sitemap2(url, current_lastmod, how_we_got_here, options,
                               lastmod_cache, cache_file)
    finally:
        # Write the updated last modified dates to disk so we know the next time whether
        # we need to fetch the files. If we didn't download anything, no need to write an
        # empty file.
        with utils.NoInterrupt():
            with open(lastmod_cache_file, "w") as f:
                rtyaml.dump(lastmod_cache, f)
Example #10
def check_name(name, context, is_other_names=False):
    # Check for required keys and data types of the values.
    for key, value in name.items():
        if key in ("start", "end") and is_other_names:
            if not isinstance(value, str):
                error(context,
                      rtyaml.dump({key: value}) + " has an invalid data type.")
        elif key not in name_keys:
            error(context, "%s is not a valid key in name." % key)
        elif key in ("first", "last"):
            # These are required.
            if not isinstance(value, str):
                error(context,
                      rtyaml.dump({key: value}) + " has an invalid data type.")
        else:
            # These can be set explicitly to None, but maybe we should just remove
            # those keys then.
            if not isinstance(value, (str, type(None))):
                error(context,
                      rtyaml.dump({key: value}) + " has an invalid data type.")

    # If a person has a first initial only, they should also have a middle name.
    # (GovTrack relies on this to generate name strings.)
    if isinstance(name.get("first"), str) and len(
            name["first"]) == 2 and name["first"].endswith(
                ".") and not name.get("middle"):
        error(
            context,
            rtyaml.dump(name) +
            " is missing a middle name to go with its first initial.")
Example #11
def yaml_dump(data, path):
    # open for reading as well as writing
    rtyaml.dump(data, open(path, "r+"))

    # Store in a pickled file for fast access later.
    import pickle as pickle, hashlib
    h = hashlib.sha1(open(path, 'rb').read()).hexdigest()
    pickle.dump({ "hash": h, "data": data }, open(path+".pickle", "wb"))
Example #12
    def __exit__(self, *exception):
        # Truncate stream and write new data.
        self.stream.seek(0)
        self.stream.truncate()
        rtyaml.dump(self.data, self.stream)
        # Close stream if we opened it.
        if getattr(self, "close_on_exit", False):
            self.stream.close()
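Example #13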
def download_dc_register_notice2(noticeId, page):
    # Read page.
    url = page.geturl()
    page = page.read().decode("utf8")

    # Parse the page.
    dom = lxml.html.fromstring(page)

    # Find metadata elements --- these elements all have id="MainContent_lbl...".
    metadata = {}
    for node in dom.xpath("//*[@id]"):
        m = re.match(r"MainContent_(lbl|lnk)([A-Za-z].*)", node.get("id"))
        if m:
            id = m.group(2)
            if id in ("SubCategory", "EffectiveDateLabel"):
                # these are actually labels and not values
                continue
            inner_text = (node.text or '') + ''.join(
                etree.tostring(e) for e in node)
            inner_text = re.sub(r'\s+', ' ',
                                inner_text)  # cleanse HTML whitespace
            metadata[id] = inner_text

    if not metadata or metadata['Subject'] == "":
        raise ValueError("Subject is empty? " + noticeId)

    # Follow the "View text" link to get the binary content of the notice document.
    document = download_postback(url, 'ctl00$MainContent$lnkNoticeFile', page)

    # Update the metadata with the response headers of the download.
    metadata["textHttpHeaders"] = dict(document.info())

    # Save the metadata.
    os.makedirs("notices", exist_ok=True)
    with open("notices/" + noticeId + ".yaml", "w") as f:
        rtyaml.dump(metadata, f)

    # Check that there wasn't an Oops error downloading the blob. If there was,
    # simply don't write it to disk. Similarly if we got a blank document.
    document = document.read()
    if b"<h3> Oops!!  An Error Occurred." in document:
        return metadata
    if len(document) == 0:
        return metadata

    # Save the document to disk.
    with open("notices/" + noticeId + ".blob", "wb") as f:
        f.write(document)

    return metadata
Example #14
def update_component_control(controlimpl):
    # Clean the inputs. Update controlimpl so the caller has the actual values we saved here.
    controlimpl["narrative"] = clean_text(controlimpl["narrative"])
    if controlimpl["implementation_status"]:
        controlimpl["implementation_status"] = clean_text(
            controlimpl["implementation_status"])

    # The control is defined in the component.yaml file given in controlimpl["source_file"].
    # Open that file for editing, find the control record, update it, and return.
    with open(controlimpl["source_file"], "r+", encoding="utf8") as f:
        # Parse the content.
        data = rtyaml.load(f)

        # Look for a matching control entry.
        for control in data["satisfies"]:
            # Skip over entries that are strings -- they hold (OpenControl non-conformant) filenames.
            if not isinstance(control, dict):
                continue

            if control["standard_key"] == controlimpl["standard"]["id"] \
              and control["control_key"] == controlimpl["control"]["id"]:

                for narrative_part in control.get("narrative", []):
                    if narrative_part.get("key") == controlimpl.get(
                            "control_part"):

                        # Found the right entry. Update the fields.

                        narrative_part["text"] = controlimpl["narrative"]

                        # Store implementation_status here. In OpenControl there is
                        # a `implementation_statuses` on the control. But our data
                        # model has a single implementation_status per control *part*.
                        # If the implementation status is cleared, remove the key.
                        if controlimpl["implementation_status"]:
                            narrative_part[
                                "implementation_status"] = controlimpl[
                                    "implementation_status"]
                        elif "implementation_status" in narrative_part:
                            del narrative_part["implementation_status"]

                        # Write back out to the data files.
                        f.seek(0)
                        f.truncate()
                        rtyaml.dump(data, f)

                        return True

    return False
Example #15
def write_custom_dns_config(config, env):
	# We get a list of (qname, rtype, value) triples. Convert this into a
	# nice dictionary format for storage on disk.
	from collections import OrderedDict
	config = list(config)
	dns = OrderedDict()
	seen_qnames = set()

	# Process the qnames in the order we see them.
	for qname in [rec[0] for rec in config]:
		if qname in seen_qnames: continue
		seen_qnames.add(qname)

		records = [(rec[1], rec[2]) for rec in config if rec[0] == qname]
		if len(records) == 1 and records[0][0] == "A":
			dns[qname] = records[0][1]
		else:
			dns[qname] = OrderedDict()
			seen_rtypes = set()

			# Process the rtypes in the order we see them.
			for rtype in [rec[0] for rec in records]:
				if rtype in seen_rtypes: continue
				seen_rtypes.add(rtype)

				values = [rec[1] for rec in records if rec[0] == rtype]
				if len(values) == 1:
					values = values[0]
				dns[qname][rtype] = values

	# Write.
	config_yaml = rtyaml.dump(dns)
	with open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml'), "w") as f:
		f.write(config_yaml)
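For reference, a short sketch of the on-disk shape this produces, using hypothetical records: a qname with a single A record collapses to a plain string, anything else becomes a nested mapping.

import rtyaml
from collections import OrderedDict

dns = OrderedDict()
dns["example.com"] = "192.0.2.1"  # lone A record collapses to a plain value
dns["mail.example.com"] = OrderedDict([("A", "192.0.2.2"),
                                       ("MX", "10 mx1.example.com.")])
print(rtyaml.dump(dns))
# example.com: 192.0.2.1
# mail.example.com:
#   A: 192.0.2.2
#   MX: 10 mx1.example.com.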
Example #16
File: merge.py Project: allaud/yaml_merge
def merge(old, new, ask_user=False, warn_user=False):
    try:
        old_dict = yaml.load(old) or {}
        new_dict = yaml.load(new) or {}
    except Exception as e:
        print('Cannot parse yaml')
        sys.exit(-1)

    merged = OrderedDict()
    for key, value in new_dict.items():
        old_value = old_dict.get(key)
        if old_value is not None:
            if ask_user:
                value = _choose_value(key, old_value, value)
            else:
                value = old_value
        merged.update({key: value})

    data_from_comments = load_from_comments(new)
    for key, value in data_from_comments.items():
        old_value = old_dict.get(key)
        if old_value is None:
            continue
        if warn_user:
            sys.stderr.write("Uncommenting: %s \n" % key)
        merged.update({key: old_value})

    return old, new, yaml.dump(merged)
Example #17
def write_custom_dns_config(config, env):
    # We get a list of (qname, rtype, value) triples. Convert this into a
    # nice dictionary format for storage on disk.
    from collections import OrderedDict
    config = list(config)
    dns = OrderedDict()
    seen_qnames = set()

    # Process the qnames in the order we see them.
    for qname in [rec[0] for rec in config]:
        if qname in seen_qnames: continue
        seen_qnames.add(qname)

        records = [(rec[1], rec[2]) for rec in config if rec[0] == qname]
        if len(records) == 1 and records[0][0] == "A":
            dns[qname] = records[0][1]
        else:
            dns[qname] = OrderedDict()
            seen_rtypes = set()

            # Process the rtypes in the order we see them.
            for rtype in [rec[0] for rec in records]:
                if rtype in seen_rtypes: continue
                seen_rtypes.add(rtype)

                values = [rec[1] for rec in records if rec[0] == rtype]
                if len(values) == 1:
                    values = values[0]
                dns[qname][rtype] = values

    # Write.
    config_yaml = rtyaml.dump(dns)
    with open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml'), "w") as f:
        f.write(config_yaml)
Example #18
def create_system(organization_name, system_name, description, repo_path):
    """Create a new system and its repository and return path to repo on file system"""

    # make repo directory
    if os.path.exists(repo_path):
        print("Path {} exists".format(repo_path))
    else:
        os.makedirs(repo_path)
        print("Path {} created".format(repo_path))

    # get default opencontrol.yaml configuration
    cfg = get_new_config(system_name, organization_name, description)
    print(cfg["name"])
    print("\npreparing system dir: {}".format(system_name))

    # create various directories
    os.makedirs(os.path.join(repo_path, "components"))
    os.makedirs(os.path.join(repo_path, "standards"))
    os.makedirs(os.path.join(repo_path, "certifications"))
    os.makedirs(os.path.join(repo_path, "outputs"))

    # create opencontrol.yaml config file
    with open(os.path.join(repo_path, "opencontrol.yaml"), 'w') as outfile:
        outfile.write(rtyaml.dump(cfg))
        print("wrote file: {}\n".format(
            os.path.join(repo_path, "opencontrol.yaml")))

    # populate reference directories from reference
    shutil.copyfile(
        os.path.join("ref", "standards", "NIST-SP-800-53-rev4.yaml"),
        os.path.join(repo_path, "standards", "NIST-SP-800-53-rev4.yaml"))
    print("wrote file: {}\n".format(
        os.path.join(repo_path, "standards", "NIST-SP-800-53-rev4.yaml")))
    shutil.copyfile(os.path.join("ref", "standards", "opencontrol.yaml"),
                    os.path.join(repo_path, "standards", "opencontrol.yaml"))
    print("wrote file: {}\n".format(
        os.path.join(repo_path, "standards", "opencontrol.yaml")))
    # shutil.copyfile(os.path.join("ref", "standards", "hipaa-draft.yaml"), os.path.join(repo_path, cfg["standards"][0], "hipaa-draft.yaml"))
    # print("wrote file: {}\n".format(os.path.join(repo_path, cfg["standards"][0], "hipaa-draft.yaml")))
    shutil.copyfile(
        os.path.join("ref", "certifications", "fisma-low-impact.yaml"),
        os.path.join(repo_path, "certifications", "fisma-low-impact.yaml"))
    print("wrote file: {}\n".format(
        os.path.join(repo_path, "certifications", "fisma-low-impact.yaml")))

    # make stub README.md file
    with open(os.path.join(repo_path, "README.md"), 'w') as outfile:
        outfile.write(
            "Machine readable representation of 800-53 control implementations for {}.\n\n# Notes\n\n"
            .format(system_name))
        print("wrote file: {}\n".format(os.path.join(repo_path, "README.md")))

    # append repo path to repos.conf
    # TODO - read and clean repos.conf and then append;use clean_text function?
    with open("repos.conf", 'a') as outfile:
        outfile.write("\n{}".format(repo_path))
        print("appended {} to file: repos.conf\n".format(repo_path))

    # Now return the path to the repository
    return repo_path
Example #19
    def make_widget(key, label, widget, help_text, show_for_types):
        if key != "_remaining_":
            if value is not None and key in value:
                val = value[key]
                del value[key]  # only the unrecognized keys are left at the end
            else:
                val = ""
        elif value is None:
            # Nothing unrecognized.
            val = ""
        else:
            # Serialize unrecognized keys in YAML.
            import rtyaml
            val = rtyaml.dump(value)
        return """
            <div style="clear: both; padding-bottom: .75em" class="{}">
                <label for="id_{}_{}">{}:</label>
                {}
                <p class="help">{}</p>
            </div>""".format(
            ("show_if_type " + " ".join(
                ("show_if_type_" + s)
                for s in show_for_types) if show_for_types else ""),
            escape_html(name),
            key,
            escape_html(label),
            widget.render(name + "_" + key, val),
            escape_html(help_text or ""),
        )
Example #20
def create_pledge_donation(pledge, recipients):
	# Pledge execution --- make a credit card charge and return
	# the DE donation record and other details.

	# Compute the amount to charge the user. We can only make whole-penny
	# contributions, so the exact amount of the charge may be less than
	# what the user pledged. recip_contribs is the line item amounts for
	# each recipient as a tuple of (recipient, action, amount).
	recip_contribs, fees, total_charge = compute_charge(pledge, recipients)

	# Prepare line items for the API.
	line_items = []

	# Create the line item for fees.
	line_items.append({
		"recipient_id": DemocracyEngineAPI.fees_recipient_id,
		"amount": DemocracyEngineAPI.format_decimal(fees),
		})

	# Create the line items for campaign recipients.
	for action, recipient_type, recipient, amount in recip_contribs:
		line_items.append({
			"recipient_id": recipient.de_id,
			"amount": DemocracyEngineAPI.format_decimal(amount),
			})

	# Prepare the donation record for authorization & capture.
	de_don_req = create_de_donation_basic_dict(pledge)
	de_don_req.update({
		# billing info
		"token": pledge.profile.extra['billing']['de_cc_token'],

		# line items
		"line_items": line_items,

		# reported to the recipient
		"source_code": "",
		"ref_code": "",

		# tracking info for internal use
		"aux_data": rtyaml.dump({ # DE will gives this back to us encoded as YAML, but the dict encoding is ruby-ish so to be sure we can parse it, we'll encode it first
			"trigger": pledge.trigger.id,
			"campaign": pledge.via_campaign.id,
			"pledge": pledge.id,
			"user": pledge.user.id if pledge.user else None,
			"email": pledge.get_email(),
			"pledge_created": pledge.created,
			})
		})

	# Sanity check the total.
	if sum(decimal.Decimal(li['amount'].replace("$", "")) for li in de_don_req['line_items']) \
		!= total_charge:
		raise ValueError("Sum of line items does not match total charge.")
	
	# Create the 'donation', which creates a transaction and performs cc authorization.
	don = DemocracyEngineAPI.create_donation(de_don_req)

	# Return.
	return (recip_contribs, fees, total_charge, don)
Example #21
def parse_wkd_list():
    removed = []
    uidlist = []
    with open(wkdpath, "a+") as wkdfile:
        wkdfile.seek(0)
        config = {}
        try:
            config = rtyaml.load(wkdfile)
            if (type(config) != dict):
                config = {}
        except:
            config = {}

        writeable = copy.deepcopy(config)
        for u, k in config.items():
            try:
                key = email_compatible_with_key(u, k)
                # Key is compatible

                writeable[
                    u] = key.fpr  # Swap with the full-length fingerprint (if somehow this was changed by hand)
                uidlist.append((u, key.fpr))
            except:
                writeable.pop(u)
                removed.append((u, k))
        # Shove the updated configuration back in the file
        wkdfile.truncate(0)
        wkdfile.write(rtyaml.dump(writeable))
    return (removed, uidlist)
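The "a+" open, seek(0), load, truncate(0), re-dump sequence above is a generic rtyaml round-trip pattern; a minimal sketch with a hypothetical file and entry:

import rtyaml

# Read the existing mapping (an empty file loads as None), update it, and
# rewrite the whole file. "a+" creates the file if missing, and append-mode
# writes land at end-of-file, which is position 0 after truncate(0).
with open("wkd-sketch.yaml", "a+") as f:
    f.seek(0)
    data = rtyaml.load(f) or {}
    data["user@example.com"] = "0123456789ABCDEF0123456789ABCDEF01234567"
    f.truncate(0)
    f.write(rtyaml.dump(data))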
Example #22
def check_name(name, is_other_names=False):
    for key, value in name.items():
        if key in ("start", "end") and is_other_names:
            if not isinstance(value, str):
                error(rtyaml.dump({key: value}) + " has an invalid data type.")
        elif key not in name_keys:
            error("%s is not a valid key in name." % key)
        elif key in ("first", "last"):
            # These are required.
            if not isinstance(value, str):
                error(rtyaml.dump({key: value}) + " has an invalid data type.")
        else:
            # These can be set explicitly to None, but maybe we should just remove
            # those keys then.
            if not isinstance(value, (str, type(None))):
                error(rtyaml.dump({key: value}) + " has an invalid data type.")
Example #23
File: export.py Project: GovReady/easyscap
def process_test(test, oval_nodes, rule_id, test_index, var_map):
    # Create an OVAL definition for the test.

    # The object and state have to be moved into their own parts.
    for key, idslug in (("object", "obj"), ("state", "ste")):
        if key not in test:
            continue

        # Generate an id.
        oval_nodes[key + "_count"] += 1
        test[key]["id"] = "oval:easyscap_generated:%s:%d" % (idslug, oval_nodes[key + "_count"])

        # Generate an implicit type.
        if "type" not in test[key]:
            if not test.get("type", "").endswith("_test"):
                raise ValueError("Invalid test type: " + test)
            test[key]["type"] = test["type"][:-4] + key

        dict_to_node(oval_nodes[key + "s"], test[key], var_map=var_map, oval_nodes=oval_nodes)

        test[test["type"].split(":")[0] + ":" + key] = {key + "_ref": test[key]["id"]}

        del test[key]

        # Convert the rest.
    try:
        node = dict_to_node(oval_nodes["tests"], test, var_map=var_map, oval_nodes=oval_nodes)
        node.set("id", "oval:%s:tst:%d" % (rule_id, test_index + 1))
    except Exception as e:
        raise Exception("Error processing test (%s) in (%s)" % (str(e), rtyaml.dump(test)))
    return node
Example #24
def check_id_types(legislator):
    for key, value in legislator["id"].items():
        # Check that the id key is one we know about.
        if key not in id_types:
            error(rtyaml.dump({key: value}) + " is not a valid id.")

        # Check that the data type is correct.
        elif not isinstance(value, id_types[key]):
            error(rtyaml.dump({key: value}) + " has an invalid data type.")

        else:
            # Check that the ID isn't duplicated across legislators.
            # Since some values are lists of IDs, check the elements.
            if not isinstance(value, list): value = [value]
            for v in value:
                if (key, v) in seen_ids:
                    error(rtyaml.dump({key: v}) + " is duplicated.")
                seen_ids.add((key, v))
Example #25
def create_component(project, component_path, component_name):
    # Create a new OpenControl component.

    # Create the stub data structure.
    component_opencontrol = OrderedDict()
    component_opencontrol['schema_version'] = '3.0.0'
    component_opencontrol['name'] = component_name

    # Create the path.
    os.makedirs(os.path.join(project['path'], component_path))

    # Write the component.yaml file.
    with open(os.path.join(project['path'], component_path, 'component.yaml'),
              'w',
              encoding="utf8") as f:
        f.write(rtyaml.dump(component_opencontrol))

    # Add the path to the project's opencontrol.yaml file.
    with open(os.path.join(project["path"], 'opencontrol.yaml'),
              "r+",
              encoding="utf8") as f:
        # Parse the content.
        data = rtyaml.load(f)

        # Create the "components" array if it does not exist.
        if not isinstance(data.get("components"), list):
            data["components"] = []

        # Append the new component path.
        data["components"].append(component_path)

        # Write back out to the data files.
        f.seek(0)
        f.truncate()
        rtyaml.dump(data, f)

    # Read the component back and return it.
    for component in load_project_components(project):
        if component["path"] == os.path.join(project['path'], component_path):
            return component

    raise ValueError(
        "Component {} does not exist in project {} even after creating it.".
        format(component_path, project["id"]))
Example #26
def process_xccdf_group(xccdf, xccdf_path, outdir, rule_profiles, group_path,
                        drop_id_prefix):
    # Process all of the rules here.
    rules = []
    for rule in xccdf.findall("{http://checklists.nist.gov/xccdf/1.2}Rule"):
        rules.append(
            process_rule(rule, rule_profiles, xccdf_path, group_path, outdir,
                         drop_id_prefix))

    # Process all of the groups here
    groups = []
    for group in xccdf.findall("{http://checklists.nist.gov/xccdf/1.2}Group"):
        # a nice directory name for the group
        g = group.get('id')
        g = re.sub('^xccdf_org\.(.*)\.content_group_(.*)$', r'\1_\2', g)
        if drop_id_prefix and g.startswith(drop_id_prefix):
            g = g[len(drop_id_prefix):]
            child_drop_id_prefix = drop_id_prefix
        elif "_" in g:
            child_drop_id_prefix = g.split("_")[0] + "_"
        else:
            child_drop_id_prefix = None
        groups.append(g)

        process_xccdf_group(group, xccdf_path, outdir, rule_profiles,
                            group_path + [g], child_drop_id_prefix)

    groupdict = collections.OrderedDict([
        ("id", xccdf.get("id")),
        ("title",
         xccdf.find("{http://checklists.nist.gov/xccdf/1.2}title").text),
        ("description",
         pandoc(
             xccdf.find("{http://checklists.nist.gov/xccdf/1.2}description"),
             'html', 'markdown')),
        ("rules", rules),
        ("subgroups", groups),
    ])
    fn = os.path.join(*([outdir] + group_path + ['group.yaml']))
    os.makedirs(os.path.dirname(fn), exist_ok=True)
    with open(fn, "w") as f:
        rtyaml.dump(groupdict, f)
Example #27
def check_executive_term(term):
  # Check type.
  if term.get("type") not in ("prez", "viceprez"):
    error(rtyaml.dump(term) + " has invalid type.")

  # Check how.
  if term.get("how") not in ("election", "succession", "appointment"):
    error(rtyaml.dump(term) + " has invalid 'how'.")

  # Check date range.
  start = check_date(term.get('start'))
  end = check_date(term.get('end'))
  if start and end:
    if end < start:
      error(rtyaml.dump(term) + " has end before start.")

  if end.year > 2000:
    # Check party of current members (historical is too difficult and even recent ones incorrectly have Democratic instead of Democrat, which is inconsistent with the legislators files).
    if term.get("party") not in ("Republican", "Democrat"):
      error(rtyaml.dump({ "party": term.get("party") }) + " is invalid.")
Example #28
def check_executive_term(term):
    # Check type.
    if term.get("type") not in ("prez", "viceprez"):
        error(rtyaml.dump(term) + " has invalid type.")

    # Check how.
    if term.get("how") not in ("election", "succession", "appointment"):
        error(rtyaml.dump(term) + " has invalid 'how'.")

    # Check date range.
    start = check_date(term.get('start'))
    end = check_date(term.get('end'))
    if start and end:
        if end < start:
            error(rtyaml.dump(term) + " has end before start.")

    if end.year > 2000:
        # Check party of current members (historical is too difficult and even recent ones incorrectly have Democratic instead of Democrat, which is inconsistent with the legislators files).
        if term.get("party") not in ("Republican", "Democrat"):
            error(rtyaml.dump({"party": term.get("party")}) + " is invalid.")
Example #29
def check_bio(bio, is_current_legislator, context):
  for key, value in bio.items():
    if key not in bio_keys:
      error(context, "%s is not a valid key in bio." % key)
    elif not isinstance(value, str):
      error(context, rtyaml.dump({ key: value }) + " has an invalid data type.")
  if is_current_legislator:
    # These keys are required only for current legislators.
    # We don't always have the information for historical members of Congress or presidents.
    for key in bio_keys:
      if key not in bio:
        error(context, "Missing bio->{}.".format(key))
Example #30
def check_leadership_roles(roles, current, context):
    for role in roles:
        # All of these fields must be strings.
        for key, value in role.items():
            if not isinstance(value, str):
                error(context,
                      rtyaml.dump({key: value}) + " has an invalid data type.")

        # Check required fields.
        if "title" not in role:
            error(context, rtyaml.dump(role) + " is missing title.")
        if role.get("chamber") not in ("house", "senate"):
            error(context, rtyaml.dump(role) + " has an invalid chamber.")
        if "start" not in role:
            error(context, rtyaml.dump(role) + " is missing start.")
        if "end" not in role and not current:
            # end is required only in the historical file
            error(context, rtyaml.dump(role) + " is missing end.")

        # Check dates.
        start = check_date(role['start'], context)
        if "end" in role:
            end = check_date(role['end'], context)
            if start and end and end < start:
                error(context, rtyaml.dump(role) + " has end before start.")
Example #31
def check_bio(bio, is_current_legislator, context):
    for key, value in bio.items():
        if key not in (bio_keys | old_allowed_other_bio_keys):
            error(context, "%s is not a valid key in bio." % key)
        elif not isinstance(value, str):
            error(context,
                  rtyaml.dump({key: value}) + " has an invalid data type.")
    if is_current_legislator:
        # These keys are required only for current legislators.
        # We don't always have the information for historical members of Congress or presidents.
        for key in bio_keys:
            if key not in bio:
                error(context, "Missing bio->{}.".format(key))
Example #32
def check_name(name, is_other_names=False):
  # Check for required keys and data types of the values.
  for key, value in name.items():
    if key in ("start", "end") and is_other_names:
      if not isinstance(value, str):
        error(rtyaml.dump({ key: value }) + " has an invalid data type.")
    elif key not in name_keys:
      error("%s is not a valid key in name." % key)
    elif key in ("first", "last"):
      # These are required.
      if not isinstance(value, str):
        error(rtyaml.dump({ key: value }) + " has an invalid data type.")
    else:
      # These can be set explicitly to None, but maybe we should just remove
      # those keys then.
      if not isinstance(value, (str, type(None))):
        error(rtyaml.dump({ key: value }) + " has an invalid data type.")

  # If a person has a first initial only, they should also have a middle name.
  # (GovTrack relies on this to generate name strings.)
  if isinstance(name.get("first"), str) and len(name["first"]) == 2 and name["first"].endswith(".") and not name.get("middle"):
        error(rtyaml.dump(name) + " is missing a middle name to go with its first initial.")
Example #33
def update_wkd_config(config_sample):
    config = dict(config_sample)
    for email, fingerprint in config_sample.items():
        try:
            if fingerprint is None or fingerprint == "":
                config.pop(email)
            else:
                email_compatible_with_key(email, fingerprint)
        except Exception as err:
            raise err

    # All conditions met, do the necessary modifications
    with open(wkdpath, "w") as wkdfile:
        wkdfile.write(rtyaml.dump(config))
Example #34
def error(*args):
    global has_error
    has_error = True
    if len(args) == 1:
        incident, consequence, message = None, None, args[0]
    elif len(args) == 2:
        incident, consequence, message = args[0], None, args[1]
    elif len(args) == 3:
        incident, consequence, message = args
    else:
        raise ValueError(args)
    if incident:
        print("In <",
              rtyaml.dump(incident)[:64].replace("\n", " --- "),
              ">",
              file=sys.stderr)
    if consequence:
        print("... <",
              rtyaml.dump(consequence)[:64].replace("\n", " --- "),
              ">",
              file=sys.stderr)
    print(message, file=sys.stderr)
    print(file=sys.stderr)
Example #35
    def save(self):
        "Write back an OpenControl repo to where it was loaded"
        root_dir = self._root_dir
        root = self.dict(exclude={"standards", "components", "systems"})
        root["certifications"] = [
            str(cert.storage_path(root_dir)) for cert in self.certifications
        ]
        root["standards"] = [
            str(std.storage_path(root_dir)) for std in self.standards.values()
        ]
        root["components"] = [
            str(c.storage_path(root_dir)) for c in self.components
        ]
        print(rtyaml.dump(root))
Example #36
def run_authorization_test(pledge, ccnum, cccvc, aux_data):
	# Runs an authorization test at the time the user is making a pledge,
	# which tests the card info and also gets a credit card token that
	# can be used later to make a real charge without other billing
	# details.

	# Logging.
	aux_data.update({
		"trigger": pledge.trigger.id,
		"campaign": pledge.via_campaign.id,
		"pledge": pledge.id,
		"user": pledge.user.id if pledge.user else 0,
		"email": pledge.user.email if pledge.user else pledge.anon_user.email,
		"pledge_created": pledge.created,
	})

	# Basic contributor details.
	de_don_req = create_de_donation_basic_dict(pledge)

	# Add billing details.
	de_don_req.update({
		"authtest_request":  True,
		"token_request": True,

		# billing details
		"cc_number": ccnum,
		"cc_month": pledge.profile.extra['billing']['cc_exp_month'],
		"cc_year": pledge.profile.extra['billing']['cc_exp_year'],
		"cc_verification_value": cccvc,

		# no line items are necessary for an authorization test
		"line_items": [],

		# tracking info, which for an auth test stays private?
		"source_code": "itfsite pledge auth", 
		"ref_code": "", 
		"aux_data": rtyaml.dump(aux_data), # DE will gives this back to us encoded as YAML, but the dict encoding is ruby-ish so to be sure we can parse it, we'll encode it first
		})

	# Perform the authorization test and return the transaction record.
	#
	#   a) This tests that the billing info is valid.
	#   b) We get a token that we can use on future transactions so that we
	#      do not need to collect the credit card info again.
	de_txn = DemocracyEngineAPI.create_donation(de_don_req)

	# Store the transaction authorization, which contains the credit card token,
	# into the pledge.
	pledge.profile.extra['billing']['authorization'] = de_txn
	pledge.profile.extra['billing']['de_cc_token'] = de_txn['token']
Example #37
def check_id_types(legislator, seen_ids, is_legislator):
  for key, value in legislator["id"].items():
    # Check that the id key is one we know about.
    if key not in id_types:
      error(rtyaml.dump({ key: value }) + " is not a valid id.")

    # Check that the data type is correct.
    elif not isinstance(value, id_types[key]):
      error(rtyaml.dump({ key: value }) + " has an invalid data type.")

    else:
      # Check that the ID isn't duplicated across legislators.
      # Since some values are lists of IDs, check the elements.
      # Just make a list of ID occurrences here -- we'll check
      # uniqueness at the end.
      if not isinstance(value, list): value = [value]
      for v in value:
        seen_ids.setdefault((key, v), []).append(legislator)

  if is_legislator:
    # Check that every legislator has ids of the required types.
    for id_type in id_required:
      if id_type not in legislator["id"]:
        error("Missing %s id in:\n%s" % (id_type, rtyaml.dump(legislator['id'])))
Example #38
File: de.py Project: yfrigi/if.then.fund
	def handle(self, *args, **options):
		if len(args) == 0:
			print("Specify a method name.")
			return

		# get the function to call
		args = list(args)
		method = args.pop(0)
		method = getattr(DemocracyEngineAPI, method)

		# invoke
		ret = method(*args)

		# display
		print(rtyaml.dump(ret))
Example #39
def build_app(component, options):

    # create buffer for output
    from io import StringIO
    buf = StringIO()

    # Load the standards in use by this project.
    # standards = opencontrol.load_project_standards(project)

    # Collect all of the control narratives.
    # narratives = []
    # for component in opencontrol.load_project_components(project):
    #   # Iterate over its controls...
    #   for controlimpl in opencontrol.load_project_component_controls(component, standards):
    #     # If only one control family is requested, then skip others.
    #     if options.get("only-family"):
    #       if controlimpl["family"]["abbrev"] != options["only-family"]:
    #         continue

    #     # Add the narrative to the list of narratives to output.
    #     narratives.append(controlimpl)

    # # Sort the narratives by standard, family, control, part, and then by component.
    # narratives.sort(key = lambda narrative : (
    #   narrative["standard"]["name"],
    #   narrative["family"]["sort_key"],
    #   narrative["control"]["sort_key"],
    #   narrative["control_part"] is not None, # narratives for the null part go first
    #   narrative["control_part"],
    #   narrative["component"]["name"] )
    # )

    # Dump the component information to app.yaml
    #   import csv
    #   csvwriter = csv.writer(buf, delimiter=',',quotechar='"', quoting=csv.QUOTE_MINIMAL)
    #   csvwriter.writerow(["Control", "Control Part", "Standard Name", "Component Name", "Control Narrative"])
    #   for narrative in narratives:
    # #    if narrative["control_part"] is not None:
    #       csvwriter.writerow([narrative["control"]["id"],
    #                           narrative["control_part"],
    #                           narrative["standard"]["name"],
    #                           narrative["component"]["name"],
    #                           narrative["narrative"].strip()
    #                           ])
    # buf.write(component)
    # return buf.getvalue()
    # print("componenyaml\n", rtyaml.dump(component))
    return rtyaml.dump(component)
Example #40
def process_test(test, oval_nodes, rule_id, test_index, var_map):
    # Create an OVAL definition for the test.

    # The object and state have to be moved into their own parts.
    for key, idslug in (("object", "obj"), ("state", "ste")):
        if key not in test: continue

        # Generate an id.
        oval_nodes[key + "_count"] += 1
        test[key]['id'] = "oval:easyscap_generated:%s:%d" % (
            idslug, oval_nodes[key + "_count"])

        # Generate an implicit type.
        if "type" not in test[key]:
            if not test.get("type", "").endswith("_test"):
                raise ValueError("Invalid test type: " + test)
            test[key]["type"] = test["type"][:-4] + key

        dict_to_node(oval_nodes[key + "s"],
                     test[key],
                     var_map=var_map,
                     oval_nodes=oval_nodes)

        test[test["type"].split(":")[0] + ":" + key] = {
            key + "_ref": test[key]['id']
        }

        del test[key]

    # Convert the rest.
    try:
        node = dict_to_node(oval_nodes["tests"],
                            test,
                            var_map=var_map,
                            oval_nodes=oval_nodes)
        node.set("id", "oval:%s:tst:%d" % (rule_id, test_index + 1))
    except Exception as e:
        raise Exception("Error processing test (%s) in (%s)" %
                        (str(e), rtyaml.dump(test)))
    return node
Example #41
def main():
    repo = git.Repo(repo_dir)

    print('loading all legislators')
    legis = rtyaml.load(open(repo_dir / 'legislators-historical.yaml'))
    legis.extend(rtyaml.load(open(repo_dir / 'legislators-current.yaml')))

    # examine each commit to the social yaml file and merge into results
    for commit in repo.iter_commits(paths=['legislators-social-media.yaml']):
        created = datetime.datetime.fromtimestamp(commit.committed_date)
        print('examining', created)
        for blob in commit.tree.blobs:
            if blob.path == 'legislators-social-media.yaml':
                try:
                    social = rtyaml.load(blob.data_stream)
                    merge(social, legis, created)
                except rtyaml.yaml.error.YAMLError as e:
                    print("yaml in commit didn't parse: {}".format(commit))

    output = path.Path('legislators.yaml')
    print('writing {}'.format(output))
    output.open('w').write(rtyaml.dump(legis))
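Example #42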
def parse():
    """
    expects FETCHED_FILE_PATH to exist
    prints YAML formatted schema to screen
    """
    from lxml import html
    from collections import OrderedDict
    from re import search
    import rtyaml

    # open stashed file
    txt = open(FETCHED_FILE_PATH).read()
    doc = html.fromstring(txt)
    schema = OrderedDict()
    for tr in doc.cssselect("table tr")[1:]:
        tds = [t.text_content().strip() for t in tr.xpath("./td")]
        cname = tds[0]
        d = schema[cname] = OrderedDict()
        d["description"] = tds[1]
        d["nullable"] = True if tds[3] == "Y" else False
        if tds[5]:  # optional description field
            d["description"] += " -- " + tds[5]
        ctype = tds[4]
        if "VARCHAR" in ctype:
            d["type"] = "String"
            d["length"] = int(search(r"(?<=\()\d+", ctype).group())
        elif "Number" in ctype:
            d["type"] = "Float"
            d["length"] = [int(_d) for _d in search(r"(\w+),(\w+)", ctype).groups()]
        elif "DATE(MM/DD/YYYY)" == ctype:
            d["type"] = "Date"
            d["format"] = "%m/%d/%Y"
        else:
            raise Exception("Unexpected column data type:", ctype)
    # print to screen
    print(rtyaml.dump(schema))
Example #43
def check_leadership_roles(roles, current, context):
  for role in roles:
    # All of these fields must be strings.
    for key, value in role.items():
      if not isinstance(value, str):
        error(context, rtyaml.dump({ key: value }) + " has an invalid data type.")

    # Check required fields.
    if "title" not in role:
      error(context, rtyaml.dump(role) + " is missing title.")
    if role.get("chamber") not in ("house", "senate"):
      error(context, rtyaml.dump(role) + " has an invalid chamber.")
    if "start" not in role:
      error(context, rtyaml.dump(role) + " is missing start.")
    if "end" not in role and not current:
      # end is required only in the historical file
      error(context, rtyaml.dump(role) + " is missing end.")

    # Check dates.
    start = check_date(role['start'], context)
    if "end" in role:
      end = check_date(role['end'], context)
      if start and end and end < start:
        error(context, rtyaml.dump(role) + " has end before start.")
Example #44
def write_backup_config(env, newconfig):
    backup_root = os.path.join(env["STORAGE_ROOT"], "backup")
    with open(os.path.join(backup_root, "custom.yaml"), "w") as f:
        f.write(rtyaml.dump(newconfig))
Example #45
def check_legislators_file(fn, seen_ids, current=None, current_mocs=None):
  # Open and iterate over the entries.
  with open(fn) as f:
    legislators = rtyaml.load(f)
  for legislator in legislators:
    
    # Check the IDs.
    if "id" not in legislator:
      error(repr(legislator) + " is missing 'id'.")
    else:
      # Check that the IDs are valid.
      check_id_types(legislator, seen_ids, True)

    # Check the name.
    if "name" not in legislator:
      error(repr(legislator) + " is missing 'name'.")
    else:
      check_name(legislator["name"])
    for name in legislator.get("other_names", []):
      check_name(name, is_other_names=True)

    # Check the biographical fields.
    if "bio" not in legislator:
      error(repr(legislator) + " is missing 'bio'.")
    else:
      check_bio(legislator["bio"])

    # Check the terms.
    if "terms" not in legislator:
      error(repr(legislator) + " is missing 'terms'.")
    elif not isinstance(legislator["terms"], list):
      error(repr(legislator) + " terms has an invalid data type.")
    elif len(legislator["terms"]) == 0:
      error(repr(legislator) + " terms is empty.")
    else:
      prev_term = None
      for i, term in enumerate(legislator["terms"]):
        check_term(term, prev_term,
          current=(current and i==len(legislator["terms"])-1),
          current_mocs=current_mocs)
        prev_term = term

    # Check the leadership roles.
    for role in legislator.get("leadership_roles", []):
      # All of these fields must be strings.
      for key, value in role.items():
        if not isinstance(value, str):
          error(rtyaml.dump({ key: value }) + " has an invalid data type.")

      # Check required fields.
      if "title" not in role:
        error(rtyaml.dump(role) + " is missing title.")
      if role.get("chamber") not in ("house", "senate"):
        error(rtyaml.dump(role) + " has an invalid chamber.")
      if "start" not in role:
        error(rtyaml.dump(role) + " is missing start.")
      if "end" not in role and not current:
        # end is required only in the historical file
        error(rtyaml.dump(role) + " is missing end.")

      # Check dates.
      start = check_date(role['start'])
      if "end" in role:
        end = check_date(role['end'])
        if start and end and end < start:
          error(rtyaml.dump(role) + " has end before start.")
Example #46
def check_term(term, prev_term, context, current=None, current_mocs=None):
  # Check type.
  if term.get("type") not in ("rep", "sen"):
    error(context, "Term has invalid 'type'.")

  # Check date range.
  start = check_date(term.get('start'), context)
  end = check_date(term.get('end'), context)
  if start and end:
    context += "({} to {})".format(start, end)

    if end < start:
      error(context, "Term has end before start.")

    # TODO: Remove 'and end > "2000-"'. I'm just adding it because
    # lots of historical data fails this test.
    if prev_term and end > date(2000,1,1):
      prev_end = check_date(prev_term.get("end"), context)
      if prev_end:
        if start < prev_end:
          error(context, "Term has start before previous term's end.")

    if not current and (end > now):
      error(context, "Term has an end date in the future but is a past term.")
    if current and (end < now):
      error(context, "Term has an end date in the past but is a most recent term in the current file.")

  # Check how.
  if term.get("how") not in (None, "appointment",):
    error(context, "Term has invalid 'how'.")

  # Check end-type.
  if term.get("end-type") not in (None, "special-election",):
    error(context, "Term has invalid 'end-type'.")
  if term.get("end-type") == "special-election" and term.get("how") != "appointment":
    error(context, "Term can't have an 'end-type' without being an appointed senator.")

  # Check state, district, class, state_rank.
  if term.get("state") not in utils.states:
    error(context, "Term has invalid state.")
  if term.get("type") == "rep":
    if not isinstance(term.get("district"), int):
      error(context, "Term has invalid district.")
  if term.get("type") == "sen":
    if term.get("class") not in (1, 2, 3):
      error(context, "Term has invalid class.")
    if term.get("state_rank") not in ("junior", "senior", None):
      error(context, "Term has invalid senator state_rank.")
    elif current and term.get("state_rank") is None:
      error(context, "Term is missing senator state_rank.")

  if current:
    # Check uniqueness of office for current members.

    # Check office.
    office = (term.get("type"), term.get("state"), term.get("district") if term.get("type") == "rep" else term.get("class"))
    if office in current_mocs:
      error(context, "Term duplicates an office.")
    current_mocs.add(office)

    # Check senator rank isn't duplicated.
    if term.get("type") == "sen":
      office = (term.get("state"), term.get("state_rank"))
      if office in current_mocs:
        error(context, "Term duplicates state_rank in a state.")
      current_mocs.add(office)

    # Check party of current members (historical is too difficult).
    if term.get("party") not in ("Republican", "Democrat", "Independent"):
      error(context, rtyaml.dump({ "party": term.get("party") }) + " is invalid.")

    # Check caucus of Independent members.
    if term.get("party") == "Independent" and term.get("caucus") not in ("Republican", "Democrat"):
      error(context, rtyaml.dump({ "caucus": term.get("caucus") }) + " is invalid when party is Independent.")

    # Check website -- it's optional, so warn.
    if not term.get("url"):
      print(context, "Term is missing a website url.")
Example #47
def write_backup_config(env, newconfig):
    backup_root = os.path.join(env["STORAGE_ROOT"], "backup")
    with open(os.path.join(backup_root, "custom.yaml"), "w") as f:
        f.write(rtyaml.dump(newconfig))


if __name__ == "__main__":
    import sys

    if sys.argv[-1] == "--verify":
        # Run duplicity's verification command to check a) the backup files
        # are readable, and b) report if they are up to date.
        run_duplicity_verification()

    elif sys.argv[-1] == "--status":
        # Show backup status.
        ret = backup_status(load_environment())
        print(rtyaml.dump(ret["backups"]))

    elif len(sys.argv) >= 2 and sys.argv[1] == "--restore":
        # Run duplicity restore. Rest of command line passed as arguments
        # to duplicity. The restore path should be specified.
        run_duplicity_restore(sys.argv[2:])

    else:
        # Perform a backup. Add --full to force a full backup rather than
        # possibly performing an incremental backup.
        full_backup = "--full" in sys.argv
        perform_backup(full_backup)
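A read-back counterpart to write_backup_config() could look like the sketch below; the get_backup_config name and the empty-dict fallback are assumptions made for illustration, not part of the snippet above:

import os.path
import rtyaml

def get_backup_config(env):
    # Hypothetical counterpart: load the same custom.yaml written above,
    # falling back to an empty config when none has been saved yet.
    fn = os.path.join(env["STORAGE_ROOT"], "backup", "custom.yaml")
    if not os.path.exists(fn):
        return {}
    with open(fn) as f:
        return rtyaml.load(f)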
예제 #48
0
def write_custom_dns_config(config, env):
	config_yaml = rtyaml.dump(config)
	with open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml'), "w") as f:
		f.write(config_yaml)
예제 #49
0
	def handle(self, *args, **options):
		print(rtyaml.dump(DemocracyEngineAPI.recipients()))
예제 #50
0
    def process_message(self, msg):
        self.logger.debug("%s from %s to %s: %s", msg["message-id"], msg["from"], msg["to"], msg["subject"])
        
        msg_date = parse_date(msg["Date"])
        
        ## group the parts by type
        msg_parts = {}
        for content_type, part in [(p.get_content_type(), p) for p in msg.walk()]:
            if content_type not in msg_parts:
                msg_parts[content_type] = []
            
            msg_parts[content_type].append(part)
        
        assert "text/plain" in msg_parts, "can't find plain text body"
        
        ## start building post frontmatter from headers
        fm = frontmatter = OrderedDict()
        
        fm["date"] = msg_date.isoformat()
        fm["title"] = post_title = decode_header(msg["Subject"])
        slug = "%s-%s" % (msg_date.astimezone(UTC).strftime("%Y-%m-%d"), slugify(fm["title"].encode("unicode_escape")))

        fm["layout"] = "post"
        
        fm["categories"] = "blog"
        fm["tags"] = []
        
        author_name, fm["author"] = email.utils.parseaddr(decode_header(msg["From"]))
        
        ## message body, decoded
        body = unicode(
            msg_parts["text/plain"][0].get_payload(decode=True),
            msg_parts["text/plain"][0].get_content_charset("utf-8"),
        )

        post_rel_fn = slug + ".md"
        post_full_fn = os.path.join(self.git.repo_path, "_posts", "blog", post_rel_fn)
        
        if os.path.exists(post_full_fn):
            raise PostExistsException(post_rel_fn)

        ## strip signature from body
        ## find last occurrence of the regex and drop everything else
        body_lines = body.split("\n")
        
        ## reverse list so we look from the end
        body_lines.reverse()
        
        sig_start_ind = 0
        for line in body_lines:
            sig_start_ind += 1

            if self.SIG_DELIMITER.match(line):
                break
        
        if sig_start_ind < len(body_lines):
            ## signature found
            body_lines = body_lines[sig_start_ind:]
        
        body_lines.reverse()
        
        if body_lines[0].lower().startswith("tags:"):
            fm["tags"].extend([t.strip() for t in body_lines[0][5:].strip().split(",")])
            del body_lines[0]
        
        ## recreate body
        body = u"\n".join(body_lines)
        
        if "image/jpeg" in msg_parts:
            fm["tags"].append("photo")
            fm["images"] = []
            for photo in msg_parts["image/jpeg"]:
                fm["images"].append(self.__process_image(slug, photo))
        
        self.logger.debug("generating %s", post_full_fn)

        with self.git.lock():
            if self.commit_changes:
                ## make the current master the same as the origin's master
                self.git.clean_sweep()
            
            ## @todo consider making every change a PR and automatically approving them

            if not os.path.exists(os.path.dirname(post_full_fn)):
                os.makedirs(os.path.dirname(post_full_fn))
            
            with codecs.open(post_full_fn, "w", encoding="utf-8") as ofp:
                ## I *want* to use yaml, but I can't get it to properly to encode
                ## "Test 🔫"; kept getting "Test \uD83D\uDD2B" which the Go yaml parser
                ## bitched about.
                ## but I'm not hitched to hugo, yet, and yaml is what jekyll uses, so…
                ofp.write("---\n")
                
                ## hack for title which the yaml generator won't do properly
                ofp.write('title: "%s"\n' % fm["title"])
                del fm["title"]
                yaml.dump(frontmatter, ofp)

                ## we want a space between the frontmatter and the body
                ofp.write("---\n\n")
                ofp.write(body)
            
            self.logger.info("generated %s", post_rel_fn)
            
            if self.commit_changes:
                ## add the new file
                self.git.add_file(post_full_fn)
                
                ## commit the change
                self.git.commit(author_name, fm["author"], msg["date"], post_title)
                
                ## push the change
                self.git.push()
            else:
                self.logger.warn("not committing changes")
        
        return post_rel_fn
예제 #51
0
def buy_ssl_certificate(api_key, domain, command, env):
	if domain != env['PRIMARY_HOSTNAME'] \
		and domain not in get_web_domains(env):
		raise ValueError("Domain is not %s or a domain we're serving a website for." % env['PRIMARY_HOSTNAME'])

	# Initialize.

	gandi = xmlrpc.client.ServerProxy('https://rpc.gandi.net/xmlrpc/')

	try:
		existing_certs = gandi.cert.list(api_key)
	except Exception as e:
		if "Invalid API key" in str(e):
			print("Invalid API key. Check that you copied the API Key correctly from https://www.gandi.net/admin/api_key.")
			sys.exit(1)
		else:
			raise

	# Where is the SSL cert stored?

	ssl_key, ssl_certificate, ssl_csr_path = get_domain_ssl_files(domain, env)	

	# Have we already created a cert for this domain?

	for cert in existing_certs:
		if cert['cn'] == domain:
			break
	else:
		# No existing cert found. Purchase one.
		if command != 'purchase':
			print("No certificate or order found yet. If you haven't yet purchased a certificate, run ths script again with the 'purchase' command. Otherwise wait a moment and try again.")
			sys.exit(1)
		else:
			# Start an order for a single standard SSL certificate.
			# Use DNS validation. Web-based validation won't work because they
			# require a file on HTTP but not HTTPS w/o redirects and we don't
			# serve anything plainly over HTTP. Email might be another way but
			# DNS is easier to automate.
			op = gandi.cert.create(api_key, {
				"csr": open(ssl_csr_path).read(),
				"dcv_method": "dns",
				"duration": 1, # year?
				"package": "cert_std_1_0_0",
				})
			print("An SSL certificate has been ordered.")
			print()
			print(op)
			print()
			print("In a moment please run this script again with the 'setup' command.")

	if cert['status'] == 'pending':
		# Get the information we need to update our DNS with a code so that
		# Gandi can verify that we own the domain.

		dcv = gandi.cert.get_dcv_params(api_key, {
				"csr": open(ssl_csr_path).read(),
				"cert_id": cert['id'],
				"dcv_method": "dns",
				"duration": 1, # year?
				"package": "cert_std_1_0_0",
				})
		if dcv["dcv_method"] != "dns":
			raise Exception("Certificate ordered with an unknown validation method.")

		# Update our DNS data.

		dns_config = env['STORAGE_ROOT'] + '/dns/custom.yaml'
		if os.path.exists(dns_config):
			dns_records = rtyaml.load(open(dns_config))
		else:
			dns_records = { }

		qname = dcv['md5'] + '.' + domain
		value = dcv['sha1'] + '.comodoca.com.'
		dns_records[qname] = { "CNAME": value }

		with open(dns_config, 'w') as f:
			f.write(rtyaml.dump(dns_records))

		shell('check_call', ['tools/dns_update'])

		# Okay, done with this step.

		print("DNS has been updated. Gandi will check within 60 minutes.")
		print()
		print("See https://www.gandi.net/admin/ssl/%d/details for the status of this order." % cert['id'])

	elif cert['status'] == 'valid':
		# The certificate is ready.

		# Check before we overwrite something we shouldn't.
		if os.path.exists(ssl_certificate):
			cert_status, cert_status_details = check_certificate(None, ssl_certificate, None)
			if cert_status != "SELF-SIGNED":
				print("Please back up and delete the file %s so I can save your new certificate." % ssl_certificate)
				sys.exit(1)

		# Form the certificate.

		# The certificate comes as a long base64-encoded string. Break it
		# into lines in the usual way.
		pem = "-----BEGIN CERTIFICATE-----\n"
		pem += "\n".join(chunk for chunk in re.split(r"(.{64})", cert['cert']) if chunk != "")
		pem += "\n-----END CERTIFICATE-----\n\n"

		# Append intermediary certificates.
		pem += urllib.request.urlopen("https://www.gandi.net/static/CAs/GandiStandardSSLCA.pem").read().decode("ascii")

		# Write out.

		with open(ssl_certificate, "w") as f:
			f.write(pem)

		print("The certificate has been installed in %s. Restarting services..." % ssl_certificate)

		# Restart dovecot and postfix if this is for PRIMARY_HOSTNAME.

		if domain == env['PRIMARY_HOSTNAME']:
			shell('check_call', ["/usr/sbin/service", "dovecot", "restart"])
			shell('check_call', ["/usr/sbin/service", "postfix", "restart"])

		# Restart nginx in all cases.

		shell('check_call', ["/usr/sbin/service", "nginx", "restart"])

	else:
		print("The certificate has an unknown status. Please check https://www.gandi.net/admin/ssl/%d/details for the status of this order." % cert['id'])
예제 #52
0
import collections
import io
import os
import urllib.request

import PyPDF2
import rtyaml

# Wrap the urllib.request data in a BytesIO to make it seekable.
pdf = PyPDF2.PdfFileReader(io.BytesIO(urllib.request.urlopen(pdf_url).read()))

# Build YAML.

data = collections.OrderedDict()

data["id"] = resource_id
data["type"] = "authoritative-document or policy-document --- change this!"

if "/Title" in pdf.documentInfo:
	data["title"] = str(pdf.documentInfo["/Title"])

if "/Subject" in pdf.documentInfo:
	data["alt-titles"] = [str(pdf.documentInfo["/Subject"])]

data["owner"] = None # for user to fill in

data["url"] = pdf_url # should be updated if document is copied into Document Cloud
data["authoritative-url"] = pdf_url
data["format"] = "pdf"

# Save.

fn = os.path.join("resources", "documents", resource_id + ".yaml")
print("Writing", fn, "...")
with open(fn, 'w') as f:
	f.write(rtyaml.dump(data))
print("Don't forget to update the 'type' field and make sure the other fields are OK.")

예제 #53
0
def set_custom_dns_record(qname, rtype, value, env):
	# validate qname
	for zone, fn in get_dns_zones(env):
		# It must match a zone apex or be a subdomain of a zone
		# that we are otherwise hosting.
		if qname == zone or qname.endswith("."+zone):
			break
	else:
		# No match.
		raise ValueError("%s is not a domain name or a subdomain of a domain name managed by this box." % qname)

	# validate rtype
	rtype = rtype.upper()
	if value is not None:
		if rtype in ("A", "AAAA"):
			v = ipaddress.ip_address(value)
			if rtype == "A" and not isinstance(v, ipaddress.IPv4Address): raise ValueError("That's an IPv6 address.")
			if rtype == "AAAA" and not isinstance(v, ipaddress.IPv6Address): raise ValueError("That's an IPv4 address.")
		elif rtype in ("CNAME", "TXT"):
			# anything goes
			pass
		else:
			raise ValueError("Unknown record type '%s'." % rtype)

	# load existing config
	config = get_custom_dns_config(env)

	# update
	if qname not in config:
		if value is None:
			# Is asking to delete a record that does not exist.
			return False
		elif rtype == "A":
			# Add this record using the short form 'qname: value'.
			config[qname] = value
		else:
			# Add this record. This is the qname's first record.
			config[qname] = { rtype: value }
	else:
		if isinstance(config[qname], str):
			# This is a short-form 'qname: value' implicit-A record.
			if value is None and rtype != "A":
				# Is asking to delete a record that doesn't exist.
				return False
			elif value is None and rtype == "A":
				# Delete record.
				del config[qname]
			elif rtype == "A":
				# Update, keeping short form.
				if config[qname] == value:
					# No change.
					return False
				config[qname] = value
			else:
				# Expand short form so we can add a new record type.
				config[qname] = { "A": config[qname], rtype: value }
		else:
			# This is the qname: { ... } (dict) format.
			if value is None:
				if rtype not in config[qname]:
					# Is asking to delete a record that doesn't exist.
					return False
				else:
					# Delete the record. If it's the last record, delete the domain.
					del config[qname][rtype]
					if len(config[qname]) == 0:
						del config[qname]
			else:
				# Update the record.
				if config[qname].get(rtype) == value:
					# No change.
					return False
				config[qname][rtype] = value

	# serialize & save
	config_yaml = rtyaml.dump(config)
	with open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml'), "w") as f:
		f.write(config_yaml)

	return True
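The companion reader used above, get_custom_dns_config(), is not shown in this excerpt. A minimal sketch, assuming it simply loads dns/custom.yaml and returns an empty mapping when the file is absent (the real Mail-in-a-Box helper may behave differently):

import os.path
import rtyaml

def get_custom_dns_config(env):
	# Minimal sketch: load the custom DNS records written by
	# write_custom_dns_config()/set_custom_dns_record(), or return {}.
	fn = os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml')
	if not os.path.exists(fn):
		return {}
	with open(fn) as f:
		return rtyaml.load(f) or {}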
예제 #54
0
파일: export.py 프로젝트: GovReady/easyscap
def dict_to_node(parent, dictobj, default_type=None, var_map=None, oval_nodes=None):
    my_type = dictobj.get("type", default_type)
    if my_type is None:
        raise Exception("Invalid data: Missing type. (%s)" % rtyaml.dump(dictobj))

    node = make_node(parent, expand_tag_name(my_type))

    for k, v in dictobj.items():
        if k == "type":
            # already handled
            continue

        elif k == "var_ref" and var_map is not None:
            # Re-map variables.
            node.set("var_ref", var_map[v])

        elif k == "object_ref" and isinstance(v, dict) and oval_nodes is not None:
            # Re-create object.
            oval_nodes["object_count"] += 1
            objid = "oval:easyscap_generated:%s:%d" % ("obj", oval_nodes["object_count"])
            n = dict_to_node(oval_nodes["objects"], v, var_map=var_map, oval_nodes=oval_nodes)
            n.set("id", objid)
            node.set(expand_tag_name(k), objid)

        elif expand_tag_name(my_type) == "{http://oval.mitre.org/XMLSchema/oval-definitions-5}filter" and k == "value":
            # Re-create state object.
            oval_nodes["state_count"] += 1
            objid = "oval:easyscap_generated:%s:%d" % ("ste", oval_nodes["state_count"])
            n = dict_to_node(oval_nodes["states"], v, var_map=var_map, oval_nodes=oval_nodes)
            n.set("id", objid)
            node.text = objid

        elif k == "value":
            # This content goes right into the node's inner text.
            v, dt = get_data_type(v)
            if dt:
                node.set("datatype", dt)
            node.text = v

        elif not isinstance(v, dict) and (
            (":" not in k and "{" not in k and not k.startswith("<")) or k.startswith("@")
        ):
            # This is an attribute.
            if k[0] == "@":
                k = k[1:]
            node.set(expand_tag_name(k), str(v))

        elif not isinstance(v, dict):
            # This is a simple element.
            v, dt = get_data_type(v)
            if dt:
                dt = {"datatype": dt}
            else:
                dt = {}
            make_node(node, expand_tag_name(k), v, **dt)

        else:
            # This is obviously an element because it has a complex child.
            dict_to_node(node, v, default_type=expand_tag_name(k), oval_nodes=oval_nodes, var_map=var_map)

    return node
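dict_to_node() depends on make_node() and expand_tag_name() defined elsewhere in export.py. Purely as an assumption to make the excerpt readable on its own, they might behave roughly like this lxml-based sketch (the namespace table is illustrative, not the project's actual mapping):

import lxml.etree as etree

# Illustrative prefix-to-namespace table; the real project defines its own.
NAMESPACES = {
    "oval-def": "http://oval.mitre.org/XMLSchema/oval-definitions-5",
}

def expand_tag_name(name):
    # Hypothetical stand-in: turn "prefix:local" into "{uri}local" (Clark
    # notation), passing through names already expanded or unprefixed.
    if ":" in name and not name.startswith("{"):
        prefix, local = name.split(":", 1)
        return "{%s}%s" % (NAMESPACES[prefix], local)
    return name

def make_node(parent, tag, text=None, **attrs):
    # Hypothetical stand-in: append a child element, optionally setting its
    # text and attributes, and return it.
    node = etree.SubElement(parent, tag)
    if text is not None:
        node.text = text
    for k, v in attrs.items():
        node.set(k, v)
    return node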
예제 #55
0
파일: utils.py 프로젝트: jkaberg/mailinabox
def write_settings(config, env):
    import rtyaml
    fn = os.path.join(env['STORAGE_ROOT'], 'settings.yaml')
    with open(fn, "w") as f:
        f.write(rtyaml.dump(config))
예제 #56
0
def check_term(term, prev_term, current=None, current_mocs=None):
  # Check type.
  if term.get("type") not in ("rep", "sen"):
    error(rtyaml.dump(term) + " has invalid type.")

  # Check date range.
  start = check_date(term.get('start'))
  end = check_date(term.get('end'))
  if start and end:
    if end < start:
      error(rtyaml.dump(term) + " has end before start.")

    # TODO: Remove 'and end > date(2000,1,1)'. I'm just adding it because
    # lots of historical data fails this test.
    if prev_term and end > date(2000,1,1):
      prev_end = check_date(prev_term.get("end"))
      if prev_end:
        if start < prev_end:
          error(rtyaml.dump(term) + " has start before previous term's end.")

    if not current and (end > now):
      error(rtyaml.dump(term) + " has an end date in the future but is in the historical file.")
    if current and (end < now):
      error(rtyaml.dump(term) + " has an end date in the past but is in the current file.")

  # Check state, district, class, state_rank.
  if term.get("state") not in utils.states:
    error(rtyaml.dump(term) + " has invalid state.")
  if term.get("type") == "rep":
    if not isinstance(term.get("district"), int):
      error(rtyaml.dump(term) + " has invalid district.")
  if term.get("type") == "sen":
    if term.get("class") not in (1, 2, 3):
      error(rtyaml.dump(term) + " has invalid class.")
    if term.get("state_rank") not in ("junior", "senior", None):
      error(rtyaml.dump(term) + " has invalid senator state_rank.")
    elif current and term.get("state_rank") is None:
      error(rtyaml.dump(term) + " is missing senator state_rank.")

  if current:
    # Check uniqueness of office for current members.

    # Check office.
    office = (term.get("type"), term.get("state"), term.get("district") if term.get("type") == "rep" else term.get("class"))
    if office in current_mocs:
      error(rtyaml.dump(term) + " duplicates an office.")
    current_mocs.add(office)

    # Check senator rank isn't duplicated.
    if term.get("type") == "sen":
      office = (term.get("state"), term.get("state_rank"))
      if office in current_mocs:
        error(rtyaml.dump(term) + " duplicates state_rank in a state.")
      current_mocs.add(office)

    # Check party of current members (historical is too difficult).
    if term.get("party") not in ("Republican", "Democrat", "Independent"):
      error(rtyaml.dump({ "party": term.get("party") }) + " is invalid.")

    # Check caucus of Independent members.
    if term.get("party") == "Independent" and term.get("caucus") not in ("Republican", "Democrat"):
      error(rtyaml.dump({ "caucus": term.get("caucus") }) + " is invalid when party is Independent.")
예제 #57
0
        doc.save()

    print(doc.small_image_url)

    sys.exit(0)

# Get the URL to the PDF.

url = res['authoritative-url']

# DocumentCloud's upload API gets confused if it's passed a URL that redirects.
# Use urllib.request.urlopen to resolve the redirect.
import urllib.request
url = urllib.request.urlopen(url).geturl()

# Upload to DocumentCloud.

doc = documentcloud.documents.upload(
    url,
    title=res.get('title'),
    access="public")

# Update YAML.

res['url'] = doc.canonical_url

with open(fn, "w") as f:
    f.write(rtyaml.dump(res))

print("Done.")
예제 #58
0
def check_bio(bio):
  for key, value in bio.items():
    if key not in bio_keys:
      error("%s is not a valid key in bio." % key)
    elif not isinstance(value, str):
      error(rtyaml.dump({ key: value }) + " has an invalid data type.")