Example #1
def merge(old, new, ask_user=False, warn_user=False):
    try:
        old_dict = yaml.load(old) or {}
        new_dict = yaml.load(new) or {}
    except Exception as e:
        print('Cannot parse yaml')
        sys.exit(-1)

    merged = OrderedDict()
    for key, value in new_dict.items():
        old_value = old_dict.get(key)
        if old_value is not None:
            if ask_user:
                value = _choose_value(key, old_value, value)
            else:
                value = old_value
        merged.update({key: value})

    data_from_comments = load_from_comments(new)
    for key, value in data_from_comments.items():
        old_value = old_dict.get(key)
        if old_value is None:
            continue
        if warn_user:
            sys.stderr.write("Uncommenting: %s \n" % key)
        merged.update({key: old_value})

    return old, new, yaml.dump(merged)
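
The merge above keeps whatever the user already had in the old document and only adopts keys that are new. A minimal, self-contained sketch of the same idea using PyYAML's safe_load (the sample data and key names are illustrative, not taken from the project above):

from collections import OrderedDict
import yaml

old_text = "host: example.org\nport: 8080\n"
new_text = "host: localhost\nport: 8080\ndebug: false\n"

old_cfg = yaml.safe_load(old_text) or {}
new_cfg = yaml.safe_load(new_text) or {}

merged = OrderedDict()
for key, value in new_cfg.items():
    # Prefer the value the user already had; fall back to the new default.
    merged[key] = old_cfg.get(key, value)

print(yaml.dump(dict(merged)))  # the old host is preserved; debug comes in from the new config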
Example #2
def get_custom_dns_config(env):
    try:
        custom_dns = rtyaml.load(
            open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml')))
        if not isinstance(custom_dns, dict): raise ValueError()  # caught below
    except:
        return []

    for qname, value in custom_dns.items():
        # Short form. Mapping a domain name to a string is short-hand
        # for creating A records.
        if isinstance(value, str):
            values = [("A", value)]

        # A mapping creates multiple records.
        elif isinstance(value, dict):
            values = value.items()

        # No other type of data is allowed.
        else:
            raise ValueError()

        for rtype, value2 in values:
            if isinstance(value2, str):
                yield (qname, rtype, value2)
            elif isinstance(value2, list):
                for value3 in value2:
                    yield (qname, rtype, value3)
            # No other type of data is allowed.
            else:
                raise ValueError()
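
Example #2 iterates a dns/custom.yaml whose shape is: a domain mapped either to a bare string (shorthand for an A record) or to a mapping from record type to a value or list of values. A short, hedged sketch with illustrative sample data showing how such a file parses:

import rtyaml

# Illustrative dns/custom.yaml contents (sample domains and values).
sample = """
example.com: 203.0.113.10
mail.example.com:
  MX: "10 mail.example.com."
  TXT:
    - "v=spf1 mx -all"
"""

custom_dns = rtyaml.load(sample)
for qname, value in custom_dns.items():
    records = [("A", value)] if isinstance(value, str) else value.items()
    for rtype, rvalue in records:
        for v in (rvalue if isinstance(rvalue, list) else [rvalue]):
            print(qname, rtype, v)
# example.com A 203.0.113.10
# mail.example.com MX 10 mail.example.com.
# mail.example.com TXT v=spf1 mx -all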
Example #3
def yaml_load(path, use_cache=True):
    # Loading YAML is ridiculously slow, so cache the YAML data
    # in a pickled file which loads much faster.

    # Check if the .pickle file exists and a hash stored inside it
    # matches the hash of the YAML file, and if so unpickle it.
    import cPickle as pickle, os.path, hashlib

    h = hashlib.sha1(open(path).read()).hexdigest()
    if use_cache and os.path.exists(path + ".pickle"):

        try:
            store = pickle.load(open(path + ".pickle"))
            if store["hash"] == h:
                return store["data"]
        except EOFError:
            pass  # bad .pickle file, pretend it doesn't exist

    # No cached pickled data exists, so load the YAML file.
    data = rtyaml.load(open(path))

    # Store in a pickled file for fast access later.
    pickle.dump({"hash": h, "data": data}, open(path + ".pickle", "w"))

    return data
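
Example #3 is Python 2 code (cPickle, text-mode pickle files). A hedged Python 3 sketch of the same hash-validated cache pattern, assuming rtyaml is installed (the function name is illustrative):

import hashlib
import os.path
import pickle

import rtyaml

def yaml_load_cached(path, use_cache=True):
    # Hash the YAML source so a stale pickle is never trusted.
    with open(path, "rb") as f:
        digest = hashlib.sha1(f.read()).hexdigest()

    cache_path = path + ".pickle"
    if use_cache and os.path.exists(cache_path):
        try:
            with open(cache_path, "rb") as f:
                store = pickle.load(f)
            if store["hash"] == digest:
                return store["data"]
        except (EOFError, pickle.UnpicklingError, KeyError):
            pass  # bad cache file; fall through and rebuild it

    with open(path) as f:
        data = rtyaml.load(f)

    with open(cache_path, "wb") as f:
        pickle.dump({"hash": digest, "data": data}, f)
    return data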
Example #4
def parse_wkd_list():
    removed = []
    uidlist = []
    with open(wkdpath, "a+") as wkdfile:
        wkdfile.seek(0)
        config = {}
        try:
            config = rtyaml.load(wkdfile)
            if (type(config) != dict):
                config = {}
        except:
            config = {}

        writeable = copy.deepcopy(config)
        for u, k in config.items():
            try:
                key = email_compatible_with_key(u, k)
                # Key is compatible

                writeable[
                    u] = key.fpr  # Swap with the full-length fingerprint (if somehow this was changed by hand)
                uidlist.append((u, key.fpr))
            except:
                writeable.pop(u)
                removed.append((u, k))
        # Shove the updated configuration back in the file
        wkdfile.truncate(0)
        wkdfile.write(rtyaml.dump(writeable))
    return (removed, uidlist)
Example #5
def get_backup_config(env, for_save=False, for_ui=False):
    backup_root = os.path.join(env["STORAGE_ROOT"], "backup")

    # Defaults.
    config = {"min_age_in_days": 3, "target": "local"}

    # Merge in anything written to custom.yaml.
    try:
        custom_config = rtyaml.load(open(os.path.join(backup_root, "custom.yaml")))
        if not isinstance(custom_config, dict):
            raise ValueError()  # caught below
        config.update(custom_config)
    except:
        pass

    # When updating config.yaml, don't do any further processing on what we find.
    if for_save:
        return config

    # When passing this back to the admin to show the current settings, do not include
    # authentication details. The user will have to re-enter it.
    if for_ui:
        for field in ("target_user", "target_pass"):
            if field in config:
                del config[field]

    # helper fields for the admin
    config["file_target_directory"] = os.path.join(backup_root, "encrypted")
    config["enc_pw_file"] = os.path.join(backup_root, "secret_key.txt")
    if config["target"] == "local":
        # Expand to the full URL.
        config["target"] = "file://" + config["file_target_directory"]

    return config
Example #6
def get_custom_dns_config(env):
	try:
		custom_dns = rtyaml.load(open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml')))
		if not isinstance(custom_dns, dict): raise ValueError() # caught below
	except:
		return [ ]

	for qname, value in custom_dns.items():
		# Short form. Mapping a domain name to a string is short-hand
		# for creating A records.
		if isinstance(value, str):
			values = [("A", value)]

		# A mapping creates multiple records.
		elif isinstance(value, dict):
			values = value.items()

		# No other type of data is allowed.
		else:
			raise ValueError()

		for rtype, value2 in values:
			if isinstance(value2, str):
				yield (qname, rtype, value2)
			elif isinstance(value2, list):
				for value3 in value2:
					yield (qname, rtype, value3)
			# No other type of data is allowed.
			else:
				raise ValueError()
Example #7
def authoring_edit_module(request, task):
    try:
        # Update the module.
        import rtyaml
        spec = rtyaml.load(request.POST["spec"])

        # Validate.
        from .validate_module_specification import validate_module
        spec = validate_module(spec, is_authoring_tool=True)
    except ValueError as e:
        return JsonResponse({ "status": "error", "message": str(e) })

    # Save.
    task.module.spec = spec
    task.module.save()

    # Update task & project title to app title so the user can see it.
    task.title = task.module.spec.get("title") or task.title
    task.save()
    project = task.root_of.first()
    project.title = task.module.spec.get("title") or project.title
    project.save()

    # Write to disk. Errors writing should not be suppressed because
    # saving to disk is a part of the contract of how app editing works.
    try:
        task.module.serialize_to_disk()
    except Exception as e:
        return JsonResponse({ "status": "error", "message": "Could not update local YAML file: " + str(e) })

    # Return status. The browser will reload/redirect --- if the question key
    # changed, this sends the new key.
    return JsonResponse({ "status": "ok", "redirect": task.get_absolute_url() })
Example #8
def update_sitemap(url, current_lastmod, how_we_got_here, options):
    """Updates the local cache of a sitemap file."""

    # Skip if the year or congress flags are set and this sitemap is
    # not for that year or Congress.
    if should_skip_sitemap(url, options):
        return []

    # For debugging, remember what URLs we are stepping through.
    how_we_got_here = how_we_got_here + [url]

    # Get the file paths to cache:
    # * the sitemap XML for future runs
    # * its <lastmod> date (which comes from the parent sitemap) so we know if we need to re-download it now
    # * the <lastmod> dates of the packages listed in this sitemap so we know if we need to re-download any package files
    cache_file = get_sitemap_cache_file(url)
    cache_file = os.path.join("govinfo/sitemap", cache_file, "sitemap.xml")
    lastmod_cache_file = cache_file.replace(".xml", "-lastmod.yaml")
    lastmod_cache_file = os.path.join(utils.cache_dir(), lastmod_cache_file)
    if not os.path.exists(lastmod_cache_file):
        lastmod_cache = { }
    else:
        with open(lastmod_cache_file) as f:
            lastmod_cache = rtyaml.load(f)

    try:
        return update_sitemap2(url, current_lastmod, how_we_got_here, options, lastmod_cache, cache_file)
    finally:
        # Write the updated last modified dates to disk so we know the next time whether
        # we need to fetch the files. If we didn't download anything, no need to write an
        # empty file.
        with utils.NoInterrupt():
            with open(lastmod_cache_file, "w") as f:
                rtyaml.dump(lastmod_cache, f)
Example #9
def update_sitemap(url, current_lastmod, how_we_got_here, options):
    """Updates the local cache of a sitemap file."""

    # Skip if the year or congress flags are set and this sitemap is
    # not for that year or Congress.
    if should_skip_sitemap(url, options):
        return []

    # For debugging, remember what URLs we are stepping through.
    how_we_got_here = how_we_got_here + [url]

    # Get the file paths to cache:
    # * the sitemap XML for future runs
    # * its <lastmod> date (which comes from the parent sitemap) so we know if we need to re-download it now
    # * the <lastmod> dates of the packages listed in this sitemap so we know if we need to re-download any package files
    cache_file = get_sitemap_cache_file(url)
    cache_file = os.path.join("govinfo/sitemap", cache_file, "sitemap.xml")
    lastmod_cache_file = cache_file.replace(".xml", "-lastmod.yaml")
    lastmod_cache_file = os.path.join(utils.cache_dir(), lastmod_cache_file)
    if not os.path.exists(lastmod_cache_file):
        lastmod_cache = {}
    else:
        with open(lastmod_cache_file) as f:
            lastmod_cache = rtyaml.load(f)

    try:
        return update_sitemap2(url, current_lastmod, how_we_got_here, options,
                               lastmod_cache, cache_file)
    finally:
        # Write the updated last modified dates to disk so we know the next time whether
        # we need to fetch the files. If we didn't download anything, no need to write an
        # empty file.
        with utils.NoInterrupt():
            with open(lastmod_cache_file, "w") as f:
                rtyaml.dump(lastmod_cache, f)
Example #10
def read_yaml_from_file(yaml_layout):
    # TODO cache this at application level
    t = template.loader.get_template(yaml_layout)
    # It is important to convert to str here because, depending on the
    # environment, rtyaml may use yaml.CSafeLoader, which requires a str or a stream
    rendered = str(t.render())
    return rtyaml.load(rendered)
Example #11
def team(request, organization, project):
    """Show settings for the project"""

    # Load the project.
    try:
        project = load_project(organization, project)
    except ValueError:
        return "Organization `{}` project `{}` in URL not found.".format(
            organization, project)

    # Read the team file
    try:
        with open(os.path.join(project["path"], "team", "team.yaml"),
                  encoding="utf8") as f:
            team_data = rtyaml.load(f)
            team = team_data["team"]
        message = None
    except:
        team = []
        message = ("Capture your team information in the file: `{}`.".format(
            os.path.join(project["path"], "team", "team.yaml")))

    # Prepare modify page message
    edit_file = os.path.join(project["path"], "team", "team.yaml")
    modify_msg = "To modify team information, update file: `{}`".format(
        edit_file)

    return render_template(request,
                           'team.html',
                           project=project,
                           message=message,
                           modify_msg=modify_msg,
                           team=team)
Example #12
def process(selection, template_file, template_path, output_path, logger):
    logger.print("Checking {}".format(template_file))
    try:
        with open(template_file) as fp:
            output_file = rewrite(template_file, template_path, output_path)
            output_file_p = Path(output_file)
            if not output_file_p.parent.is_dir():
                output_file_p.parent.mkdir(parents=True, exist_ok=True)

            if template_file.name == 'component.yaml':
                logger.print("  Copying {} to {}".format(
                    template_file, output_file))
                shutil.copy(template_file, output_file)
            else:
                object = rtyaml.load(fp)
                object = select_controls(object, selection)
                controls = sorted(control['control_key']
                                  for control in object['satisfies'])
                logger.print("  Writing controls to {}".format(output_file))
                for control in controls:
                    logger.print("    {}".format(control))
                with open(output_file, "w") as out:
                    rtyaml.dump(object, out)

    except Exception as e:
        print("Exception {} processing {}".format(e, template_file))
Example #13
def load_opencontrol_yaml(fn, schema_type, expected_schema_versions):
    # Load a YAML file holding a mapping, and check that its schema_version is recognized.
    # Specify the encoding explicitly because YAML files are always(?) UTF-8 encoded and
    # that may not be the system default encoding (e.g. on Windows the default is based on
    # the system locale). schema_type holds e.g. "system", "standards", or "component," a
    # string to display to the user describing the type of file expected in error messages.
    try:
        with open(fn, encoding="utf8") as f:
            try:
                opencontrol = rtyaml.load(f)
            except Exception as e:
                raise ValueError("OpenControl {} file {} has invalid data (is not valid YAML: {}).".format(
                    schema_type,
                    fn,
                    str(e) ))
            if not isinstance(opencontrol, dict):
                raise ValueError("OpenControl {} file {} has invalid data (should be a mapping, is a {}).".format(
                    schema_type,
                    fn,
                    type(opencontrol) ))
            if expected_schema_versions and opencontrol.get("schema_version") not in expected_schema_versions:
                raise ValueError("Don't know how to read OpenControl {} file {} which has unsupported schema_version {}.".format(
                    schema_type,
                    fn,
                    repr(opencontrol.get("schema_version"))))
            return opencontrol
    except IOError as e:
        raise ValueError("OpenControl {} file {} could not be loaded: {}.".format(
            schema_type,
            fn,
            str(e) ))
Example #14
def read_yaml_from_file(yaml_layout):
    # TODO cache this at application level
    t = template.loader.get_template(yaml_layout)
    # It is important to convert to str here because, depending on the
    # environment, rtyaml may use yaml.CSafeLoader, which requires a str or a stream
    rendered = str(t.render())
    return rtyaml.load(rendered)
Example #15
    def StartApps(fn, outdir):
        # Create stub data structures that are required to do module logic
        # but that have no end-user-visible presence.
        Command.dummy_org = Organization.objects.create(
            subdomain=get_random_string(12))
        Command.dummy_user = User.objects.create(
            username=get_random_string(12))

        # Cache app sources and app instances as we load app data into the
        # database so that when sources and apps occur multiple times we
        # reuse the existing instances in the database.
        Command.app_sources = {}
        Command.app_instances = {}

        # Open the end-user data file.
        data = rtyaml.load(open(fn))

        # Start the app.
        project = Command.start_app(data.get("app"))

        # Fill in the answers.
        Command.set_answers(project.root_task, data.get("questions", []))

        # Generate outputs.
        os.makedirs(outdir, exist_ok=True)
        for path, outputdoc in Command.generate_task_outputs(
            [], project.root_task):
            path = "_".join(path)
            for ext, format in (("html", "html"), ("md", "markdown")):
                if format in outputdoc:
                    fn = os.path.join(outdir, path + "." + ext)
                    with open(fn, "w") as f:
                        f.write(outputdoc[format])
Example #16
def test_read_layout_from_yaml(tmpdir):

    stub_content = '''
ModelName:
  Cool Legend:
  - name:9  place  tiny
  - field  nature:2
  - kind:1  date  unit:5 status
  More data:
  - equalA  equalB  equalC
  - highlander '''

    with mock.patch('sapl.crispy_layout_mixin.read_yaml_from_file') as ryff:
        ryff.return_value = rtyaml.load(stub_content)
        assert read_layout_from_yaml('....', 'ModelName') == [
            ['Cool Legend',
             [('name', 9),  ('place', 2), ('tiny', 1)],
             [('field', 10), ('nature', 2)],
             [('kind', 1), ('date', 3), ('unit', 5), ('status', 3)],
             ],
            ['More data',
             [('equalA', 4), ('equalB', 4), ('equalC', 4)],
             [('highlander', 12)],
             ],
        ]
Example #17
def make_domain_config(domain, template, template_for_primaryhost, env):
	# How will we configure this domain.

	# Where will its root directory be for static files?

	root = get_web_root(domain, env)

	# What private key and SSL certificate will we use for this domain?
	ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)

	# For hostnames created after the initial setup, ensure we have an SSL certificate
	# available. Make a self-signed one now if one doesn't exist.
	ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, env)

	# Put pieces together.
	nginx_conf_parts = re.split("\s*# ADDITIONAL DIRECTIVES HERE\s*", template)
	nginx_conf = nginx_conf_parts[0] + "\n"
	if domain == env['PRIMARY_HOSTNAME']:
		nginx_conf += template_for_primaryhost + "\n"

	# Replace substitution strings in the template & return.
	nginx_conf = nginx_conf.replace("$STORAGE_ROOT", env['STORAGE_ROOT'])
	nginx_conf = nginx_conf.replace("$HOSTNAME", domain)
	nginx_conf = nginx_conf.replace("$ROOT", root)
	nginx_conf = nginx_conf.replace("$SSL_KEY", ssl_key)
	nginx_conf = nginx_conf.replace("$SSL_CERTIFICATE", ssl_certificate)

	# Because the certificate may change, we should recognize this so we
	# can trigger an nginx update.
	def hashfile(filepath):
		import hashlib
		sha1 = hashlib.sha1()
		f = open(filepath, 'rb')
		try:
			sha1.update(f.read())
		finally:
			f.close()
		return sha1.hexdigest()
	nginx_conf += "# ssl files sha1: %s / %s\n" % (hashfile(ssl_key), hashfile(ssl_certificate))

	# Add in any user customizations in YAML format.
	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
	if os.path.exists(nginx_conf_custom_fn):
		yaml = rtyaml.load(open(nginx_conf_custom_fn))
		if domain in yaml:
			yaml = yaml[domain]
			for path, url in yaml.get("proxies", {}).items():
				nginx_conf += "\tlocation %s {\n\t\tproxy_pass %s;\n\t}\n" % (path, url)
			for path, url in yaml.get("redirects", {}).items():
				nginx_conf += "\trewrite %s %s permanent;\n" % (path, url)

	# Add in any user customizations in the includes/ folder.
	nginx_conf_custom_include = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(domain) + ".conf")
	if os.path.exists(nginx_conf_custom_include):
		nginx_conf += "\tinclude %s;\n" % (nginx_conf_custom_include)

	# Ending.
	nginx_conf += nginx_conf_parts[1]

	return nginx_conf
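
The YAML customization file read near the end of Example #17 maps each domain to optional "proxies" and "redirects" mappings from a path to a URL. A hedged sketch with sample data showing how those loops turn the file into nginx directives:

import rtyaml

# Illustrative www/custom.yaml contents (sample domain, paths, and URLs).
sample = """
example.com:
  proxies:
    /app: http://127.0.0.1:8000
  redirects:
    /old: /new
"""

settings = rtyaml.load(sample).get("example.com", {})
nginx_conf = ""
for path, url in settings.get("proxies", {}).items():
    nginx_conf += "\tlocation %s {\n\t\tproxy_pass %s;\n\t}\n" % (path, url)
for path, url in settings.get("redirects", {}).items():
    nginx_conf += "\trewrite %s %s permanent;\n" % (path, url)
print(nginx_conf)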
Example #18
def make_domain_config(domain, template, env):
	# How will we configure this domain.

	# Where will its root directory be for static files?

	root = get_web_root(domain, env)

	# What private key and SSL certificate will we use for this domain?
	ssl_key, ssl_certificate, csr_path = get_domain_ssl_files(domain, env)

	# For hostnames created after the initial setup, ensure we have an SSL certificate
	# available. Make a self-signed one now if one doesn't exist.
	ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, csr_path, env)

	# Replace substitution strings in the template & return.
	nginx_conf = template
	nginx_conf = nginx_conf.replace("$HOSTNAME", domain)
	nginx_conf = nginx_conf.replace("$ROOT", root)
	nginx_conf = nginx_conf.replace("$SSL_KEY", ssl_key)
	nginx_conf = nginx_conf.replace("$SSL_CERTIFICATE", ssl_certificate)

	# Add in any user customizations.
	nginx_conf_parts = re.split("(# ADDITIONAL DIRECTIVES HERE\n)", nginx_conf)
	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
	if os.path.exists(nginx_conf_custom_fn):
		yaml = rtyaml.load(open(nginx_conf_custom_fn))
		if domain in yaml:
			yaml = yaml[domain]
			if "proxy" in yaml:
				nginx_conf_parts[1] += "\tlocation / {\n\t\tproxy_pass %s;\n\t}\n" % yaml["proxy"]

	# Put it all together.	
	nginx_conf = "".join(nginx_conf_parts)

	return nginx_conf
Example #19
def check_executive_file(fn):
  # Open and iterate over the entries.
  with open(fn) as f:
    people = rtyaml.load(f)
  for person in people:
    
    # Check the IDs.
    if "id" not in person:
      error(repr(person) + " is missing 'id'.")
    else:
      # Check that the IDs are valid.
      check_id_types(person, {}, False)

    # Check the name.
    if "name" not in person:
      error(repr(person) + " is missing 'name'.")
    else:
      check_name(person["name"])

    # Check the biographical fields.
    if "bio" not in person:
      error(repr(person) + " is missing 'bio'.")
    else:
      check_bio(person["bio"])

    # Check the terms.
    if "terms" not in person:
      error(repr(person) + " is missing 'terms'.")
    elif not isinstance(person["terms"], list):
      error(repr(person) + " terms has an invalid data type.")
    elif len(person["terms"]) == 0:
      error(repr(person) + " terms is empty.")
    else:
      for i, term in enumerate(person["terms"]):
        check_executive_term(term)
Example #20
def get_new_config(system_name="MySystem", organization_name="MyOrg", description="My shiny new IT system"):
    """Create the config file (opencontrol.yaml) data and return values"""
  
    cfg_str = """schema_version: 1.0.0
name: AgencyApp
metadata:
  authorization_id: ~
  description: Imaginary application for to show faked control narratives.
  organization:
    name: Department of Sobriety
    abbreviation: DOS
  repository: ~
components: []
standards:
- ./standards/NIST-SP-800-53-rev4.yaml
certifications:
- ./certifications/fisma-low-impact.yaml
"""

    # read default opencontrol.yaml into object
    cfg = rtyaml.load(cfg_str)
    # customize values
    cfg["name"] = system_name
    cfg["metadata"]["organization"]["name"] = organization_name
    cfg["metadata"]["description"] = description
    cfg["metadata"]["organization"]["abbreviation"] = None
    if organization_name:
        cfg["metadata"]["organization"]["abbreviation"] = "".join([word[0].upper() for word in organization_name.split(" ")])

    return cfg
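
A hedged usage sketch for Example #20, assuming get_new_config() from the snippet above is in scope; rtyaml.dump turns the customized mapping back into opencontrol.yaml text:

import rtyaml

# Assumes get_new_config() from the example above is importable; arguments are sample data.
cfg = get_new_config(system_name="PayrollApp",
                     organization_name="General Services Administration",
                     description="Payroll processing system")
print(cfg["metadata"]["organization"]["abbreviation"])  # "GSA"
print(rtyaml.dump(cfg))  # serialize back to opencontrol.yaml text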
Example #21
def make_domain_config(domain, template, template_for_primaryhost, env):
	# How will we configure this domain.

	# Where will its root directory be for static files?

	root = get_web_root(domain, env)

	# What private key and SSL certificate will we use for this domain?
	ssl_key, ssl_certificate, csr_path = get_domain_ssl_files(domain, env)

	# For hostnames created after the initial setup, ensure we have an SSL certificate
	# available. Make a self-signed one now if one doesn't exist.
	ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, csr_path, env)

	# Put pieces together.
	nginx_conf_parts = re.split("\s*# ADDITIONAL DIRECTIVES HERE\s*", template)
	nginx_conf = nginx_conf_parts[0] + "\n"
	if domain == env['PRIMARY_HOSTNAME']:
		nginx_conf += template_for_primaryhost + "\n"

	# Replace substitution strings in the template & return.
	nginx_conf = nginx_conf.replace("$STORAGE_ROOT", env['STORAGE_ROOT'])
	nginx_conf = nginx_conf.replace("$HOSTNAME", domain.encode("idna").decode("ascii"))
	nginx_conf = nginx_conf.replace("$ROOT", root)
	nginx_conf = nginx_conf.replace("$SSL_KEY", ssl_key)
	nginx_conf = nginx_conf.replace("$SSL_CERTIFICATE", ssl_certificate)

	# Because the certificate may change, we should recognize this so we
	# can trigger an nginx update.
	def hashfile(filepath):
		import hashlib
		sha1 = hashlib.sha1()
		f = open(filepath, 'rb')
		try:
			sha1.update(f.read())
		finally:
			f.close()
		return sha1.hexdigest()
	nginx_conf += "# ssl files sha1: %s / %s\n" % (hashfile(ssl_key), hashfile(ssl_certificate))

	# Add in any user customizations in YAML format.
	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
	if os.path.exists(nginx_conf_custom_fn):
		yaml = rtyaml.load(open(nginx_conf_custom_fn))
		if domain in yaml:
			yaml = yaml[domain]
			for path, url in yaml.get("proxies", {}).items():
				nginx_conf += "\tlocation %s {\n\t\tproxy_pass %s;\n\t}\n" % (path, url)
			for path, url in yaml.get("redirects", {}).items():
				nginx_conf += "\trewrite %s %s permanent;\n" % (path, url)

	# Add in any user customizations in the includes/ folder.
	nginx_conf_custom_include = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(domain) + ".conf")
	if os.path.exists(nginx_conf_custom_include):
		nginx_conf += "\tinclude %s;\n" % (nginx_conf_custom_include)

	# Ending.
	nginx_conf += nginx_conf_parts[1]

	return nginx_conf
Example #22
    def resolve_standards(self, relative_to):
        standards = {}
        for standard in self.standards:
            standard_path = relative_to / standard
            if standard_path.is_file():
                FILE_SIGNAL.send(self, operation="read", path=standard_path)
                with standard_path.open() as f:
                    obj = rtyaml.load(f)
                    name = obj.pop("name")

                    # TODO: source and license are not in the spec?

                    source = obj.pop("source", "")
                    license = obj.pop("license", "")

                    controls = {
                        control: StandardControl.parse_obj(desc)
                        for control, desc in obj.items() if "family" in desc
                    }

                    std = Standard(name=name,
                                   controls=controls,
                                   source=source,
                                   license=license)
                    std._file = standard
                    standards[name] = std
            else:
                raise Exception(f"Can't open standard file '{standard_path}'")
        return standards
Example #23
    def __enter__(self):
        if isinstance(self.fn_or_stream, str):
            # Open the named file.
            try:
                self.stream = open(self.fn_or_stream, "r+")
            except FileNotFoundError:
                if not isinstance(self.default, (list, dict)):
                    # If there is no default and the file
                    # does not exist, re-raise the exception.
                    raise
                else:
                    # Create a new file holding the default,
                    # then seek back to the beginning so
                    # we can read it below.
                    self.stream = open(self.fn_or_stream, "w+")
                    rtyaml.dump(self.default, self.stream)
                    self.stream.seek(0)

            self.close_on_exit = True
        else:
            # Use the given stream.
            self.stream = self.fn_or_stream
        # Parse stream and return data.
        self.data = rtyaml.load(self.stream)
        return self.data
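
Example #23 is the __enter__ half of a context manager that opens (or creates, when a default is given) a YAML file and hands the parsed data to the with-block. A hedged usage sketch; the class name EditableYaml is hypothetical, and a matching __exit__ that writes self.data back to the file is assumed:

# Hypothetical usage; EditableYaml names the class the __enter__ above belongs to,
# and its (assumed) __exit__ dumps the modified data back to the stream.
with EditableYaml("settings.yaml", default={}) as data:
    data["backups"] = {"enabled": True}
    data.setdefault("version", 1)
# After the block, settings.yaml would contain the updated mapping.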
Example #24
def run():

    print("Finding highest bioguide numbers we know of...")
    highest_num_by_letter = {}
    for fn in ('legislators-current', 'legislators-historical'):
        P = rtyaml.load(open('../%s.yaml' % fn))
        for p in P:
            if not p['id'].get('bioguide'): continue
            if p['id']['bioguide'] == "TODO":
                continue  # 114th Congress staging
            letter = p['id']['bioguide'][0]
            num = p['id']['bioguide'][1:]
            highest_num_by_letter[letter] = max(
                highest_num_by_letter.get(letter, ''), num)

    print("Checking for new bioguide pages...")
    for letter in sorted(highest_num_by_letter):
        num = int(highest_num_by_letter[letter])
        while True:
            num += 1
            bioguide = "%s%06d" % (letter, num)
            try:
                dom = fetch_bioguide_page(bioguide, True)
            except Exception:
                break
            print(bioguide, dom.cssselect("title")[0].text)
Example #25
def check_executive_file(fn):
    # Open and iterate over the entries.
    with open(fn) as f:
        people = rtyaml.load(f)
    for person in people:

        # Check the IDs.
        if "id" not in person:
            error(repr(person) + " is missing 'id'.")
        else:
            # Check that the IDs are valid.
            check_id_types(person, {}, False)

        # Check the name.
        if "name" not in person:
            error(repr(person) + " is missing 'name'.")
        else:
            check_name(person["name"])

        # Check the biographical fields.
        if "bio" not in person:
            error(repr(person) + " is missing 'bio'.")
        else:
            check_bio(person["bio"])

        # Check the terms.
        if "terms" not in person:
            error(repr(person) + " is missing 'terms'.")
        elif not isinstance(person["terms"], list):
            error(repr(person) + " terms has an invalid data type.")
        elif len(person["terms"]) == 0:
            error(repr(person) + " terms is empty.")
        else:
            for i, term in enumerate(person["terms"]):
                check_executive_term(term)
Example #26
def get_backup_config(env, for_save=False):
	backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')

	# Defaults.
	config = {
		"min_age_in_days": 3,
		"target": "local",
	}

	# Merge in anything written to custom.yaml.
	try:
		custom_config = rtyaml.load(open(os.path.join(backup_root, 'custom.yaml')))
		if not isinstance(custom_config, dict): raise ValueError() # caught below
		config.update(custom_config)
	except:
		pass

	# When updating config.yaml, don't do any further processing on what we find.
	if for_save:
		return config

	# helper fields for the admin
	config["file_target_directory"] = os.path.join(backup_root, 'encrypted')
	config["enc_pw_file"] = os.path.join(backup_root, 'secret_key.txt')
	if config["target"] == "local":
		# Expand to the full URL.
		config["target"] = "file://" + config["file_target_directory"]

	return config
Example #27
    def value_from_datadict(self, data, files, name):
        # Override Django Forms widget method `value_from_datadict`
        # Start with the extra data.
        import rtyaml, collections, json
        value = rtyaml.load(
            data[name + "__remaining_"]) or collections.OrderedDict()

        # Add other values.
        for key, label, widget, help_text, show_for_types in self.fields:
            if key == "_remaining_": continue  # already got this
            val = data.get(name + "_" + key)
            if val:
                value[key] = val

        # Map some data.
        if value is None:
            value = ""
        elif value.get("type") == "git-web":
            value["type"] = "git"
            value["url"] = str(value.get("url-web"))
            del value["url-web"]
        elif value.get("type") == "git-ssh":
            value["type"] = "git"
            value["url"] = str(value.get("url-ssh"))
            del value["url-ssh"]

        # Adjust for possible change in Django
        return json.dumps(value)
Example #28
def get_backup_config(env, for_save=False):
    backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')

    # Defaults.
    config = {
        "min_age_in_days": 3,
        "target": "local",
    }

    # Merge in anything written to custom.yaml.
    try:
        custom_config = rtyaml.load(
            open(os.path.join(backup_root, 'custom.yaml')))
        if not isinstance(custom_config, dict):
            raise ValueError()  # caught below
        config.update(custom_config)
    except:
        pass

    # When updating config.yaml, don't do any further processing on what we find.
    if for_save:
        return config

    # helper fields for the admin
    config["file_target_directory"] = os.path.join(backup_root, 'encrypted')
    config["enc_pw_file"] = os.path.join(backup_root, 'secret_key.txt')
    if config["target"] == "local":
        # Expand to the full URL.
        config["target"] = "file://" + config["file_target_directory"]

    return config
Example #29
	def process_donation(self, don, pledge_donations):
		if don["authtest_request"]:
			# This was an authorization test. There's no need to
			# reconcile these. The pledge may have been cancelled,
			# whatever.
			return

		# This is an actual transaction.

		# Sanity checks.

		if not don["authcapture_request"]:
			print(don["donation_id"], "has authtest_request, authcapture_request both False")
			return

		if len(don["line_items"]) == 0:
			print(don["donation_id"], "has no line items")
			return

		txns = set()
		for line_item in don["line_items"]:
			txns.add(line_item["transaction_guid"])
		if len(txns) != 1:
			print(don["donation_id"], "has more than one transaction (should be one)")
			return
		
		# What pledge does this correspond to?

		pledge = Pledge.objects.get(id=rtyaml.load(don["aux_data"])["pledge"])

		# Map the pledge to the donation(s) we see for it.

		pledge_donations[pledge].append(don)
Example #30
def test_read_layout_from_yaml(tmpdir):

    stub_content = '''
ModelName:
  Cool Legend:
  - name:9  place  tiny
  - field  nature:2
  - kind:1  date  unit:5 status
  More data:
  - equalA  equalB  equalC
  - highlander '''

    with mock.patch('saap.crispy_layout_mixin.read_yaml_from_file') as ryff:
        ryff.return_value = rtyaml.load(stub_content)
        assert read_layout_from_yaml('....', 'ModelName') == [
            [
                'Cool Legend',
                [('name', 9), ('place', 2), ('tiny', 1)],
                [('field', 10), ('nature', 2)],
                [('kind', 1), ('date', 3), ('unit', 5), ('status', 3)],
            ],
            [
                'More data',
                [('equalA', 4), ('equalB', 4), ('equalC', 4)],
                [('highlander', 12)],
            ],
        ]
Example #31
def read_yaml_file(f):
    # Use the safe YAML loader via rtyaml, which loads mappings with
    # OrderedDicts so that order is not lost, and catch errors.
    import rtyaml, yaml.scanner, yaml.parser, yaml.constructor
    try:
        return rtyaml.load(f)
    except (yaml.scanner.ScannerError, yaml.parser.ParserError, yaml.constructor.ConstructorError) as e:
        raise ModuleDefinitionError("There was an error parsing the YAML file: " + str(e))
Example #32
def load_selection(selection):
    if selection:
        objects = rtyaml.load(selection)
        standards = objects['standards']
        return set((standard, control_key) for standard in standards
                   for control_key in standards[standard])
    else:
        return None
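
The selection document Example #32 expects is a mapping with a "standards" key, where each standard name maps to a list of control keys. A hedged sketch with illustrative names:

import rtyaml

# Illustrative selection document (standard and control names are sample data).
selection = """
standards:
  NIST-800-53:
    - AC-1
    - AC-2
  PCI-DSS:
    - "3.1"
"""

standards = rtyaml.load(selection)["standards"]
selected = set((standard, control_key)
               for standard in standards
               for control_key in standards[standard])
print(sorted(selected))
# [('NIST-800-53', 'AC-1'), ('NIST-800-53', 'AC-2'), ('PCI-DSS', '3.1')]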
Example #33
def load_settings(env):
    fn = os.path.join(env['STORAGE_ROOT'], 'settings.yaml')
    try:
        config = rtyaml.load(open(fn, "r"))
        if not isinstance(config, dict): raise ValueError() # caught below
        return config
    except:
        return { }
Example #34
    def resolve_component(self, component_path):
        FILE_SIGNAL.send(self, operation="read", path=component_path)
        with component_path.open() as f:
            obj = rtyaml.load(f)
            if self._is_fen(obj):
                return self.resolve_fen_component(obj, component_path)
            else:
                comp = Component.parse_obj(obj)
            return comp
Example #35
def load_settings(env):
    import rtyaml
    fn = os.path.join(env['STORAGE_ROOT'], 'settings.yaml')
    try:
        config = rtyaml.load(open(fn, "r"))
        if not isinstance(config, dict): raise ValueError()  # caught below
        return config
    except:
        return {}
Example #36
def validate_document(doc, error_message_name, app):
    # The document must be either a string which points to another
    # file holding the document, or a dictionary. But the string
    # form isn't available if we're validating a new spec submitted
    # by the authoring tool since we don't have the app virtual
    # filesystem at that point.
    if app:
        if not isinstance(doc, (str, dict)):
            raise ValidationError(error_message_name, "Must be a file name or dictionary, not a %s." % type(doc).__name__)
    else:
        if not isinstance(doc, dict):
            raise ValidationError(error_message_name, "Must be a dictionary, not a %s." % type(doc).__name__)

    # If it's a string, slurp in the document from an external file.
    # The document begins with YAML dictionary terminated by a line
    # containing three dots. The subsequent content is stored in
    # the dictionary's 'template' field. The file name is stored
    # in the 'filename' field so that we can re-generate the original
    # filesystem layout in Module::serialize_to_disk.
    if isinstance(doc, str):
        error_message_name += " ({})".format(doc)

        # Read the external file.
        blob = app.read_file(doc)

        # Split the file on the first occurrence of three dots. This
        # is YAML's standard end-of-stream marker. But PyYAML doesn't
        # have a way to read just up to the "...", so we handle that
        # ourselves.
        sep = "\n...\n"
        if sep not in blob:
            raise ValidationError(error_message_name, "File does not contain a line with just '...'.")
        data, template = blob.split(sep, 1)

        # Parse the YAML above the "...".
        data = rtyaml.load(data)

        # Trim the template so that it looks good if the revised
        # module spec is serialized to YAML.
        template = template.rstrip() + "\n"

        # Store the filename and template in it.
        data['filename'] = doc
        data['template'] = template
        doc = data

    # Check that the template is valid.
    try:
        render_content(doc, None, "PARSE_ONLY", "(document template)")
    except KeyError as e:
        raise ValidationError(error_message_name, "Missing field: %s" % str(e))
    except ValueError as e:
        raise ValidationError(error_message_name, "Invalid template: %s" % str(e))

    return doc
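
The external document format Example #36 reads is a YAML mapping terminated by a line containing only three dots, followed by the raw template text. A hedged sketch of that split, with illustrative content:

import rtyaml

# Illustrative external document file contents (sample front matter and template).
blob = """format: markdown
title: System Security Plan
...
# {{project.title}}

Body of the template lives below the '...' separator.
"""

sep = "\n...\n"
data, template = blob.split(sep, 1)
doc = rtyaml.load(data)
doc["template"] = template.rstrip() + "\n"
doc["filename"] = "ssp.md"  # illustrative filename
print(doc["format"], doc["title"])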
Example #37
def main():
    rawdata = ryaml.load(SRC_PATH.open())
    # Let's try sorting by time period
    data = sorted(rawdata, key=sortfoo, reverse=True)

    tablerows = []
    for d in data:
        course = '{0} » {1}'.format(
            d['title'],
            d['time_period']) if d.get('time_period') else d['title']
        if d.get('description'):
            desc = re.sub(r'\s+', ' ', d['description'])
            desc = desc[:DESC_LENGTH] + '...' if len(
                desc) >= DESC_LENGTH else desc
        else:
            desc = ""

        if d.get('instructors'):
            teachers = '<p>Instructors: {0}</p>'.format(', '.join(
                d['instructors']))
        else:
            teachers = ''

        if d.get('homepage') == d.get('syllabus'):
            links = """<a href="{0}">Homepage/Syllabus</a>""".format(
                d['homepage'])
        else:
            links = ' / '.join([
                """\n<a href="{1}">{0}</a>""".format(n.capitalize(), d[n])
                for n in ('homepage', 'syllabus') if d.get(n)
            ])

        tablerows.append(
            ROW_TEMPLATE.substitute(
                course=course,
                description=desc,
                links=links,
                teachers=teachers,
                organization=(d['org'] if d.get('org') else '')))

    table_header = TABLE_TEMPLATE.substitute(rowcount=len(tablerows))
    boilerplate_text = BOIL_PATH.read_text()

    try:
        with DEST_PATH.open('w') as f:
            f.write(boilerplate_text)
            f.write(table_header)
            f.write("\n".join(tablerows))
            f.write("</tbody></table>")
            print(f"Success: {len(tablerows)} courses listed")

    # worst error-handling code ever:
    except Exception as err:
        stderr.write(f"Aborting...Error: {err}\n")
Example #38
    def setUp(self):
        self.maxDiff = None

        yaml_filename = os.path.join(_script_dir, "..", "Impl", "AllTypes.yaml")
        assert os.path.isfile(yaml_filename), yaml_filename

        with open(yaml_filename) as f:
            yaml_content = rtyaml.load(f)

        self._yaml_filename = yaml_filename
        self._yaml_content = yaml_content
Example #39
def Deserialize_named_filters(
    items,
    process_additional_data=False,
    always_include_optional=False,
    is_root=False,
):
    """Deserializes 'named_filters' from a YAML object to a python object"""

    if isinstance(items, six.string_types):
        if FileSystem.IsFilename(items):
            with open(items) as f:
                items = rtyaml.load(f)
        else:
            items = rtyaml.load(items)

    if not isinstance(items, list):
        if isinstance(items, dict) and "named_filters" in items:
            items = items["named_filters"]
        elif not isinstance(items, dict) and hasattr(items, "named_filters"):
            items = getattr(items, "named_filters")
        elif is_root:
            items = DoesNotExist

    try:
        try:
            items = Deserializer().named_filters(
                items,
                process_additional_data=process_additional_data,
                always_include_optional=always_include_optional,
            )

            if items is DoesNotExist:
                items = []
        except:
            _DecorateActiveException("named_filters")
    except SerializationException:
        raise
    except Exception as ex:
        raise DeserializeException(ex)

    return items
Example #40
def check_legislators_file(fn, seen_ids, current=None, current_mocs=None):
    # Open and iterate over the entries.
    with open(fn) as f:
        legislators = rtyaml.load(f)
    for legislator in legislators:
        # Create a string for error messages to tell us where problems are occurring.
        context = "{} in {}".format(fn, repr(legislator))

        # Check the IDs.
        if "id" not in legislator:
            error(context, "Missing 'id' mapping.")
        else:
            # Check that the IDs are valid.
            check_id_types(legislator, seen_ids, True, context)

        # Create a string for error messages to tell us where problems are occurring.
        context = "{}:{}".format(fn, legislator['id']['bioguide'])

        # Check the name.
        if "name" not in legislator:
            error(context, "Missing 'name' mapping.")
        else:
            check_name(legislator["name"], context)
        for name in legislator.get("other_names", []):
            check_name(name, context + ":other_names", is_other_names=True)

        # Check the biographical fields.
        if "bio" not in legislator:
            error(context, "Missing 'bio' mapping.")
        else:
            check_bio(legislator["bio"], current, context)

        # Check the terms.
        if "terms" not in legislator:
            error(context, "Missing 'terms' list.")
        elif not isinstance(legislator["terms"], list):
            error(context, "'terms' has an invalid data type.")
        elif len(legislator["terms"]) == 0:
            error(context, "'terms' is empty.")
        else:
            prev_term = None
            for i, term in enumerate(legislator["terms"]):
                check_term(term,
                           prev_term,
                           context + ":terms[{}]".format(i),
                           current=(current
                                    and i == len(legislator["terms"]) - 1),
                           current_mocs=current_mocs)
                prev_term = term

        # Check the leadership roles.
        check_leadership_roles(legislator.get("leadership_roles", []), current,
                               context)
Example #41
def Deserialize(
    root,
    process_additional_data=False,
    always_include_optional=False,
):
    """Convenience method that deserializes all top-level elements"""

    if isinstance(root, six.string_types):
        if FileSystem.IsFilename(root):
            with open(root) as f:
                root = rtyaml.load(f)
        else:
            root = rtyaml.load(root)

    result = _CreatePythonObject(attributes=None, )

    this_result = Deserialize_filter(
        root,
        is_root=True,
        process_additional_data=process_additional_data,
        always_include_optional=always_include_optional,
    )
    if this_result is not DoesNotExist:
        setattr(result, "filter", this_result)
    elif always_include_optional:
        setattr(result, "filter", None)

    this_result = Deserialize_named_filters(
        root,
        is_root=True,
        process_additional_data=process_additional_data,
        always_include_optional=always_include_optional,
    )
    if this_result is not DoesNotExist:
        setattr(result, "named_filters", this_result)
    elif always_include_optional:
        setattr(result, "named_filters", [])

    return result
Example #42
def get_web_domains_with_root_overrides(env):
	# Load custom settings so we can tell what domains have a redirect or proxy set up on '/',
	# which means static hosting is not happening.
	root_overrides = { }
	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
	if os.path.exists(nginx_conf_custom_fn):
		custom_settings = rtyaml.load(open(nginx_conf_custom_fn))
		for domain, settings in custom_settings.items():
			for type, value in [('redirect', settings.get('redirects', {}).get('/')),
				('proxy', settings.get('proxies', {}).get('/'))]:
				if value:
					root_overrides[domain] = (type, value)
	return root_overrides
Example #43
def get_web_domains_with_root_overrides(env):
    # Load custom settings so we can tell what domains have a redirect or proxy set up on '/',
    # which means static hosting is not happening.
    root_overrides = {}
    nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
    if os.path.exists(nginx_conf_custom_fn):
        custom_settings = rtyaml.load(open(nginx_conf_custom_fn))
        for domain, settings in custom_settings.items():
            for type, value in [('redirect', settings.get('redirects', {}).get('/')),
                                ('proxy', settings.get('proxies', {}).get('/'))]:
                if value:
                    root_overrides[domain] = (type, value)
    return root_overrides
Example #44
def check_legislators_file(fn, seen_ids, current=None, current_mocs=None):
  # Open and iterate over the entries.
  with open(fn) as f:
    legislators = rtyaml.load(f)
  for legislator in legislators:
    # Create a string for error messages to tell us where problems are occurring.
    context = "{} in {}".format(fn, repr(legislator))

    # Check the IDs.
    if "id" not in legislator:
      error(context, "Missing 'id' mapping.")
    else:
      # Check that the IDs are valid.
      check_id_types(legislator, seen_ids, True, context)

    # Create a string for error messages to tell us where problems are occurring.
    context = "{}:{}".format(fn, legislator['id']['bioguide'])

    # Check the name.
    if "name" not in legislator:
      error(context, "Missing 'name' mapping.")
    else:
      check_name(legislator["name"], context)
    for name in legislator.get("other_names", []):
      check_name(name, context + ":other_names", is_other_names=True)

    # Check the biographical fields.
    if "bio" not in legislator:
      error(context, "Missing 'bio' mapping.")
    else:
      check_bio(legislator["bio"], current, context)

    # Check the terms.
    if "terms" not in legislator:
      error(context, "Missing 'terms' list.")
    elif not isinstance(legislator["terms"], list):
      error(context, "'terms' has an invalid data type.")
    elif len(legislator["terms"]) == 0:
      error(context, "'terms' is empty.")
    else:
      prev_term = None
      for i, term in enumerate(legislator["terms"]):
        check_term(term, prev_term, context+":terms[{}]".format(i),
          current=(current and i==len(legislator["terms"])-1),
          current_mocs=current_mocs)
        prev_term = term

    # Check the leadership roles.
    check_leadership_roles(legislator.get("leadership_roles", []), current, context)
Example #45
def process_rule(filename, oval_nodes):
    # A rule is metadata + zero or more tests.

    yaml = rtyaml.load(open(filename))

    # Create the rule definition.
    oval_nodes["definition_count"] += 1
    defnode = make_node(
        oval_nodes["definitions"],
        "definition",
        None,
        id="oval:easyscap_generated:def:%d" % oval_nodes["definition_count"],
        version="1",
    )
    defnode.set("class", "compliance")
    defnodemeta = make_node(defnode, "metadata")
    make_node(defnodemeta, "title", yaml["title"])
    affected = make_node(defnodemeta, "affected", family="unix")
    make_node(affected, "platform", "Unknown")
    make_node(
        defnodemeta, "description", pandoc(yaml["description"], "markdown", "html")
    )  # should be inserted raw, not escaped
    defnodecriteria = None

    # Create OVAL definitions for the variables.
    var_map = {}
    try:
        for key, var in yaml.get("variables", {}).items():
            node = dict_to_node(oval_nodes["variables"], var, oval_nodes=oval_nodes)
            varid = "oval:%s:var:%d" % (yaml["id"], key + 1)
            node.set("id", varid)
            var_map[key] = varid
    except Exception as e:
        raise Exception("Error processing rule %s: %s" % (filename, str(e)))

    # Create OVAL definitions for the tests.
    try:
        for i, test in enumerate(yaml.get("tests", [])):
            node = process_test(test, oval_nodes, yaml["id"], i, var_map)

            if defnodecriteria is None:
                defnodecriteria = make_node(defnode, "criteria", None, operator=yaml["operator"])
            make_node(defnodecriteria, "criterion", test_ref=node.get("id"))

    except Exception as e:
        raise Exception("Error processing rule %s: %s" % (filename, str(e)))
Example #46
def make_domain_config(domain, template, template_for_primaryhost, env):
	# How will we configure this domain.

	# Where will its root directory be for static files?

	root = get_web_root(domain, env)

	# What private key and SSL certificate will we use for this domain?
	ssl_key, ssl_certificate, csr_path = get_domain_ssl_files(domain, env)

	# For hostnames created after the initial setup, ensure we have an SSL certificate
	# available. Make a self-signed one now if one doesn't exist.
	ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, csr_path, env)

	# Put pieces together.
	nginx_conf_parts = re.split("\s*# ADDITIONAL DIRECTIVES HERE\s*", template)
	nginx_conf = nginx_conf_parts[0] + "\n"
	if domain == env['PRIMARY_HOSTNAME']:
		nginx_conf += template_for_primaryhost + "\n"

	# Replace substitution strings in the template & return.
	nginx_conf = nginx_conf.replace("$STORAGE_ROOT", env['STORAGE_ROOT'])
	nginx_conf = nginx_conf.replace("$HOSTNAME", domain)
	nginx_conf = nginx_conf.replace("$ROOT", root)
	nginx_conf = nginx_conf.replace("$SSL_KEY", ssl_key)
	nginx_conf = nginx_conf.replace("$SSL_CERTIFICATE", ssl_certificate)

	# Add in any user customizations in YAML format.
	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
	if os.path.exists(nginx_conf_custom_fn):
		yaml = rtyaml.load(open(nginx_conf_custom_fn))
		if domain in yaml:
			yaml = yaml[domain]
			for path, url in yaml.get("proxies", {}).items():
				nginx_conf += "\tlocation %s {\n\t\tproxy_pass %s;\n\t}\n" % (path, url)

	# Add in any user customizations in the includes/ folder.
	nginx_conf_custom_include = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(domain) + ".conf")
	if os.path.exists(nginx_conf_custom_include):
		nginx_conf += "\tinclude %s;\n" % (nginx_conf_custom_include)

	# Ending.
	nginx_conf += nginx_conf_parts[1]

	return nginx_conf
Example #47
def missing_data(request):
    # What data are we missing?

    # What data are we missing about current legislators?

    # Load the pronunciation guide.
    import os.path
    if not hasattr(settings, 'PRONUNCIATION_DATABASE_PATH'):
        pronunciation_guide = None
    else:
        import rtyaml
        pronunciation_guide = { p["id"]["govtrack"]: p for p in rtyaml.load(open(settings.PRONUNCIATION_DATABASE_PATH)) }

    from person.models import Person
    from person.analysis import load_scorecards_for
    people = { }
    def add_person(p):
        return people.setdefault(p.id, {
            "id": p.id,
            "name": p.sortname,
            "link": p.get_absolute_url(),
        })
    for p in Person.objects.filter(roles__current=True):
        if not p.has_photo():
            add_person(p).update({ "photo": "✘" })
        if not p.birthday:
            add_person(p).update({ "birthday": "✘" })
        if not p.twitterid:
            add_person(p).update({ "twitter": "✘" })
        if pronunciation_guide:
            if p.id not in pronunciation_guide:
                add_person(p).update({ "pronunciation": "✘" })
            # Check that the name in the guide matches the name we display.
            elif pronunciation_guide[p.id]['name'] != p.firstname + " // " + p.lastname:
                add_person(p).update({ "pronunciation": "mismatch" })
        if not load_scorecards_for(p):
            # new legislators won't have scorecards for a while
            add_person(p).update({ "scorecards": "✘" })
    people = sorted(people.values(), key=lambda p : p['name'])

    return {
        "people": people,
    }
Example #48
def load_from_comments(text):

    result = {}
    blocks = re.findall(r'^(\s*#.*?)\n(?:\w|$)', text, re.DOTALL | re.MULTILINE)

    for block in blocks:
        block_lines = block.splitlines()

        while (len(block_lines) > 0) and (':' not in block_lines[0]):
            block_lines.pop(0)

        while (len(block_lines) > 0):
            try:
                block = '\n'.join(block_lines)
                result.update(yaml.load(block.replace('#', '')))
                break
            except Exception as e:
                block_lines.pop()

    return result
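
Example #48 recovers settings that were left commented out in the new document, by stripping the leading '#' characters from comment blocks and retrying YAML parsing until a block parses. A simplified, hedged sketch of the underlying idea with sample text:

import yaml

# Illustrative config text with commented-out settings (sample keys and values).
text = """
# verbose: true
# log_file: /tmp/app.log
host: localhost
"""

# Strip the comment markers and parse what remains as YAML.
commented = "\n".join(line.lstrip("# ") for line in text.splitlines()
                      if line.lstrip().startswith("#"))
print(yaml.safe_load(commented))  # {'verbose': True, 'log_file': '/tmp/app.log'}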
Example #49
def process_group(filename, oval_nodes):
    yaml = rtyaml.load(open(filename))

    # Process all test definitions in this group.
    for rule in yaml.get("rules", []):
        fn = os.path.join(os.path.dirname(filename), rule + ".yaml")
        process_rule(fn, oval_nodes)

    # Recursively process all subgroups mentioned in this group.
    for subgroup in yaml.get("subgroups", []):
        # Subgroups are specified either as a relative path name, or as a
        # relative directory name in which we look for group.yaml.
        fn = os.path.join(os.path.dirname(filename), subgroup)
        if os.path.exists(fn) and not os.path.isdir(fn):
            process_group(fn, oval_nodes)
            continue

        fn = os.path.join(os.path.dirname(filename), subgroup, "group.yaml")
        if os.path.exists(fn):
            process_group(fn, oval_nodes)
            continue
Example #50
def load_scorecards():
    global _scorecards
    if _scorecards is None and hasattr(settings, 'SCORECARDS_DATABASE_PATH'):
        _scorecards = []
        for fn in sorted(glob.glob(settings.SCORECARDS_DATABASE_PATH + "/*.yaml")):
            with open(fn) as f:
                # Split on "...", read the top as YAML and the
                # bottom as CSV.
                metadata, scores = f.read().split("\n...\n")
                metadata = rtyaml.load(metadata)
                scores = list(csv.reader(io.StringIO(scores)))

                # Store scores as a mapping from person IDs to score info.
                letter_grades = ("F", "D-", "D", "D+", "C-", "C", "C+", "B-", "B", "B+", "A-", "A", "A+")
                def format_score(score, info):
                    try:
                        if metadata.get("type") == "percent":
                            return {
                                "display": str(int(score)) + "%",
                                "sort": int(score),
                            }
                        if metadata.get("type") == "grade":
                            # for sorting, turn the grade into a number from 0 to 100
                            score = score.strip()
                            return {
                                "display": score,
                                "sort": letter_grades.index(score)/float(len(letter_grades)-1)*100,
                            }
                        raise ValueError()
                    except:
                        raise ValueError("Invalid scorecard entry for %s: %s %s." % (info, repr(metadata.get("type")), repr(score)))
                metadata["scores"] = {
                    int(row[0]): format_score(row[1].strip(), [fn, row[0]] + row[2:])
                    for row in scores
                    if row[0].strip() != ""
                }
                metadata["based_on"] = metadata["based-on"]
                _scorecards.append(metadata)
        _scorecards.sort(key = lambda scorecard : scorecard.get("abbrev") or scorecard["name"])
    return _scorecards
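Each scorecard file is therefore YAML metadata, a bare "..." separator line, and CSV rows of GovTrack person ID and score. A made-up file of the expected shape (the values are invented; "based-on" and either "abbrev" or "name" must be present, per the code above):

example_scorecard = """name: Example Environmental Scorecard
abbrev: EES
type: percent
based-on: votes in the 116th Congress
...
400001,95
400002,20
"""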
Example No. 51
def get_web_domains_info(env):
	# load custom settings so we can tell what domains have a redirect or proxy set up on '/',
	# which means static hosting is not happening
	custom_settings = { }
	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
	if os.path.exists(nginx_conf_custom_fn):
		custom_settings = rtyaml.load(open(nginx_conf_custom_fn))
	def has_root_proxy_or_redirect(domain):
		return custom_settings.get(domain, {}).get('redirects', {}).get('/') or custom_settings.get(domain, {}).get('proxies', {}).get('/')

	# for the SSL config panel, get cert status
	def check_cert(domain):
		from status_checks import check_certificate
		ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)
		if not os.path.exists(ssl_certificate):
			return ("danger", "No Certificate Installed")
		cert_status, cert_status_details = check_certificate(domain, ssl_certificate, ssl_key)
		if cert_status == "OK":
			if not ssl_via:
				return ("success", "Signed & valid. " + cert_status_details)
			else:
				# This is an alternate domain but using the same cert as the primary domain.
				return ("success", "Signed & valid. " + ssl_via)
		elif cert_status == "SELF-SIGNED":
			return ("warning", "Self-signed. Get a signed certificate to stop warnings.")
		else:
			return ("danger", "Certificate has a problem: " + cert_status)

	return [
		{
			"domain": domain,
			"root": get_web_root(domain, env),
			"custom_root": get_web_root(domain, env, test_exists=False),
			"ssl_certificate": check_cert(domain),
			"static_enabled": not has_root_proxy_or_redirect(domain),
		}
		for domain in get_web_domains(env)
	]
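Static hosting is reported as disabled only when $STORAGE_ROOT/www/custom.yaml maps the root path of a domain to a redirect or proxy. An invented example of such a file (domains and URLs are placeholders):

example_custom_yaml = """
example.com:
  proxies:
    /: http://127.0.0.1:8000
www.example.net:
  redirects:
    /: https://example.com/
"""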
Example No. 52
def convert():
    data = rtyaml.load(open("legislators-current.yaml", "rb"))
    reps = [x for x in data if x["terms"][-1]["type"] == "rep"]
    fields = [
        "official_full",
        "first",
        "middle",
        "last",
        "birthday",
        "gender",
        "start",
        "end",
        "state",
        "district",
        "party",
        "state_district"
    ]
    with open("legislators-current.csv", "wb") as fp:
        writer = csv.DictWriter(fp, fields)
        writer.writeheader()
        rep_data = {}
        for rep in reps:
            rep_data["official_full"] = rep["name"]["official_full"].encode("utf-8")
            rep_data["first"] = rep["name"]["first"].encode("utf-8")
            rep_data["middle"] = rep["name"].get("middle", "").encode("utf-8")
            rep_data["last"] = rep["name"]["last"].encode("utf-8")
            rep_data["birthday"] = rep["bio"]["birthday"]
            rep_data["gender"] = rep["bio"]["gender"]
            rep_data["start"] = rep["terms"][-1]["start"]
            rep_data["end"] = rep["terms"][-1]["end"]
            rep_data["state"] = rep["terms"][-1]["state"]
            rep_data["district"] = rep["terms"][-1]["district"]
            rep_data["party"] = rep["terms"][-1]["party"]
            rep_data["state_district"] = "{0}-{1}".format(
                rep["terms"][-1]["state"],
                rep["terms"][-1]["district"]
            )
            writer.writerow(rep_data)
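The convert script above is Python 2 code (binary-mode CSV file, explicit .encode() calls). Under Python 3 the file handling would look roughly like this, with the .encode() calls dropped and the rest of the loop unchanged; a sketch only, not part of the original script:

with open("legislators-current.csv", "w", newline="", encoding="utf-8") as fp:
    writer = csv.DictWriter(fp, fields)
    writer.writeheader()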
Example No. 53
def check_executive_file(fn):
  # Open and iterate over the entries.
  with open(fn) as f:
    people = rtyaml.load(f)
  for person in people:
    # Create a string for error messages to tell us where problems are occurring.
    context = "{} in {}".format(fn, repr(person))

    # Check the IDs.
    if "id" not in person:
      error(context, "Missing 'id' mapping.")
    else:
      # Check that the IDs are valid.
      check_id_types(person, {}, False, context)

    # Check the name.
    if "name" not in person:
      error(context, "Missing 'name' mapping.")
    else:
      check_name(person["name"], context)

    # Check the biographical fields.
    if "bio" not in person:
      error(context, "Missing 'bio' mapping.")
    else:
      check_bio(person["bio"], False, repr(person))

    # Check the terms.
    if "terms" not in person:
      error(context, "Missing 'terms' list.")
    elif not isinstance(person["terms"], list):
      error(context, "'terms' has an invalid data type.")
    elif len(person["terms"]) == 0:
      error(context, "'terms' is empty.")
    else:
      for i, term in enumerate(person["terms"]):
        check_executive_term(term, context+":term[{}]".format(i))
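check_executive_file expects each entry to be a mapping with id, name, bio and terms keys. An invented entry of that general shape (the sub-fields are assumptions for illustration, not taken from the validator above):

example_executive_entry = """
- id:
    bioguide: X000000
  name:
    first: Jane
    last: Doe
  bio:
    birthday: '1960-01-01'
    gender: F
  terms:
    - type: prez
      start: '2001-01-20'
      end: '2009-01-20'
"""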
Example No. 54
def run():

  print("Finding highest bioguide numbers we know of...")
  highest_num_by_letter = { }
  for fn in ('legislators-current', 'legislators-historical'):
    P = rtyaml.load(open('../%s.yaml' % fn))
    for p in P:
      if not p['id'].get('bioguide'): continue
      if p['id']['bioguide'] == "TODO": continue # 114th Congress staging
      letter = p['id']['bioguide'][0]
      num = p['id']['bioguide'][1:]
      highest_num_by_letter[letter] = max(highest_num_by_letter.get(letter, ''), num)

  print("Checking for new bioguide pages...")
  for letter in sorted(highest_num_by_letter):
    num = int(highest_num_by_letter[letter])
    while True:
      num += 1
      bioguide = "%s%06d" % (letter, num)
      try:
        dom = fetch_bioguide_page(bioguide, True)
      except Exception:
        break
      print(bioguide, dom.cssselect("title")[0].text)
Example No. 55
def get_custom_dns_config(env):
	try:
		return rtyaml.load(open(os.path.join(env['STORAGE_ROOT'], 'dns/custom.yaml')))
	except:
		return { }
Example No. 56
SCHEMA_PATHS = {
    # assumed entry: xs['committee_memberships'] is used below, so its path
    # presumably follows the same congress_legislators pattern
    'committee_memberships': "congress_legislators/committee_memberships",
    'committees': "congress_legislators/committees",
    'fec_ids': "congress_legislators/fec_ids",
    'legislators': "congress_legislators/legislators",
    'social_media_accounts': "congress_legislators/social_media_accounts",
    'terms': "congress_legislators/terms",
    # examples of mapping a slug to a different filename
    'fec_candidate_summaries': 'fec/candidate_summaries',
    'friendships': "twitter/friendships",
    'tweets': "twitter/tweets",
    'twitter_profiles': "twitter/twitter_profiles"
}
## load up the schemas
xs = {}
for slug, bn in SCHEMA_PATHS.items():
    fn = join(SCHEMAS_DIR, bn + '.yaml')
    xs[slug] = rtyaml.load(open(fn))

MyTable = namedtuple('MyTable', "table_name, schema")

## Tables I want
mytables = [
    MyTable('committee_memberships', xs['committee_memberships']),
    MyTable('committees', xs['committees']),
    MyTable('fec_ids', xs['fec_ids']),
    MyTable('legislators', xs['legislators']),
    MyTable('social_media_accounts', xs['social_media_accounts']),
    MyTable('terms', xs['terms']),
    MyTable('fec_candidate_summaries', xs['fec_candidate_summaries']),
    MyTable('tweets', xs['tweets']),
    MyTable('congress_twitter_profiles', xs['twitter_profiles'])
]
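Each MyTable pairs a destination table name with the schema parsed from its YAML file, so downstream code can iterate them uniformly. A trivial sketch, assuming mytables is in scope and making no assumptions about the schema shape beyond what rtyaml.load returned:

for t in mytables:
    print(t.table_name, type(t.schema).__name__)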
Example No. 57
def make_domain_config(domain, templates, env):
	# GET SOME VARIABLES

	# Where will its root directory be for static files?
	root = get_web_root(domain, env)

	# What private key and SSL certificate will we use for this domain?
	ssl_key, ssl_certificate, ssl_via = get_domain_ssl_files(domain, env)

	# For hostnames created after the initial setup, ensure we have an SSL certificate
	# available. Make a self-signed one now if one doesn't exist.
	ensure_ssl_certificate_exists(domain, ssl_key, ssl_certificate, env)

	# ADDITIONAL DIRECTIVES.

	nginx_conf_extra = ""

	# Because the certificate may change, we should recognize this so we
	# can trigger an nginx update.
	def hashfile(filepath):
		import hashlib
		sha1 = hashlib.sha1()
		f = open(filepath, 'rb')
		try:
			sha1.update(f.read())
		finally:
			f.close()
		return sha1.hexdigest()
	nginx_conf_extra += "# ssl files sha1: %s / %s\n" % (hashfile(ssl_key), hashfile(ssl_certificate))

	# Add in any user customizations in YAML format.
	nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
	if os.path.exists(nginx_conf_custom_fn):
		yaml = rtyaml.load(open(nginx_conf_custom_fn))
		if domain in yaml:
			yaml = yaml[domain]
			for path, url in yaml.get("proxies", {}).items():
				nginx_conf_extra += "\tlocation %s {\n\t\tproxy_pass %s;\n\t}\n" % (path, url)
			for path, url in yaml.get("redirects", {}).items():
				nginx_conf_extra += "\trewrite %s %s permanent;\n" % (path, url)

	# Add in any user customizations in the includes/ folder.
	nginx_conf_custom_include = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(domain) + ".conf")
	if os.path.exists(nginx_conf_custom_include):
		nginx_conf_extra += "\tinclude %s;\n" % (nginx_conf_custom_include)
	# PUT IT ALL TOGETHER

	# Combine the pieces. Iteratively place each template into the "# ADDITIONAL DIRECTIVES HERE" placeholder
	# of the previous template.
	nginx_conf = "# ADDITIONAL DIRECTIVES HERE\n"
	for t in templates + [nginx_conf_extra]:
		nginx_conf = re.sub("[ \t]*# ADDITIONAL DIRECTIVES HERE *\n", t, nginx_conf)

	# Replace substitution strings in the template & return.
	nginx_conf = nginx_conf.replace("$STORAGE_ROOT", env['STORAGE_ROOT'])
	nginx_conf = nginx_conf.replace("$HOSTNAME", domain)
	nginx_conf = nginx_conf.replace("$ROOT", root)
	nginx_conf = nginx_conf.replace("$SSL_KEY", ssl_key)
	nginx_conf = nginx_conf.replace("$SSL_CERTIFICATE", ssl_certificate)
	nginx_conf = nginx_conf.replace("$REDIRECT_DOMAIN", re.sub(r"^www\.", "", domain)) # for default www redirects to parent domain

	return nginx_conf
Example No. 58
def make_domain_config(domain, templates, ssl_certificates, env):
    # GET SOME VARIABLES

    # Where will its root directory be for static files?
    root = get_web_root(domain, env)

    # What private key and SSL certificate will we use for this domain?
    tls_cert = get_domain_ssl_files(domain, ssl_certificates, env)

    # ADDITIONAL DIRECTIVES.

    nginx_conf_extra = ""

    # Because the certificate may change, we should recognize this so we
    # can trigger an nginx update.
    def hashfile(filepath):
        import hashlib
        sha1 = hashlib.sha1()
        f = open(filepath, 'rb')
        try:
            sha1.update(f.read())
        finally:
            f.close()
        return sha1.hexdigest()

    nginx_conf_extra += "# ssl files sha1: %s / %s\n" % (
        hashfile(tls_cert["private-key"]), hashfile(tls_cert["certificate"]))

    # Add in any user customizations in YAML format.
    hsts = "yes"
    nginx_conf_custom_fn = os.path.join(env["STORAGE_ROOT"], "www/custom.yaml")
    if os.path.exists(nginx_conf_custom_fn):
        yaml = rtyaml.load(open(nginx_conf_custom_fn))
        if domain in yaml:
            yaml = yaml[domain]

            # any proxy or redirect here?
            for path, url in yaml.get("proxies", {}).items():
                nginx_conf_extra += "\tlocation %s {\n\t\tproxy_pass %s;\n\t}\n" % (path, url)
            for path, url in yaml.get("redirects", {}).items():
                nginx_conf_extra += "\trewrite %s %s permanent;\n" % (path, url)

            # override the HSTS directive type
            hsts = yaml.get("hsts", hsts)

    # Add the HSTS header.
    if hsts == "yes":
        nginx_conf_extra += "add_header Strict-Transport-Security max-age=31536000;\n"
    elif hsts == "preload":
        nginx_conf_extra += "add_header Strict-Transport-Security \"max-age=10886400; includeSubDomains; preload\";\n"

    # Add in any user customizations in the includes/ folder.
    nginx_conf_custom_include = os.path.join(env["STORAGE_ROOT"], "www", safe_domain_name(domain) + ".conf")
    if os.path.exists(nginx_conf_custom_include):
        nginx_conf_extra += "\tinclude %s;\n" % (nginx_conf_custom_include)
    # PUT IT ALL TOGETHER

    # Combine the pieces. Iteratively place each template into the "# ADDITIONAL DIRECTIVES HERE" placeholder
    # of the previous template.
    nginx_conf = "# ADDITIONAL DIRECTIVES HERE\n"
    for t in templates + [nginx_conf_extra]:
        nginx_conf = re.sub("[ \t]*# ADDITIONAL DIRECTIVES HERE *\n", t, nginx_conf)

    # Replace substitution strings in the template & return.
    nginx_conf = nginx_conf.replace("$STORAGE_ROOT", env['STORAGE_ROOT'])
    nginx_conf = nginx_conf.replace("$HOSTNAME", domain)
    nginx_conf = nginx_conf.replace("$ROOT", root)
    nginx_conf = nginx_conf.replace("$SSL_KEY", tls_cert["private-key"])
    nginx_conf = nginx_conf.replace("$SSL_CERTIFICATE", tls_cert["certificate"])
    nginx_conf = nginx_conf.replace("$REDIRECT_DOMAIN",
                                    re.sub(r"^www\.", "", domain))  # for default www redirects to parent domain

    return nginx_conf
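This version additionally lets $STORAGE_ROOT/www/custom.yaml override the HSTS directive per domain: "yes" (the default) emits the plain header, "preload" emits the preload variant, and any other value causes this block to emit no HSTS header at all. An invented example (domain and paths are placeholders):

example_custom_yaml = """
example.com:
  hsts: preload
  redirects:
    /old/: /new/
"""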
Example No. 59
def load_misconduct_data():
    global misconduct_data
    if not misconduct_data:
        # Load data.
        import os.path, rtyaml
        if not hasattr(settings, 'MISCONDUCT_DATABASE_PATH'):
            # debugging
            misconduct_data = []
        else:
            misconduct_data = rtyaml.load(open(settings.MISCONDUCT_DATABASE_PATH))

        # Pre-fetch all members then add references to Person instances from numeric IDs.
        from person.models import Person
        people_map = Person.objects.in_bulk(set(entry["person"] for entry in misconduct_data))
        for entry in misconduct_data:
            entry["person"] = people_map[int(entry["person"])]

        for entry in misconduct_data:
            for consequence in entry.get("consequences", []):
                # Pre-render consequence dates.
                if isinstance(consequence.get("date"), (int, str)):
                    if len(str(consequence["date"])) == 4: # year alone
                        consequence["date_rendered"] = str(consequence["date"])
                        consequence["date_year"] = int(consequence["date"])
                    elif len(consequence["date"]) == 7: # YYYY-MM, but it's historical so we can't use strftime directly
                        consequence["date_rendered"] = date(2000, int(consequence["date"][5:7]), 1).strftime("%B") + " " + str(int(consequence["date"][0:4]))
                        consequence["date_year"] = int(consequence["date"][0:4])
                    else:
                        raise ValueError(consequence["date"])
                elif isinstance(consequence.get("date"), date):
                    consequence["date_rendered"] = date(2000, consequence["date"].month, consequence["date"].day).strftime("%b. %d").replace(" 0", " ") + ", " + str(consequence["date"].year)
                    consequence["date_year"] = consequence["date"].year
                else:
                    raise ValueError(consequence["date"])

                # Normalize links to list.
                if isinstance(consequence.get("link"), str):
                    consequence["links"] = [consequence["link"]]
                else:
                    consequence["links"] = consequence.get("link", [])
                consequence["wrap_link"] = (len(consequence["links"]) == 1) and (len(consequence.get("action", "") + consequence.get("text", "")) < 100)

                # Parse tags.
                if "tags" in consequence:
                    consequence["tags"] = set(consequence["tags"].split(" "))
                else:
                    consequence["tags"] = set()

                # Map consequence back to main entry. When plotting by consequence,
                # we may want to know what entry it was for.
                consequence["entry"] = entry


            # Split all tags and percolate consequence tags to the top level.
            if "tags" in entry:
                entry["tags"] = set(entry["tags"].split(" "))
            else:
                entry["tags"] = set()
            for cons in entry["consequences"]:
                entry["tags"] |= cons["tags"]

            # Mark the entry as 'alleged' if no guilty consequence tag (which we've percolated to the top) is present.
            entry["alleged"] = (len(entry["tags"] & misconduct_tags_guilty) == 0)

    return misconduct_data
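Given the normalization above, each misconduct entry needs a numeric person ID, optional space-separated tags, and a list of consequences whose date is a year, a YYYY-MM string, or a full date, with link either a single URL or a list. An invented entry of that shape (field values are placeholders):

example_misconduct_entry = """
- person: 400001
  tags: ethics
  consequences:
    - date: 2017-06
      action: Ethics Committee opened an inquiry.
      link: https://example.com/report
      tags: investigation
"""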