Example #1
    def mock_request_network_topology(self, file_name):
        cached_file_path = path.join(
            path.dirname(__file__), file_name + '.json')

        if path.isfile(cached_file_path):
            LOG.debug('Loading topology from file: %r', cached_file_path)
            with open(cached_file_path, 'rt') as fd:
                topology = jsonutils.loads(str(fd.read()), encoding='utf-8')
        else:
            LOG.debug(
                'Getting topology from ODL: %r', self.NETOWORK_TOPOLOGY_URL)
            request = requests.get(
                self.NETOWORK_TOPOLOGY_URL, auth=('admin', 'admin'),
                headers={'Content-Type': 'application/json'})
            request.raise_for_status()

            with open(cached_file_path, 'wt') as fd:
                LOG.debug('Saving topology to file: %r', cached_file_path)
                topology = request.json()
                jsonutils.dump(
                    topology, fd, sort_keys=True, indent=4,
                    separators=(',', ': '))

        mocked_request = self.patch(
            mech_driver.odl_client.requests, 'request',
            return_value=mock.MagicMock(
                spec=requests.Response,
                json=mock.MagicMock(return_value=topology)))

        return mocked_request
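
For orientation, the common thread of these examples is oslo_serialization.jsonutils.dump, which writes JSON straight to an open text-mode file object (like json.dump). A minimal, self-contained round trip might look like this sketch (the file name is illustrative):

from oslo_serialization import jsonutils

data = {'a': 'b'}
with open('topology_cache.json', 'w') as fp:
    # dump() serializes to the file-like object; dumps() would return a str
    jsonutils.dump(data, fp, sort_keys=True, indent=4)
with open('topology_cache.json') as fp:
    assert jsonutils.load(fp) == data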
Example #2
def dump_tables(format_):
    if format_ == FORMAT_PRETTY:
        for table in db_tables:
            print_whole_table(table)
    elif format_ == FORMAT_JSON:
        result = {table: _get_table_struct(table) for table in db_tables}
        jsonutils.dump(result, sys.stdout)
Example #3
 def test_old_default_policy_json_file_fail_upgrade(self):
     self.flags(policy_file='policy.json', group="oslo_policy")
     tmpfilename = os.path.join(self.temp_dir.path, 'policy.json')
     with open(tmpfilename, 'w') as fh:
         jsonutils.dump(self.data, fh)
     self.assertEqual(upgradecheck.Code.FAILURE,
                      self.cmd._check_policy_json().code)
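
The upgrade-check examples above and below write a legacy policy.json and expect the check to report it. The migration the check nudges operators toward is essentially re-serializing the same rules as YAML; a hedged sketch of that step (file names illustrative, assuming PyYAML is available):

from oslo_serialization import jsonutils
import yaml

with open('policy.json') as fh:
    rules = jsonutils.load(fh)
with open('policy.yaml', 'w') as fh:
    yaml.safe_dump(rules, fh, default_flow_style=False)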
Example #4
    def setUp(self):
        super(TestUpgradeCheckPolicyJSON, self).setUp()
        conf_fixture = self.useFixture(config.Config())
        conf_fixture.load_raw_values()
        self.conf = conf_fixture.conf
        self.conf.register_opts(policy_opts._options,
                                group=policy_opts._option_group)

        self.cmd = upgradecheck.UpgradeCommands()
        self.cmd._upgrade_checks = (('Policy File JSON to YAML Migration',
                                     (common_checks.check_policy_json, {
                                         'conf': self.conf
                                     })), )

        self.data = {'rule_admin': 'True', 'rule_admin2': 'is_admin:True'}
        self.temp_dir = self.useFixture(fixtures.TempDir())
        fd, self.json_file = tempfile.mkstemp(dir=self.temp_dir.path)
        fd, self.yaml_file = tempfile.mkstemp(dir=self.temp_dir.path)
        with open(self.json_file, 'w') as fh:
            jsonutils.dump(self.data, fh)
        with open(self.yaml_file, 'w') as fh:
            yaml.dump(self.data, fh)

        original_search_dirs = cfg._search_dirs

        def fake_search_dirs(dirs, name):
            dirs.append(self.temp_dir.path)
            return original_search_dirs(dirs, name)

        mock_search_dir = self.useFixture(
            fixtures.MockPatch('oslo_config.cfg._search_dirs')).mock
        mock_search_dir.side_effect = fake_search_dirs
Example #5
    def mock_request_network_topology(self, file_name):
        cached_file_path = path.join(
            path.dirname(__file__), file_name + '.json')

        if path.isfile(cached_file_path):
            LOG.debug('Loading topology from file: %r', cached_file_path)
            with open(cached_file_path, 'rt') as fd:
                topology = jsonutils.loads(str(fd.read()), encoding='utf-8')
        else:
            LOG.debug(
                'Getting topology from ODL: %r', self.NETOWORK_TOPOLOGY_URL)
            request = requests.get(
                self.NETOWORK_TOPOLOGY_URL, auth=('admin', 'admin'),
                headers={'Content-Type': 'application/json'})
            request.raise_for_status()

            with open(cached_file_path, 'wt') as fd:
                LOG.debug('Saving topology to file: %r', cached_file_path)
                topology = request.json()
                jsonutils.dump(
                    topology, fd, sort_keys=True, indent=4,
                    separators=(',', ': '))

        mocked_request = self.patch(
            mech_driver.odl_client.requests, 'request',
            return_value=mock.MagicMock(
                spec=requests.Response,
                json=mock.MagicMock(return_value=topology)))

        return mocked_request
Example #6
 def _write_file(self, port_id, mapping_dict, file_format):
     filename = file_format % port_id
     if not os.path.exists(os.path.dirname(filename)):
         os.makedirs(os.path.dirname(filename))
     with open(filename, 'w') as f:
         jsonutils.dump(mapping_dict, f, indent=4)
     return filename
Example #7
def upgrade_policy(args=None):
    logging.basicConfig(level=logging.WARN)
    conf = cfg.ConfigOpts()
    conf.register_cli_opts(GENERATOR_OPTS + RULE_OPTS + UPGRADE_OPTS)
    conf.register_opts(GENERATOR_OPTS + RULE_OPTS + UPGRADE_OPTS)
    conf(args)
    with open(conf.policy, 'r') as input_data:
        policies = policy.parse_file_contents(input_data.read())
    default_policies = get_policies_dict(conf.namespace)

    _upgrade_policies(policies, default_policies)

    if conf.output_file:
        if conf.format == 'yaml':
            yaml.safe_dump(policies,
                           open(conf.output_file, 'w'),
                           default_flow_style=False)
        elif conf.format == 'json':
            jsonutils.dump(policies, open(conf.output_file, 'w'), indent=4)
    else:
        if conf.format == 'yaml':
            sys.stdout.write(yaml.safe_dump(policies,
                                            default_flow_style=False))
        elif conf.format == 'json':
            sys.stdout.write(jsonutils.dumps(policies, indent=4))
Example #8
def upgrade_policy(args=None, conf=None):
    logging.basicConfig(level=logging.WARN)
    # Allow the caller to pass in a local conf object for unit testing
    if conf is None:
        conf = cfg.CONF
    conf.register_cli_opts(GENERATOR_OPTS + RULE_OPTS + UPGRADE_OPTS)
    conf.register_opts(GENERATOR_OPTS + RULE_OPTS + UPGRADE_OPTS)
    conf(args)
    _check_for_namespace_opt(conf)
    with open(conf.policy, 'r') as input_data:
        policies = policy.parse_file_contents(input_data.read())
    default_policies = get_policies_dict(conf.namespace)

    _upgrade_policies(policies, default_policies)

    if conf.output_file:
        with open(conf.output_file, 'w') as fh:
            if conf.format == 'yaml':
                yaml.safe_dump(policies, fh, default_flow_style=False)
            elif conf.format == 'json':
                LOG.warning(policy.WARN_JSON)
                jsonutils.dump(policies, fh, indent=4)
    else:
        if conf.format == 'yaml':
            sys.stdout.write(yaml.safe_dump(policies,
                                            default_flow_style=False))
        elif conf.format == 'json':
            LOG.warning(policy.WARN_JSON)
            sys.stdout.write(jsonutils.dumps(policies, indent=4))
Example #9
    def test_dump_namedtuple(self):
        expected = '[1, 2]'
        json_dict = collections.namedtuple("foo", "bar baz")(1, 2)

        fp = six.StringIO()
        jsonutils.dump(json_dict, fp)

        self.assertEqual(expected, fp.getvalue())
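
A standalone illustration of the behavior this test pins down: namedtuples are tuples, so they serialize as plain JSON arrays and the field names are lost (io.StringIO stands in for six.StringIO here; purely illustrative):

import collections
import io

from oslo_serialization import jsonutils

Point = collections.namedtuple('Point', 'x y')
buf = io.StringIO()
jsonutils.dump(Point(1, 2), buf)
print(buf.getvalue())  # [1, 2] -- the 'x' and 'y' names are not preserved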
Example #10
 def _write_files(self, tempdir, templates):
     files = []
     for template in templates:
         fp = tempfile.NamedTemporaryFile(suffix=".json", mode="w", dir=tempdir, delete=False)
         json.dump(template, fp)
         files.append(fp.name)
         fp.close()
     return files
Example #11
def write_config(cfg, version):
    with open("sahara/plugins/ambari/resources/configs-%s.json" % version,
              "w") as fp:
        jsonutils.dump(cfg,
                       fp,
                       indent=4,
                       sort_keys=True,
                       separators=(",", ": "))
Example #12
def save_environment(home):
    """Read the environment from the terminal where the scheduler is
    initialized and save the environment variables to be reused within the
    windows service
    """
    env_path = os.path.join(home, 'env.json')
    # json.dump() writes text, so the target file must be opened in text mode
    with open(env_path, 'w') as tmp:
        json.dump(os.environ.copy(), tmp)
Example #13
    def test_dump(self):
        expected = '{"a": "b"}'
        json_dict = {"a": "b"}

        fp = six.StringIO()
        jsonutils.dump(json_dict, fp)

        self.assertEqual(expected, fp.getvalue())
Example #14
    def test_dump_namedtuple(self):
        expected = "[1, 2]"
        json_dict = collections.namedtuple("foo", "bar baz")(1, 2)

        fp = six.StringIO()
        jsonutils.dump(json_dict, fp)

        self.assertEqual(expected, fp.getvalue())
Example #15
    def test_dump(self):
        expected = '{"a": "b"}'
        json_dict = {'a': 'b'}

        fp = six.StringIO()
        jsonutils.dump(json_dict, fp)

        self.assertEqual(expected, fp.getvalue())
Example #16
    def _prepare_policy(self):
        # Convert all actions to require the specified role
        policy = {}
        for rule in policies.list_rules():
            policy[rule.name] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path, 'policy.json')
        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy, f)
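
Fixtures like this one usually go on to point oslo.policy at the freshly written file, as several later examples do; a minimal follow-up sketch (the option names mirror those used elsewhere in this collection and are assumptions here):

        CONF.set_override('policy_file', self.policy_file, group='oslo_policy')
        CONF.set_override('policy_dirs', [], group='oslo_policy')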
Example #17
    def _prepare_policy(self):
        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path, 'policy.json')

        # load the fake_policy data and add the missing default rules.
        policy_rules = jsonutils.loads(fake_policy.policy_data)

        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy_rules, f)
        CONF.set_override('policy_dirs', [], group='oslo_policy')
Example #18
    def _modify_policy_file(self, rules):
        with open(self.policy_file, 'r+b') as policy_file:
            existing_policy = jsonutils.load(policy_file)

        existing_policy.update(rules)

        with open(self.policy_file, 'w') as policy_file:
            jsonutils.dump(existing_policy, policy_file)

        time.sleep(2)
Example #19
    def _modify_policy_file(self, rules):
        with open(self.policy_file, 'r+b') as policy_file:
            existing_policy = jsonutils.load(policy_file)

        existing_policy.update(rules)

        with open(self.policy_file, 'w') as policy_file:
            jsonutils.dump(existing_policy, policy_file)

        time.sleep(2)
Example #20
    def test_only_old_default_policy_file_exist(self, mock_get):
        mock_get.return_value = cfg.LocationInfo(cfg.Locations.set_default,
                                                 'None')
        tmpfilename = os.path.join(self.tmpdir.path, 'policy.json')
        with open(tmpfilename, 'w') as fh:
            jsonutils.dump(self.data, fh)

        selected_policy_file = policy.pick_policy_file(policy_file=None)
        self.assertEqual(policy.CONF.oslo_policy.policy_file, 'policy.yaml')
        self.assertEqual(selected_policy_file, 'policy.json')
Example #21
    def _prepare_policy(self):
        # Convert all actions to require the specified role
        policy = {}
        for rule in policies.list_rules():
            policy[rule.name] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path, 'policy.json')
        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy, f)
Example #22
 def _write_files(self, tempdir, templates):
     files = []
     for template in templates:
         fp = tempfile.NamedTemporaryFile(suffix=".json",
                                          dir=tempdir,
                                          delete=False)
         json.dump(template, fp)
         files.append(fp.name)
         fp.close()
     return files
Example #23
    def _prepare_policy(self):
        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path, 'policy.json')

        # load the fake_policy data and add the missing default rules.
        policy_rules = jsonutils.loads(fake_policy.policy_data)

        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy_rules, f)
        CONF.set_override('policy_dirs', [], group='oslo_policy')
Example #24
    def _prepare_policy(self):
        policy = jsonutils.load(open(CONF.oslo_policy.policy_file))

        # Convert all actions to require specified role
        for action, rule in six.iteritems(policy):
            policy[action] = "role:%s" % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path, "policy.json")
        with open(self.policy_file, "w") as f:
            jsonutils.dump(policy, f)
Example #25
    def setUp(self):
        super(DefaultPolicyTestCase, self).setUp()
        tmpfilename = self.get_temp_file_path("policy.json")
        self.rules = {"default": "", "example:exist": "!"}
        with open(tmpfilename, "w") as policyfile:
            jsonutils.dump(self.rules, policyfile)
        cfg.CONF.set_override("policy_file", tmpfilename)
        policy.refresh()
        self.addCleanup(policy.reset)

        self.context = context.Context("fake", "fake")
Example #26
    def test_both_default_policy_file_exist(self):
        tmpfilename1 = os.path.join(self.tmpdir.path, 'policy.json')
        with open(tmpfilename1, 'w') as fh:
            jsonutils.dump(self.data, fh)
        tmpfilename2 = os.path.join(self.tmpdir.path, 'policy.yaml')
        with open(tmpfilename2, 'w') as fh:
            yaml.dump(self.data, fh)

        selected_policy_file = policy.pick_policy_file(policy_file=None)
        self.assertEqual(policy.CONF.oslo_policy.policy_file, 'policy.yaml')
        self.assertEqual(selected_policy_file, 'policy.yaml')
Example #27
    def _prepare_policy(self):
        policy = jsonutils.load(open(CONF.policy_file))

        # Convert all actions to require specified role
        for action, rule in policy.iteritems():
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path, 'policy.json')
        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy, f)
Example #28
    def _prepare_policy(self):
        policy_dir = self.useFixture(fixtures.TempDir())
        policy_file = os.path.join(policy_dir.path, 'policy.yaml')
        # load the fake_policy data and add the missing default rules.
        policy_rules = jsonutils.loads('{}')
        self.add_missing_default_rules(policy_rules)
        with open(policy_file, 'w') as f:
            jsonutils.dump(policy_rules, f)

        BaseTestCase.conf_override(policy_file=policy_file, group='oslo_policy')
        BaseTestCase.conf_override(policy_dirs=[], group='oslo_policy')
Example #29
    def setUp(self):
        super(DefaultPolicyTestCase, self).setUp()
        tmpfilename = self.get_temp_file_path('policy.json')
        self.rules = {
            "default": '',
            "example:exist": '!',
        }
        with open(tmpfilename, "w") as policyfile:
            jsonutils.dump(self.rules, policyfile)
        policy.refresh(policy_file=tmpfilename)

        self.context = context.Context('fake', 'fake')
Example #30
    def _prepare_policy(self):
        policy = jsonutils.load(open(CONF.policy_file))

        # Convert all actions to require specified role
        for action, rule in policy.iteritems():
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path,
                                            'policy.json')
        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy, f)
Example #31
    def setUp(self):
        super(DefaultPolicyTestCase, self).setUp()
        tmpfilename = self.get_temp_file_path('policy.json')
        self.rules = {
            "default": '',
            "example:exist": '!',
        }
        with open(tmpfilename, "w") as policyfile:
            jsonutils.dump(self.rules, policyfile)
        policy.refresh(policy_file=tmpfilename)

        self.context = context.Context('fake', 'fake')
Example #32
    def _prepare_policy(self):
        policy_dir = self.useFixture(fixtures.TempDir())
        policy_file = os.path.join(policy_dir.path, 'policy.yaml')
        # load the fake_policy data and add the missing default rules.
        policy_rules = jsonutils.loads('{}')
        self.add_missing_default_rules(policy_rules)
        with open(policy_file, 'w') as f:
            jsonutils.dump(policy_rules, f)

        BaseTestCase.conf_override(policy_file=policy_file,
                                   group='oslo_policy')
        BaseTestCase.conf_override(policy_dirs=[], group='oslo_policy')
Example #33
    def _write_config(self, container_id, ifname, port_name, vif):
        """Write vhostuser configuration file

        This function writes a configuration file; the file is used by the
        application inside the container and by the cleanup procedure
        (the disconnect method).
        """
        vhost_conf = {}
        vhost_conf["vhostname"] = port_name
        vhost_conf["vhostmac"] = vif.address
        vhost_conf["mode"] = vif.mode
        with open(self._config_file_path(container_id, ifname), "w") as f:
            jsonutils.dump(vhost_conf, f)
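
The docstring notes that the same file is consulted again during cleanup; a sketch of that counterpart, reusing the _config_file_path helper shown above (the method name _read_config is an assumption):

    def _read_config(self, container_id, ifname):
        # Load the vhostuser configuration written by _write_config
        with open(self._config_file_path(container_id, ifname)) as f:
            return jsonutils.load(f)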
Example #34
    def _prepare_policy(self):
        with open(CONF.oslo_policy.policy_file) as fp:
            policy = fp.read()
        policy = jsonutils.loads(policy)
        self.add_missing_default_rules(policy)

        # Convert all actions to require specified role
        for action in policy:
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path, 'policy.json')
        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy, f)
Example #35
 def write_datapath_allocation(self):
     if not cfg.CONF.df.write_datapath_allocation:
         return
     dppath = cfg.CONF.df.datapath_allocation_output_path
     if (path.isfile(dppath) and
             not cfg.CONF.df.overwrite_datapath_allocation_output_path):
         LOG.warning("File %s exists, but cannot overwrite", dppath)
         return
     try:
         with open(dppath, 'w') as f:
             dp_allocs = self._get_dp_allocs_basic_dictionary()
             jsonutils.dump(dp_allocs, f)
     except IOError:
         LOG.exception("Cannot open file %s", dppath)
Example #36
    def _prepare_policy(self):
        with open(CONF.oslo_policy.policy_file) as fp:
            policy = fp.read()
        policy = jsonutils.loads(policy)
        self.add_missing_default_rules(policy)

        # Convert all actions to require specified role
        for action in policy:
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file = os.path.join(self.policy_dir.path,
                                            'policy.json')
        with open(self.policy_file, 'w') as f:
            jsonutils.dump(policy, f)
Example #37
    def _take_action(self, client, parsed_args):
        kwargs = {}
        if parsed_args.show_progress or parsed_args.full_dump_events:
            kwargs['show_progress'] = True
        data = utils.get_resource(
            client.clusters, parsed_args.cluster, **kwargs).to_dict()
        provision_steps = data.get('provision_progress', [])
        provision_steps = utils.created_at_sorted(provision_steps)

        if parsed_args.full_dump_events:
            file_name = utils.random_name('event-logs')
            # making full dump
            with open(file_name, 'w') as file:
                jsonutils.dump(provision_steps, file, indent=4)
            sys.stdout.write('Event log dump saved to file: %s\n' % file_name)
        return data, provision_steps
Example #38
    def _take_action(self, client, parsed_args):
        kwargs = {}
        if parsed_args.show_progress or parsed_args.full_dump_events:
            kwargs['show_progress'] = True
        data = utils.get_resource(client.clusters, parsed_args.cluster,
                                  **kwargs).to_dict()
        provision_steps = data.get('provision_progress', [])
        provision_steps = utils.created_at_sorted(provision_steps)

        if parsed_args.full_dump_events:
            file_name = utils.random_name('event-logs')
            # making full dump
            with open(file_name, 'w') as file:
                jsonutils.dump(provision_steps, file, indent=4)
            sys.stdout.write('Event log dump saved to file: %s\n' % file_name)
        return data, provision_steps
Example #39
    def as_create(self, alloc):
        asvc = {
            "uuid": alloc["uuid"],
            "interface-name": SVC_OVS_PORT,
            "service-mac": self.svc_ovsport_mac,
            "domain-policy-space": alloc["domain-policy-space"],
            "domain-name": alloc["domain-name"],
            "service-mapping": [
                {
                    "service-ip": "169.254.169.254",
                    "gateway-ip": "169.254.1.1",
                    "next-hop-ip": alloc["next-hop-ip"],
                },
            ],
        }

        try:
            self.mgr.add_ip(alloc["next-hop-ip"])
        except Exception as e:
            LOG.warn("EPwatcher: Exception in adding IP: %s" % str(e))

        asfilename = AS_FILE_NAME_FORMAT % asvc["uuid"]
        asfilename = "%s/%s" % (AS_MAPPING_DIR, asfilename)
        try:
            with open(asfilename, "w") as f:
                jsonutils.dump(asvc, f)
        except Exception as e:
            LOG.warn("EPwatcher: Exception in writing services file: %s" %
                     str(e))

        proxyfilename = PROXY_FILE_NAME_FORMAT % asvc["uuid"]
        proxyfilename = "%s/%s" % (MD_DIR, proxyfilename)
        proxystr = self.proxyconfig(alloc)
        try:
            with open(proxyfilename, "w") as f:
                f.write(proxystr)
            pidfile = PID_FILE_NAME_FORMAT % asvc["uuid"]
            self.mgr.sh("rm -f %s" % pidfile)
        except Exception as e:
            LOG.warn("EPwatcher: Exception in writing proxy file: %s" % str(e))
Example #40
    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)" % parsed_args)
        client = self.app.client_manager.data_processing

        if not parsed_args.file:
            parsed_args.file = parsed_args.plugin

        data = client.plugins.get_version_details(parsed_args.plugin, parsed_args.version).to_dict()

        if path.exists(parsed_args.file):
            self.log.error('File "%s" already exists. Choose another one with ' "--file argument." % parsed_args.file)
        else:
            with open(parsed_args.file, "w") as f:
                jsonutils.dump(data, f, indent=4)
                self.log.info(
                    '"%(plugin)s" plugin configs were saved in "%(file)s" '
                    "file" % {"plugin": parsed_args.plugin, "file": parsed_args.file}
                )
Example #41
def print_table(table, format_):
    if table == db_common.UNIQUE_KEY_TABLE:
        keys = nb_api.driver.get_all_keys(table)
        values = [{'id': key} for key in keys]
        _print_list(['id'], values, format_)
        return
    model = _get_model_or_exit(table)
    instances = nb_api.get_all(model)

    if not instances:
        if format_ == FORMAT_PRETTY:
            print('Table is empty: ' + table)
        elif format_ == FORMAT_JSON:
            jsonutils.dump([], sys.stdout)
        return

    keys = [{'key': instance.id} for instance in instances]
    _print_list(['key'], keys, 'Keys for table', format_)
Example #42
    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)" % parsed_args)
        client = self.app.client_manager.data_processing

        if not parsed_args.file:
            parsed_args.file = parsed_args.job_type

        data = client.jobs.get_configs(parsed_args.job_type).to_dict()

        if path.exists(parsed_args.file):
            self.log.error('File "%s" already exists. Choose another one with '
                           '--file argument.' % parsed_args.file)
        else:
            with open(parsed_args.file, 'w') as f:
                jsonutils.dump(data, f, indent=4)
            sys.stdout.write(
                '"%(type)s" job configs were saved in "%(file)s" '
                'file' % {'type': parsed_args.job_type,
                          'file': parsed_args.file})
Example #43
    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)" % parsed_args)
        client = self.app.client_manager.data_processing

        kwargs = {}
        if parsed_args.show_progress or parsed_args.full_dump_events:
            kwargs['show_progress'] = True
        data = utils.get_resource(client.clusters, parsed_args.cluster,
                                  **kwargs).to_dict()
        provision_steps = data.get('provision_progress', [])
        provision_steps = utils.created_at_sorted(provision_steps)

        if parsed_args.full_dump_events:
            file_name = utils.random_name('event-logs')
            # making full dump
            with open(file_name, 'w') as file:
                jsonutils.dump(provision_steps, file, indent=4)
            sys.stdout.write('Event log dump saved to file: %s\n' % file_name)

        _format_cluster_output(data)
        fields = []
        if parsed_args.verification:
            ver_data, fields = _prepare_health_checks(data)
            data.update(ver_data)
        fields.extend(CLUSTER_FIELDS)

        data = self.dict2columns(utils.prepare_data(data, fields))

        if parsed_args.show_progress:
            output_steps = []
            for step in provision_steps:
                st_name, st_type = step['step_name'], step['step_type']
                description = "%s: %s" % (st_type, st_name)
                if step['successful'] is None:
                    progress = "Step in progress"
                elif step['successful']:
                    progress = "Step completed successfully"
                else:
                    progress = 'Step has failed events'
                output_steps += [(description, progress)]
            data = utils.extend_columns(data, output_steps)

        return data
Example #44
    def as_create(self, alloc):
        asvc = {
            "uuid": alloc["uuid"],
            "interface-name": SVC_OVS_PORT,
            "service-mac": self.svc_ovsport_mac,
            "domain-policy-space": alloc["domain-policy-space"],
            "domain-name": alloc["domain-name"],
            "service-mapping": [
                {
                    "service-ip": "169.254.169.254",
                    "gateway-ip": "169.254.1.1",
                    "next-hop-ip": alloc["next-hop-ip"],
                },
            ],
        }

        try:
            self.mgr.add_ip(alloc["next-hop-ip"])
        except Exception as e:
            LOG.warn("EPwatcher: Exception in adding IP: %s" %
                     str(e))

        asfilename = AS_FILE_NAME_FORMAT % asvc["uuid"]
        asfilename = "%s/%s" % (AS_MAPPING_DIR, asfilename)
        try:
            with open(asfilename, "w") as f:
                jsonutils.dump(asvc, f)
        except Exception as e:
            LOG.warn("EPwatcher: Exception in writing services file: %s" %
                     str(e))

        proxyfilename = PROXY_FILE_NAME_FORMAT % asvc["uuid"]
        proxyfilename = "%s/%s" % (MD_DIR, proxyfilename)
        proxystr = self.proxyconfig(alloc)
        try:
            with open(proxyfilename, "w") as f:
                f.write(proxystr)
            pidfile = PID_FILE_NAME_FORMAT % asvc["uuid"]
            self.mgr.sh("rm -f %s" % pidfile)
        except Exception as e:
            LOG.warn("EPwatcher: Exception in writing proxy file: %s" %
                     str(e))
Example #45
    def setUp(self):
        """Copy live policy.json file and convert all actions to
           allow users of the specified role only
        """
        super(RoleBasedPolicyFixture, self).setUp()
        policy = jsonutils.load(open(CONF.policy_file))

        # Convert all actions to require specified role
        for action, rule in policy.iteritems():
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                             'policy.json')
        with open(self.policy_file_name, 'w') as policy_file:
            jsonutils.dump(policy, policy_file)
        CONF.set_override('policy_file', self.policy_file_name)
        nova.policy.reset()
        nova.policy.init()
        self.addCleanup(nova.policy.reset)
Example #46
    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)" % parsed_args)
        client = self.app.client_manager.data_processing

        if not parsed_args.file:
            parsed_args.file = parsed_args.job_type

        data = client.jobs.get_configs(parsed_args.job_type).to_dict()

        if path.exists(parsed_args.file):
            self.log.error('File "%s" already exists. Choose another one with '
                           '--file argument.' % parsed_args.file)
        else:
            with open(parsed_args.file, 'w') as f:
                jsonutils.dump(data, f, indent=4)
            sys.stdout.write('"%(type)s" job configs were saved in "%(file)s" '
                             'file' % {
                                 'type': parsed_args.job_type,
                                 'file': parsed_args.file
                             })
Example #47
    def setUp(self):
        """Copy live policy.json file and convert all actions to
           allow users of the specified role only
        """
        super(RoleBasedPolicyFixture, self).setUp()
        policy = jsonutils.load(open(CONF.policy_file))

        # Convert all actions to require specified role
        for action, rule in policy.iteritems():
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                            'policy.json')
        with open(self.policy_file_name, 'w') as policy_file:
            jsonutils.dump(policy, policy_file)
        CONF.set_override('policy_file', self.policy_file_name)
        nova.policy.reset()
        nova.policy.init()
        self.addCleanup(nova.policy.reset)
Example #48
    def setUp(self):
        super(TestUpgradeCheckPolicyJSON, self).setUp()
        self.cmd = status.UpgradeCommands()
        policy.CONF.clear_override('policy_file', group='oslo_policy')
        self.data = {'rule_admin': 'True', 'rule_admin2': 'is_admin:True'}
        self.temp_dir = self.useFixture(fixtures.TempDir())
        fd, self.json_file = tempfile.mkstemp(dir=self.temp_dir.path)
        fd, self.yaml_file = tempfile.mkstemp(dir=self.temp_dir.path)

        with open(self.json_file, 'w') as fh:
            jsonutils.dump(self.data, fh)
        with open(self.yaml_file, 'w') as fh:
            yaml.dump(self.data, fh)

        original_search_dirs = cfg._search_dirs

        def fake_search_dirs(dirs, name):
            dirs.append(self.temp_dir.path)
            return original_search_dirs(dirs, name)

        self.stub_out('oslo_config.cfg._search_dirs', fake_search_dirs)
Example #49
    def mapping_to_file(self, port, mapping, ips, device_owner):
        """Mapping to file.

        Converts the port mapping into a file.
        """
        # if device_owner == n_constants.DEVICE_OWNER_DHCP:
        #     ips.append(METADATA_DEFAULT_IP)
        mapping_dict = {
            "policy-space-name": mapping['ptg_tenant'],
            "endpoint-group-name": (mapping['app_profile_name'] + "|" +
                                    mapping['endpoint_group_name']),
            "interface-name": port.port_name,
            "ip": ips,
            "mac": port.vif_mac,
            "uuid": port.vif_id,
            "promiscuous-mode": mapping['promiscuous_mode']}
        if 'vm-name' in mapping:
            mapping_dict['attributes'] = {'vm-name': mapping['vm-name']}
        filename = self.epg_mapping_file % port.vif_id
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        with open(filename, 'w') as f:
            jsonutils.dump(mapping_dict, f)
Example #50
def action_generate_nodes_fixture(params):
    from oslo_serialization import jsonutils
    from nailgun.logger import logger
    from nailgun.utils import fake_generator

    logger.info('Generating new nodes fixture...')
    total_nodes_count = params.total_nodes
    fixtures_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'nailgun/fixtures/')
    file_path = os.path.join(
        fixtures_dir,
        '{0}_fake_nodes_environment.json'.format(total_nodes_count)
    )
    generator = fake_generator.FakeNodesGenerator()
    res = generator.generate_fake_nodes(
        total_nodes_count, error_nodes_count=params.error_nodes,
        offline_nodes_count=params.offline_nodes,
        min_ifaces_num=params.min_ifaces_num)

    with open(file_path, 'w') as file_to_write:
        jsonutils.dump(res, file_to_write, indent=4)

    logger.info('Done. New fixture was stored in {0} file'.format(file_path))
Example #51
    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        client = self.app.client_manager.data_processing

        if not parsed_args.file:
            parsed_args.file = (parsed_args.plugin + '-' +
                                parsed_args.plugin_version)

        if path.exists(parsed_args.file):
            msg = ('File "%s" already exists. Choose another one with '
                   '--file argument.' % parsed_args.file)
            raise exceptions.CommandError(msg)
        else:
            data = client.plugins.get_version_details(
                parsed_args.plugin, parsed_args.plugin_version).to_dict()

            with open(parsed_args.file, 'w') as f:
                jsonutils.dump(data, f, indent=4)
            sys.stdout.write(
                '"%(plugin)s" plugin "%(version)s" version configs '
                'was saved in "%(file)s" file\n' % {
                    'plugin': parsed_args.plugin,
                    'version': parsed_args.plugin_version,
                    'file': parsed_args.file})
Example #52
 def dump(self, obj, fp):
     return jsonutils.dump(obj, fp)
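
This wrapper only forwards to jsonutils.dump; the practical difference from jsonutils.dumps is the target, a file-like object versus a returned string. A tiny illustrative comparison:

import io

from oslo_serialization import jsonutils

obj = {'a': 'b'}
buf = io.StringIO()
jsonutils.dump(obj, buf)                       # writes into the stream
assert buf.getvalue() == jsonutils.dumps(obj)  # dumps() returns the same text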
Example #53
def save_doc_to_json_file(doc, fname, debug=False):
    with open(fname, 'w') as fd:
        json.dump(doc, fd, indent=4)
    if debug:
        print('Saved doc to file: {0}'.format(fname))
Example #54
 def _write_policy_file(self, filename, rules):
     with open(os.path.join(self.conf_dir, filename), 'w') as pol_file:
         jsonutils.dump(rules, pol_file)
Example #55
def write_jsonfile(name, data):
    try:
        with open(name, "w") as f:
            jsonutils.dump(data, f)
    except Exception as e:
        LOG.warn("Exception in writing file: %s" % str(e))
Example #56
def write_config(cfg, version):
    with open("sahara/plugins/ambari/resources/configs-%s.json" % version,
              "w") as fp:
        jsonutils.dump(cfg, fp, indent=4, sort_keys=True,
                       separators=(",", ": "))
Example #57
 def _write_file(self, port_id, mapping_dict, file_format):
     filename = file_format % port_id
     if not os.path.exists(os.path.dirname(filename)):
         os.makedirs(os.path.dirname(filename))
     with open(filename, 'w') as f:
         jsonutils.dump(mapping_dict, f, indent=4)