Example #1
0
    def _get_metadata(self):
        """Return location metadata loaded from the configured file.

        Reads the JSON file named by filesystem_store_metadata_file and
        checks it with glance_store.  Any failure (bad JSON, unreadable
        path, or other error) is logged and an empty dict is returned
        instead.
        """
        metadata_file = self.conf.glance_store.filesystem_store_metadata_file
        if metadata_file is None:
            return {}

        try:
            with open(metadata_file, 'r') as fh:
                metadata = jsonutils.load(fh)
            glance_store.check_location_metadata(metadata)
        except exceptions.BackendException as bee:
            LOG.error(_('The JSON in the metadata file %(file)s could not '
                        'be used: %(bee)s  An empty dictionary will be '
                        'returned to the client.')
                      % {'file': metadata_file, 'bee': str(bee)})
            return {}
        except IOError as ioe:
            LOG.error(_('The path for the metadata file %(file)s could not be '
                        'opened: %(io)s  An empty dictionary will be returned '
                        'to the client.')
                      % {'file': metadata_file, 'io': ioe})
            return {}
        except Exception as ex:
            LOG.exception(_('An error occurred processing the storage systems '
                            'meta data file: %s.  An empty dictionary will be '
                            'returned to the client.') % str(ex))
            return {}
        return metadata
 def _load_file(self, handle):
     """Parse *handle* as JSON; on invalid input, log and return {}."""
     try:
         decoded = jsonutils.load(handle)
     except ValueError as e:
         LOG.exception(_("Could not decode scheduler options: '%s'"), e)
         return {}
     return decoded
Example #3
0
 def _load_file(self, handle):
     """Read scheduler options from an open JSON file handle.

     Returns the decoded object, or an empty dict when the contents
     are not valid JSON.
     """
     try:
         return jsonutils.load(handle)
     except ValueError as exc:
         LOG.exception(_LE("Could not decode scheduler options: '%s'"), exc)
         return {}
    def _get_metadata(self):
        """Fetch store metadata from the configured JSON file.

        :returns: the validated metadata mapping, or {} when no file is
            configured or when reading/validating the file fails.
        """
        metadata_file = self.conf.glance_store.filesystem_store_metadata_file

        if metadata_file is None:
            return {}

        try:
            with open(metadata_file, 'r') as fptr:
                metadata = jsonutils.load(fptr)
            glance_store.check_location_metadata(metadata)
            return metadata
        except exceptions.BackendException as exc:
            msg = _('The JSON in the metadata file %(file)s could not '
                    'be used: %(bee)s  An empty dictionary will be '
                    'returned to the client.')
            LOG.error(msg % dict(file=metadata_file, bee=str(exc)))
            return {}
        except IOError as exc:
            msg = _('The path for the metadata file %(file)s could not be '
                    'opened: %(io)s  An empty dictionary will be returned '
                    'to the client.')
            LOG.error(msg % dict(file=metadata_file, io=exc))
            return {}
        except Exception as exc:
            msg = _('An error occurred processing the storage systems '
                    'meta data file: %s.  An empty dictionary will be '
                    'returned to the client.')
            LOG.exception(msg % str(exc))
            return {}
Example #5
0
def read_previous_results():
    """Read results of previous run.

    :return: dictionary of results if exist
    """
    path = settings.LOAD_TESTS_PATHS['load_previous_tests_results']
    try:
        with open(path, 'r') as results_file:
            return jsonutils.load(results_file)
    except (IOError, ValueError):
        return {}
Example #6
0
def read_previous_results():
    """Load the JSON results of the previous test run.

    :return: dict with the prior results, or an empty dict when the
        file is missing or holds invalid JSON
    """
    results = {}
    try:
        with open(settings.LOAD_TESTS_PATHS['load_previous_tests_results'],
                  'r') as results_file:
            results = jsonutils.load(results_file)
    except (IOError, ValueError):
        pass
    return results
    def test_load(self):
        """jsonutils.load decodes a byte stream in its declared encoding."""
        jsontext = u'{"a": "\u0442\u044d\u0441\u0442"}'
        expected = {u'a': u'\u0442\u044d\u0441\u0442'}

        for enc in ('utf-8', 'cp1251'):
            stream = six.BytesIO(jsontext.encode(enc))
            decoded = jsonutils.load(stream, encoding=enc)
            self.assertEqual(expected, decoded)
            # Both keys and values must come back as unicode text.
            for key, val in decoded.items():
                self.assertIsInstance(key, six.text_type)
                self.assertIsInstance(val, six.text_type)
Example #8
0
    def body_dict(self):
        """
        Returns the body content as a dictionary, deserializing per the
        Content-Type header.

        We add this method to ease future XML support, so the main code
        is not hardcoded to call pecans "request.json()" method.

        :raises exceptions.InvalidJson: if the body is not valid JSON
        :raises Exception: for unsupported content types
        """
        if self.content_type in JSON_TYPES:
            try:
                return jsonutils.load(self.body_file)
            except ValueError as value_error:
                # BaseException.message was removed in Python 3; str()
                # yields the same text and works on Python 2 as well.
                raise exceptions.InvalidJson(str(value_error))
        else:
            raise Exception('TODO: Unsupported Content Type')
Example #9
0
    def body_dict(self):
        """
        Returns the body content as a dictionary, deserializing per the
        Content-Type header.

        We add this method to ease future XML support, so the main code
        is not hardcoded to call pecans "request.json()" method.

        :raises exceptions.InvalidJson: if the body is not valid JSON
        :raises Exception: for unsupported content types
        """
        if self.content_type in JSON_TYPES:
            try:
                return jsonutils.load(self.body_file)
            except ValueError as value_error:
                # Exception.message does not exist on Python 3; use
                # str(), which is equivalent on both Python versions.
                raise exceptions.InvalidJson(str(value_error))
        else:
            raise Exception('TODO: Unsupported Content Type')
Example #10
0
    def _read_current_data(self):
        """Load the current run's data and append a row to report_data.

        Reads JSON from CURRENT_RUN_DATA_PATH, flattens it, records any
        new column names, and appends a mapping of per-key summed
        'expect_time' values plus the current UTC timestamp.
        """
        # Renamed from 'file', which shadowed the builtin of that name.
        with open(self.CURRENT_RUN_DATA_PATH) as data_file:
            data = jsonutils.load(data_file)

        tests = self._flatten_json_dictionary(data)

        now = datetime.now(tz=pytz.utc)

        current_test_data = dict(date=now.isoformat())

        for key, test in tests:
            if key not in self.column_names:
                self.column_names.append(key)

            # A generator expression avoids building a throwaway list.
            current_test_data[key] = sum(
                t[1]['expect_time'] for t in test.items())

        self.report_data.append(current_test_data)
Example #11
0
    def setUp(self):
        """Copy the live policy.json file, rewriting every action so only
        users holding the specified role are allowed, and point policy
        enforcement at the rewritten file.
        """
        super(RoleBasedPolicyFixture, self).setUp()
        # Close the source file deterministically instead of leaking the
        # handle from a bare open() call.
        with open(CONF.policy_file) as policy_file:
            policy = jsonutils.load(policy_file)

        # Convert all actions to require the specified role.  Iterating
        # a snapshot of the keys works on both Python 2 and 3, unlike
        # dict.iteritems(), which was removed in Python 3.
        for action in list(policy):
            policy[action] = "role:%s" % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                             "policy.json")
        with open(self.policy_file_name, "w") as policy_file:
            jsonutils.dump(policy, policy_file)
        CONF.set_override("policy_file", self.policy_file_name)
        nova.policy.reset()
        nova.policy.init()
        # Make sure policy state does not leak into other tests.
        self.addCleanup(nova.policy.reset)
    def _read_current_data(self):
        """Load the current run's data and append a row to report_data.

        Flattens the JSON read from CURRENT_RUN_DATA_PATH, tracks any
        new column names, and appends a mapping of per-key summed
        'expect_time' values keyed alongside the current UTC timestamp.
        """
        # 'file' shadowed the builtin; a descriptive name is clearer.
        with open(self.CURRENT_RUN_DATA_PATH) as data_file:
            data = jsonutils.load(data_file)

        tests = self._flatten_json_dictionary(data)

        now = datetime.now(tz=pytz.utc)

        current_test_data = dict(date=now.isoformat())

        for key, test in tests:
            if key not in self.column_names:
                self.column_names.append(key)

            # Sum directly over a generator instead of an interim list.
            current_test_data[key] = sum(
                t[1]['expect_time'] for t in test.items()
            )

        self.report_data.append(current_test_data)
    def setUp(self):
        """Copy the live policy.json file, converting all actions so that
        only users of the specified role are allowed, then re-initialize
        nova policy from the rewritten copy.
        """
        super(RoleBasedPolicyFixture, self).setUp()
        # Use a context manager so the file handle is not leaked by a
        # bare open() call.
        with open(CONF.policy_file) as policy_file:
            policy = jsonutils.load(policy_file)

        # Convert all actions to require specified role.  dict.iteritems()
        # does not exist on Python 3; iterate a key snapshot instead.
        for action in list(policy):
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                             'policy.json')
        with open(self.policy_file_name, 'w') as policy_file:
            jsonutils.dump(policy, policy_file)
        CONF.set_override('policy_file', self.policy_file_name)
        nova.policy.reset()
        nova.policy.init()
        # Restore the global policy state after the test finishes.
        self.addCleanup(nova.policy.reset)
Example #14
0
    def _validate_metadata(self, metadata_file):
        """Validate metadata against json schema.

        If metadata is valid then cache metadata and use it when
        creating new image.

        :param metadata_file: JSON metadata file path
        :raises: BadStoreConfiguration exception if metadata is not valid.
        """
        try:
            with open(metadata_file, 'r') as fptr:
                metadata = jsonutils.load(fptr)

            if isinstance(metadata, dict):
                # A single mountpoint may be given as a bare dictionary;
                # normalize it to a one-element list.
                metadata = [metadata]

            # Validate metadata against json schema
            jsonschema.validate(metadata, MULTI_FILESYSTEM_METADATA_SCHEMA)
            glance_store.check_location_metadata(metadata)
            self.FILESYSTEM_STORE_METADATA = metadata
        except (jsonschema.exceptions.ValidationError,
                exceptions.BackendException, ValueError) as vee:
            reason = (_('The JSON in the metadata file %(file)s is '
                        'not valid and it can not be used: '
                        '%(vee)s.') %
                      {'file': metadata_file,
                       'vee': utils.exception_to_str(vee)})
            LOG.error(reason)
            raise exceptions.BadStoreConfiguration(store_name="filesystem",
                                                   reason=reason)
        except IOError as ioe:
            reason = (_('The path for the metadata file %(file)s could '
                        'not be accessed: '
                        '%(ioe)s.') %
                      {'file': metadata_file,
                       'ioe': utils.exception_to_str(ioe)})
            LOG.error(reason)
            raise exceptions.BadStoreConfiguration(store_name="filesystem",
                                                   reason=reason)
Example #15
0
    def _validate_metadata(self, metadata_file):
        """Validate metadata against json schema.

        If metadata is valid then cache metadata and use it when
        creating new image.

        :param metadata_file: JSON metadata file path
        :raises: BadStoreConfiguration exception if metadata is not valid.
        """
        def _fail(reason):
            # Single place to log a validation problem and convert it
            # into a BadStoreConfiguration for the filesystem store.
            LOG.error(reason)
            raise exceptions.BadStoreConfiguration(
                store_name="filesystem", reason=reason)

        try:
            with open(metadata_file, 'r') as fptr:
                metadata = jsonutils.load(fptr)

            if isinstance(metadata, dict):
                # A bare dictionary describes a single mountpoint;
                # normalize it into a list of one dictionary.
                metadata = [metadata]

            # Validate metadata against json schema
            jsonschema.validate(metadata, MULTI_FILESYSTEM_METADATA_SCHEMA)
            glance_store.check_location_metadata(metadata)
            self.FILESYSTEM_STORE_METADATA = metadata
        except (jsonschema.exceptions.ValidationError,
                exceptions.BackendException, ValueError) as vee:
            _fail(_('The JSON in the metadata file %(file)s is '
                    'not valid and it can not be used: '
                    '%(vee)s.') % dict(file=metadata_file,
                                       vee=utils.exception_to_str(vee)))
        except IOError as ioe:
            _fail(_('The path for the metadata file %(file)s could '
                    'not be accessed: '
                    '%(ioe)s.') % dict(file=metadata_file,
                                       ioe=utils.exception_to_str(ioe)))
Example #16
0
    def __init__(self, *args, **kwargs):
        """Load vendor metadata from the JSON file named in configuration.

        When no path is configured, an empty dict is used.  I/O and JSON
        decoding errors are logged and re-raised to the caller.
        """
        super(JsonFileVendorData, self).__init__(*args, **kwargs)
        data = {}
        fpath = CONF.vendordata_jsonfile_path
        logprefix = "%s[%s]: " % (file_opt.name, fpath)
        if fpath:
            try:
                with open(fpath, "r") as fp:
                    data = jsonutils.load(fp)
            except IOError as e:
                if e.errno == errno.ENOENT:
                    LOG.warn(_LW("%(logprefix)sfile does not exist"),
                             {'logprefix': logprefix})
                else:
                    # Bug fix: the conversion read "%(logprefix)u...",
                    # which would try to render the string prefix as an
                    # unsigned int; the 's' conversion was missing.
                    LOG.warn(_LW("%(logprefix)sunexpected IOError when "
                                 "reading"), {'logprefix': logprefix})
                # Bare 'raise' preserves the original traceback
                # ('raise e' discards it on Python 2).
                raise
            except ValueError:
                LOG.warn(_LW("%(logprefix)sfailed to load json"),
                         {'logprefix': logprefix})
                raise

        self._data = data
Example #17
0
def parse_json_file(file_path):
    """Load and return the JSON document at *file_path*.

    The path is resolved relative to this module's directory.
    """
    base_dir = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(base_dir, file_path), 'r') as source:
        return jsonutils.load(source)
def parse_json_file(file_path):
    """Return the parsed JSON contents of file_path.

    file_path is interpreted relative to the directory that contains
    this module.
    """
    here = os.path.dirname(os.path.realpath(__file__))
    full_path = os.path.join(here, file_path)
    with open(full_path, 'r') as fh:
        return jsonutils.load(fh)