Exemple #1
0
def is_not_yaml(check_obj):
    """Assert that the wrapped string value is NOT parseable as YAML.

    Raises CheckError if the value parses; returns the check object for
    chaining otherwise.
    """
    check_obj.is_string()
    try:
        # PyYAML has no `loads`; `safe_load` parses a string safely.
        # It raises yaml.YAMLError (not ValueError) on invalid input.
        yaml.safe_load(check_obj._val)
        raise CheckError('{} is valid YAML'.format(check_obj._val))
    except yaml.YAMLError:
        # Parsing failed: the value is not YAML, so the check passes.
        return check_obj
Exemple #2
0
 def is_not_yaml(self):
     """Assert that the wrapped string value is NOT parseable as YAML."""
     self.is_string()
     try:
         # PyYAML has no `loads`; `safe_load` parses a string safely.
         # It raises yaml.YAMLError (not ValueError) on invalid input.
         yaml.safe_load(self._val)
         raise CheckError('{} is valid YAML'.format(self._val))
     except yaml.YAMLError:
         # Parsing failed: the value is not YAML, so the check passes.
         return self
Exemple #3
0
def get_yaml(fh):
    """YAML to object with input type detection.

    Accepts an open file-like object or a YAML string; returns the parsed
    object (implicitly None for any other input type, as before).
    """
    import yaml
    if isinstance(fh, str):
        # PyYAML has no `loads`; `safe_load` parses strings directly.
        return yaml.safe_load(fh)
    elif hasattr(fh, 'read'):
        # `file` is not a builtin in Python 3; duck-type file objects.
        # safe_load reads directly from an open stream.
        return yaml.safe_load(fh)
    def load_config_file(self, config_file):
        """Load a config file (YAML first, JSON fallback) into the config tree.

        Raises Exception if the file is missing, empty, or unparsable.
        """
        if not os.path.exists(config_file):
            raise Exception("Path to config file do not exists on disk...")

        with open(config_file, "r") as stream:
            data = stream.read()

        if not data:
            raise Exception("No data in config file : {}".format(config_file))

        # Try first with yaml as that is default config lang.
        # If yaml loading failed then try json loading.
        try:
            # PyYAML has no `loads`; `safe_load` parses a string safely.
            data_tree = yaml.safe_load(data)
        except Exception:
            try:
                data_tree = json.loads(data)
            except Exception:
                raise Exception(
                    "Unable to load data as yaml or json from config file : {}"
                    .format(config_file))

        log.debug("Loading default data from default config file : {}".format(
            config_file))

        # If data was loaded into python datastructure then load it into the config tree
        self.merge_data_tree(data_tree)
Exemple #5
0
    def to_yaml_string(self, pb_msg, project_ns=False):
        """ Serialize a protobuf message into YAML

        Arguments:
            pb_msg - A GI-protobuf object of type provided into constructor
            project_ns - Need the desc in project namespace, required for
                         posting to Restconf as part of onboarding

        Returns:
            A YAML string representing the protobuf message

        Raises:
            SerializationError - Message could not be serialized
            TypeError - Incorrect protobuf type provided
        """
        self._log.debug("Convert desc to yaml (ns:{}): {}".format(
            project_ns, pb_msg.as_dict()))
        try:
            desc_msg = self.to_desc_msg(pb_msg, not project_ns)
            yaml_str = desc_msg.to_yaml(self.model)
            if project_ns:
                # Remove rw-project:project top level element.
                # PyYAML has no `loads`; `safe_load` parses the string.
                dic = yaml.safe_load(yaml_str)
                ystr = yaml.dump(dic[NS_PROJECT][0])
            else:
                ystr = yaml_str

        except Exception as e:
            self._log.exception("Exception converting to yaml: {}".format(e))
            raise SerializationError(e)

        return ystr
def yaml2dict(content):
    """Parse YAML text into a Python object, reporting failures via error()."""
    try:
        # PyYAML has no `loads`; `safe_load` parses untrusted text safely.
        return yaml.safe_load(content)
    except yaml.YAMLError as e:
        msg_sche = 'your content {} is not in the right yaml ' \
                   'style and details are {}'
        error(__file__, msg_sche.format(content, e))
Exemple #7
0
    def testCreateAPIsMapFile(self):
        """Generate an api_map for a sample config and diff it against testdata."""
        # PyYAML has no `loads`; `safe_load` parses the string config.
        config = yaml.safe_load(
            textwrap.dedent("""\
        orange:
          v1:
            discovery: organge_v1.json
            default: True
          v2:
            discovery: organge_v2.json
        banana:
          v2beta:
            discovery: banana_v2beta.json
          v2_staging:
            version: v2
            discovery: banana_v2_staging.json
            default: True
        pear:
          v7_test:
            discovery: pear_v7_test.json
    """))

        with files.TemporaryDirectory() as tmp_dir:
            regen.GenerateApiMap(tmp_dir, 'fruits', config)
            content = self.GetFileContent(os.path.join(tmp_dir, 'api_map.py'))

        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(
            self.GetFileContent(
                os.path.join(os.path.dirname(__file__), 'testdata',
                             'api_map_sample.txt')), content)
def svc_id(svc_recs, rec):
    """Return the id of the single service record whose decoded key is a
    sub-dict of `rec` (asserts at most one record matches)."""
    match_id = None
    for enc_key, candidate_id in svc_recs.items():
        # dict_items `<=` is the subset test: every key/value pair of the
        # decoded key must also appear in `rec`.
        if loads(enc_key).items() <= rec.items():
            assert match_id is None
            match_id = candidate_id
    return match_id
    def load_config_file(self, config_file):
        """Load a config file (YAML first, JSON fallback) into the config tree.

        Raises Exception if the file is missing, empty, or unparsable.
        """
        if not os.path.exists(config_file):
            raise Exception("Path to config file do not exists on disk...")

        with open(config_file, "r") as stream:
            data = stream.read()

        if not data:
            raise Exception("No data in config file : {}".format(config_file))

        # Try first with yaml as that is default config lang.
        # If yaml loading failed then try json loading.
        try:
            # PyYAML has no `loads`; `safe_load` parses a string safely.
            data_tree = yaml.safe_load(data)
        except Exception:
            try:
                data_tree = json.loads(data)
            except Exception:
                raise Exception("Unable to load data as yaml or json from"
                                " config file : {}".format(config_file))

        Log.debug("Loading default data from default config file : {}".format(config_file))

        # If data was loaded into python datastructure then load it into the
        # config tree
        self.merge_data_tree(data_tree)
Exemple #10
0
    def testCreateAPIsMapFile(self):
        """Generate an api_map for a sample config and diff it against testdata."""
        # PyYAML has no `loads`; `safe_load` parses the string config.
        config = yaml.safe_load(
            textwrap.dedent(
                """\
        orange:
          v1:
            discovery: organge_v1.json
            default: True
          v2:
            discovery: organge_v2.json
        banana:
          v2beta:
            discovery: banana_v2beta.json
          v2_staging:
            version: v2
            discovery: banana_v2_staging.json
            default: True
        pear:
          v7_test:
            discovery: pear_v7_test.json
    """
            )
        )

        with files.TemporaryDirectory() as tmp_dir:
            regen.GenerateApiMap(tmp_dir, "fruits", config)
            content = self.GetFileContent(os.path.join(tmp_dir, "api_map.py"))

        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(
            self.GetFileContent(os.path.join(os.path.dirname(__file__), "testdata", "api_map_sample.txt")), content
        )
Exemple #11
0
def loadResultDefinition(fname):
    '''Deserialize `fname` appropriate to extension. Return the result.

    Supports .json and .yaml; implicitly returns None for other extensions.
    '''
    ext = fname.split('.')[-1]
    # Close the file deterministically instead of leaking the handle.
    with open(fname) as f:
        fstr = f.read()
    if ext == 'json':
        return json.loads(fstr)
    elif ext == 'yaml':
        # PyYAML has no `loads`; `safe_load` parses a string safely.
        return yaml.safe_load(fstr)
Exemple #12
0
def _load_schema_url(url):
    """Fetch `url` and parse the response body as YAML.

    Raises SchemingException if the URL cannot be fetched.
    """
    # urllib2 is Python 2 only; urllib.request is its Python 3 home.
    import urllib.error
    import urllib.request
    try:
        res = urllib.request.urlopen(url)
        tables = res.read()
    except urllib.error.URLError:
        raise SchemingException("Could not load %s" % url)

    # PyYAML has no `loads`, and load() takes a single stream argument;
    # the old `yaml.loads(tables, url)` call had a bogus second argument.
    return yaml.safe_load(tables)
def test_read_cities_data():
    """Smoke-test io.read_cities_data('br', ...) against the checked-in config."""
    # yaml.loads does not exist, and it was being handed the *path* string,
    # which safe_load would just parse as a scalar. Open the file and parse
    # its contents instead.
    with open('src/configs/config.yaml') as f:
        config = yaml.safe_load(f)

    result = io.read_cities_data('br', config)

    assert isinstance(result, dict)

    for v in result.values():
        assert isinstance(v, dict)
Exemple #14
0
def decode_content_data(message):
    '''
    Validate message and decode data from content according to mime-type.

    Parameters
    ----------
    message : dict
        One of the message types defined in :data:`MESSAGE_SCHEMA`.

    Returns
    -------
    object
        Return deserialized object from ``content['data']`` field of message.

    Raises
    ------
    RuntimeError
        If ``content['error']`` field is set.
    '''
    validate(message)

    error = message['content'].get('error', None)
    if error is not None:
        raise RuntimeError(error)

    # Defaults used when the message carries no metadata.
    mime_type = 'application/python-pickle'
    transfer_encoding = 'BASE64'
    metadata = message['content'].get('metadata', None)
    if metadata is not None:
        mime_type = metadata.get('mime_type', mime_type)
        transfer_encoding = metadata.get('transfer_encoding',
                                         transfer_encoding)

    data = message['content'].get('data', None)

    if data is None:
        return None

    # If content data was base64 encoded, decode it.
    #
    # [1]: https://www.w3.org/Protocols/rfc1341/5_Content-Transfer-Encoding.html
    if transfer_encoding == 'BASE64':
        data = base64.b64decode(data)

    if mime_type == 'application/python-pickle':
        # Pickle object.
        # NOTE(review): pickle.loads on untrusted message data is unsafe;
        # confirm the transport is trusted.
        return pickle.loads(data)
    elif mime_type == 'application/x-yaml':
        # PyYAML has no `loads`; `safe_load` parses bytes/str safely.
        return yaml.safe_load(data)
    elif mime_type == 'application/json':
        return json.loads(data)
    elif mime_type in ('application/octet-stream', 'text/plain'):
        return data
    else:
        raise ValueError('Unrecognized mime-type: %s' % mime_type)
def decode_content_data(message):
    '''
    Validate message and decode data from content according to mime-type.

    Parameters
    ----------
    message : dict
        One of the message types defined in :data:`MESSAGE_SCHEMA`.

    Returns
    -------
    object
        Return deserialized object from ``content['data']`` field of message.

    Raises
    ------
    RuntimeError
        If ``content['error']`` field is set.
    '''
    validate(message)

    error = message['content'].get('error', None)
    if error is not None:
        raise RuntimeError(error)

    # Defaults used when the message carries no metadata.
    mime_type = 'application/python-pickle'
    transfer_encoding = 'BASE64'
    metadata = message['content'].get('metadata', None)
    if metadata is not None:
        mime_type = metadata.get('mime_type', mime_type)
        transfer_encoding = metadata.get('transfer_encoding',
                                         transfer_encoding)

    data = message['content'].get('data', None)

    if data is None:
        return None

    # If content data was base64 encoded, decode it.
    #
    # [1]: https://www.w3.org/Protocols/rfc1341/5_Content-Transfer-Encoding.html
    if transfer_encoding == 'BASE64':
        data = base64.b64decode(data)

    if mime_type == 'application/python-pickle':
        # Pickle object.
        # NOTE(review): pickle.loads on untrusted message data is unsafe;
        # confirm the transport is trusted.
        return pickle.loads(data)
    elif mime_type == 'application/x-yaml':
        # PyYAML has no `loads`; `safe_load` parses bytes/str safely.
        return yaml.safe_load(data)
    elif mime_type == 'application/json':
        return json.loads(data)
    elif mime_type in ('application/octet-stream', 'text/plain'):
        return data
    else:
        raise ValueError('Unrecognized mime-type: %s' % mime_type)
Exemple #16
0
def query(ctx, input, output, context, query_file):
    """ Run an MQL query and store the results. """
    graph = ctx.obj['GRAPH']
    for uri in input:
        load_dump(graph, ensure_uri(uri))

    if query_file is not None:
        query = read_yaml_uri(ensure_uri(query_file))
    else:
        # PyYAML has no `loads`; `safe_load` reads from the open stream.
        query = yaml.safe_load(sys.stdin)
    save_query_json(graph, query, output, context_id=context)
Exemple #17
0
def query(ctx, input, output, context, query_file):
    """ Run an MQL query and store the results. """
    graph = ctx.obj['GRAPH']
    for uri in input:
        load_dump(graph, ensure_uri(uri))

    if query_file is not None:
        query = read_yaml_uri(ensure_uri(query_file))
    else:
        # PyYAML has no `loads`; `safe_load` reads from the open stream.
        query = yaml.safe_load(sys.stdin)
    save_query_json(graph, query, output, context_id=context)
 def handle(self, *args, **options):
     """Import bug items from the JSON or YAML files given as arguments."""
     for filename in args:
         with open(filename) as f:
             if filename.endswith('.json'):
                 bug_dicts = simplejson.load(f)
             else:
                 # assume YAML
                 s = f.read()
                 # PyYAML has no `loads`; `safe_load` parses a string.
                 bug_dicts = yaml.safe_load(s)
         for bug_dict in bug_dicts:
             mysite.customs.core_bugimporters.import_one_bug_item(bug_dict)
Exemple #19
0
 def testGetAPIsMap(self):
     """Check _MakeApiMap output against a hand-built expected map."""
     # PyYAML has no `loads`; `safe_load` parses the string config.
     config = yaml.safe_load(
         textwrap.dedent(
             """\
     orange:
       v1:
         discovery: organge_v1.json
         default: True
       v2:
         discovery: organge_v2.json
     banana:
       v2beta:
         discovery: banana_v2beta.json
       v2_staging:
         version: v2
         discovery: banana_v2_staging.json
         default: True
     pear:
       v7_test:
         discovery: pear_v7_test.json
 """
         )
     )
     expected_map = {
         "orange": {
             "v1": api_def.APIDef(
                 "fruits.orange.v1.orange_v1_client.OrangeV1", "fruits.orange.v1.orange_v1_messages", True
             ),
             "v2": api_def.APIDef(
                 "fruits.orange.v2.orange_v2_client.OrangeV2", "fruits.orange.v2.orange_v2_messages"
             ),
         },
         "banana": {
             "v2beta": api_def.APIDef(
                 "fruits.banana.v2beta.banana_v2beta_client.BananaV2beta",
                 "fruits.banana.v2beta.banana_v2beta_messages",
             ),
             "v2_staging": api_def.APIDef(
                 "fruits.banana.v2_staging.banana_v2_client.BananaV2",
                 "fruits.banana.v2_staging.banana_v2_messages",
                 True,
             ),
         },
         "pear": {
             "v7_test": api_def.APIDef(
                 "fruits.pear.v7_test.pear_v7_test_client.PearV7Test",
                 "fruits.pear.v7_test.pear_v7_test_messages",
                 True,
             )
         },
     }
     actual_map = regen._MakeApiMap("fruits", config)
     # assertEquals is a deprecated alias; use assertEqual.
     self.assertEqual(expected_map, actual_map)
Exemple #20
0
    def _make_call(self, call):
        """GET `call` and decode the body according to self.style.

        Returns None on a non-200 response; parsed json/yaml for those
        styles; the raw text otherwise.
        """
        resp = requests.request('GET', call)
        if resp.status_code != 200:
            return None
        else:
            data = resp.text

        if self.style == 'json':
            return json.loads(data)
        elif self.style == 'yaml':
            # PyYAML has no `loads`; `safe_load` parses a string safely.
            return yaml.safe_load(data)
        else:
            return data
 def handle(self, *args, **options):
     """Import bug items from JSON, JSON-lines, or YAML files given as args."""
     for filename in args:
         with open(filename) as f:
             if filename.endswith('.json'):
                 bug_dicts = json.load(f)
             elif filename.endswith('.jsonlines'):
                 bug_dicts = jsonlines_decoder(f)
             else:
                 # assume YAML
                 s = f.read()
                 # PyYAML has no `loads`; `safe_load` parses a string.
                 bug_dicts = yaml.safe_load(s)
             for bug_dict in bug_dicts:
                 _import_one(bug_dict)
Exemple #22
0
 def handle(self, *args, **options):
     """Import bug items from JSON, JSON-lines, or YAML files given as args."""
     for filename in args:
         with open(filename) as f:
             if filename.endswith('.json'):
                 bug_dicts = simplejson.load(f)
             elif filename.endswith('.jsonlines'):
                 bug_dicts = jsonlines_decoder(f)
             else:
                 # assume YAML
                 s = f.read()
                 # PyYAML has no `loads`; `safe_load` parses a string.
                 bug_dicts = yaml.safe_load(s)
             for bug_dict in bug_dicts:
                 mysite.customs.core_bugimporters.import_one_bug_item(bug_dict)
Exemple #23
0
 def testGetAPIsMap(self):
     """Check _MakeApiMap output against a hand-built expected map."""
     # PyYAML has no `loads`; `safe_load` parses the string config.
     config = yaml.safe_load(
         textwrap.dedent("""\
     orange:
       v1:
         discovery: organge_v1.json
         default: True
       v2:
         discovery: organge_v2.json
     banana:
       v2beta:
         discovery: banana_v2beta.json
       v2_staging:
         version: v2
         discovery: banana_v2_staging.json
         default: True
     pear:
       v7_test:
         discovery: pear_v7_test.json
 """))
     expected_map = {
         'orange': {
             'v1':
             api_def.APIDef('fruits.orange.v1.orange_v1_client.OrangeV1',
                            'fruits.orange.v1.orange_v1_messages', True),
             'v2':
             api_def.APIDef('fruits.orange.v2.orange_v2_client.OrangeV2',
                            'fruits.orange.v2.orange_v2_messages')
         },
         'banana': {
             'v2beta':
             api_def.APIDef(
                 'fruits.banana.v2beta.banana_v2beta_client.BananaV2beta',
                 'fruits.banana.v2beta.banana_v2beta_messages'),
             'v2_staging':
             api_def.APIDef(
                 'fruits.banana.v2_staging.banana_v2_client.BananaV2',
                 'fruits.banana.v2_staging.banana_v2_messages', True)
         },
         'pear': {
             'v7_test':
             api_def.APIDef(
                 'fruits.pear.v7_test.pear_v7_test_client.PearV7Test',
                 'fruits.pear.v7_test.pear_v7_test_messages', True)
         }
     }
     actual_map = regen._MakeApiMap('fruits', config)
     # assertEquals is a deprecated alias; use assertEqual.
     self.assertEqual(expected_map, actual_map)
Exemple #24
0
    def testGetAPIsMapNoDefaultsClientsForAPIs(self):
        """_MakeApiMap must reject configs that declare no default versions."""
        # PyYAML has no `loads`; `safe_load` parses the string config.
        config = yaml.safe_load(
            textwrap.dedent("""\
        orange:
          v1:
            discovery: organge_v1.json
          v2:
            discovery: organge_v2.json
    """))

        with self.assertRaises(Exception) as ctx:
            regen._MakeApiMap('fruits', config)

        msg = str(ctx.exception)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(msg,
                         'No default client versions found for [fig, lime]!')
Exemple #25
0
    def __init__(self, filename, title='Tk', prefer_tk=True, file_type='json'):
        """Build the UI from a JSON or YAML definition.

        `filename` may be a path to a definition file, or the definition
        text itself. Raises ValueError for unsupported file_type values.
        """
        self.prefer_tk = prefer_tk
        self.vars = {}

        if file_type == 'json':
            if os.path.isfile(filename):
                # Close the handle instead of leaking it via json.load(open(...)).
                with open(filename) as f:
                    user_interface = json.load(f)
            else:
                user_interface = json.loads(filename)
        elif file_type == 'yaml':
            # safe_load handles both streams and strings; yaml has no
            # `loads`, and bare yaml.load is unsafe without a Loader.
            if os.path.isfile(filename):
                with open(filename) as f:
                    user_interface = yaml.safe_load(f)
            else:
                user_interface = yaml.safe_load(filename)
        else:
            # Fixed message: the two literals previously concatenated to
            # "aresupported.".
            raise ValueError('Only json or yaml file definitions are '
                             'supported.')

        self.create_widgets(self, user_interface)
Exemple #26
0
    def __init__(self, filename, title='Tk', preferTk=True):
        """
        Initialize a Tk root and created the UI from a JSON file.

        Returns the Tk root.
        """
        # Needs to be done this way - because base class do not derive from
        # object :-(
        tkinter.Tk.__init__(self)
        self.preferTk = preferTk
        self.title(title)

        # safe_load replaces the nonexistent yaml.loads and the unsafe bare
        # yaml.load; it accepts both open streams and strings.
        if os.path.isfile(filename):
            with open(filename) as f:
                user_interface = yaml.safe_load(f)
        else:
            user_interface = yaml.safe_load(filename)

        self.create_widgets(self, user_interface)
        super().__init__(filename, title, preferTk)
Exemple #27
0
def getParsedSchemaFromYaml(model):
    """reads a JSON schema file in yaml format
    and returns the parsed dictionary from it

    Keyword arguments:
    model -- file name and path to the model to load or model content from stdin
    """

    if __isHttpLoadableModel(model):
        with urllib.request.urlopen(model) as url:
            # yaml has no `loads`; use load() with an explicit Loader,
            # consistent with the other branches below.
            return yaml.load(url.read().decode(), Loader=yaml.FullLoader)
    elif doesFileExist(model):
        # model is treated as file to input
        with open(model) as json_schema:
            return yaml.load(json_schema, Loader=yaml.FullLoader)
    else:
        # model is treated as string content to get parsed
        return yaml.load(model, Loader=yaml.FullLoader)
Exemple #28
0
    def testGetAPIsMapNoDefaultsClientsForAPIs(self):
        """_MakeApiMap must reject configs that declare no default versions."""
        # PyYAML has no `loads`; `safe_load` parses the string config.
        config = yaml.safe_load(
            textwrap.dedent(
                """\
        orange:
          v1:
            discovery: organge_v1.json
          v2:
            discovery: organge_v2.json
    """
            )
        )

        with self.assertRaises(Exception) as ctx:
            regen._MakeApiMap("fruits", config)

        msg = str(ctx.exception)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(msg, "No default client versions found for [fig, lime]!")
Exemple #29
0
def jq_all(query, data_str, *args, **kwargs):
    """
    Queries the nested data and returns all results as a list.

    Parameters
    ----------
    data_str : str
        Nested data in Python dict's representation format.
        If must be loadable by ``yaml.safe_load()``.

    Returns
    -------
    result : str
        String representation of the result list.
    """
    try:
        # Use safe_load, as the docstring promises; yaml has no `loads`.
        return json.dumps(pyjq.all(query, yaml.safe_load(data_str)), *args, **kwargs)
    except Exception as e:
        return 'jq error: {} query: {!r} str: {!r}'.format(e, query, data_str)
Exemple #30
0
def ucsdJsonParser(text):
    """Parse `text` as JSON, falling back to YAML, then to demjson.

    Returns the parsed object, or None if `text` is falsy or nothing could
    make sense of it. (The original tried all three parsers unconditionally,
    letting a later failure overwrite an earlier success, and mixed tabs and
    spaces in its indentation; both are fixed by nesting the fallbacks.)
    """
    jsondict = None
    if text:
        try:
            jsondict = json.loads(text)
        except Exception:
            # Standard JSON parser failed; try interpreting as YAML.
            # PyYAML has no `loads`; `safe_load` parses a string safely.
            try:
                jsondict = yaml.safe_load(text)
            except Exception:
                # YAML failed too; try the alternative JSON lib.
                try:
                    jsondict = demjson.decode(text)
                except Exception:
                    # inconceivable!
                    pass
    return jsondict
Exemple #31
0
    def __init__(self, data_or_file, shape):
        """Build a ZAC simulator from a YAML spec (open stream or string)."""
        super(ZacSimulator, self).__init__()
        # yaml.safe_load accepts both file objects and strings; this replaces
        # the Python-2-only `isinstance(..., file)` check, the unsafe bare
        # yaml.load, and the nonexistent yaml.loads.
        data = yaml.safe_load(data_or_file)

        self.sets = data["sets"]
        self.cells = self.sets.keys()
        self.strings = data["strings"]
        self.python_code = "# auto-generated code:\n" + data["python_code"]
        self.cpp_code = data["cpp_code"]
        # Normalize a 1-D shape to a degenerate 2-D one.
        if len(shape) == 1:
            shape = (shape[0], 1)
        self.shape = shape

        self.possible_values = self.sets

        # NOTE(review): iteritems/basestring below are Python 2 only; left
        # unchanged to preserve the snippet's target runtime.
        self.stringy_subcells = [k for k, v in self.sets.iteritems() if isinstance(v[0], basestring)]

        self.neighbourhood = ZacNeighbourhood(data["neighbourhood"])
        self.acc = SubcellAccessor(self.sets.keys())
        self.computation = PasteComputation(self.cpp_code, self.python_code)
        self.border = BorderSizeEnsurer()

        # NOTE(review): `shape` was already normalized to 2-D above, so this
        # branch always picks TwoDimCellLoop; confirm intent.
        if len(shape) == 1:
            self.loop = OneDimCellLoop()
        else:
            self.loop = TwoDimCellLoop()

        for k in self.sets.keys():
            nconf_arr = np.zeros(shape, dtype=int)
            cconf_arr = np.zeros(shape, dtype=int)
            setattr(self, "cconf_%s" % k, cconf_arr)
            setattr(self, "nconf_%s" % k, nconf_arr)
            if k in self.stringy_subcells:
                # String-typed subcells start at the index of their first
                # allowed string value.
                val = [index for index, val in enumerate(self.strings) if val == self.sets[k][0]][0]
                cconf_arr[:] = val
                nconf_arr[:] = val

        self.stepfunc = StepFunc(self, self.loop, self.acc, self.neighbourhood, self.border, visitors=[self.computation])
        self.stepfunc.gen_code()
Exemple #32
0
 def load(self, filePath, formatType=DETECT):
     """Load config from filePath, detecting the format from the extension
     when formatType is DETECT. Leaves self.config empty if the file is
     missing or the extension is unknown."""
     self.config = {}
     self.formatType = formatType
     self.filePath = filePath
     if os.path.isfile(filePath):
         # Read into `content` (the parsing code below uses it — the old
         # code read into `file` and left `content` undefined) and close
         # the handle deterministically.
         with open(filePath) as fh:
             content = fh.read()
         if self.formatType == self.DETECT:
             bname = os.path.basename(self.filePath)
             # splitext()[0] is the stem; the extension (sans dot) is what
             # the formats table is keyed on.
             extension = os.path.splitext(bname)[1].lstrip('.')
             try:
                 self.formatType = self.formats[extension]
             except KeyError:
                 # Unknown extension: nothing to parse.
                 return
         if self.formatType == self.JSON:
             self.config = json.loads(content)
         elif self.formatType == self.YAML:
             self.fixYamlIndexes(content)
             # PyYAML has no `loads`; `safe_load` parses a string safely.
             self.config = yaml.safe_load(content)
         elif self.formatType == self.PROPERTIES:
             self.config = Properties.loads(content)
    def __init__(self, spec):
        """Build a state machine from `spec`: a dict (library use), a path
        ending in json/yml, or anything else for incremental construction."""
        self._stateMachine = self
        self._spec = spec
        if isinstance(spec, dict):  # for testing and calling as a library
            self._stateMachineSpec = spec
        else:
            if self._spec.endswith("json"):
                # Close the spec file instead of leaking the handle.
                with open(self._spec, "r") as f:
                    self._stateMachineSpec = json.load(f)
            elif self._spec.endswith("yml"):
                # PyYAML has no `loads`; safe_load reads the open stream.
                with open(self._spec, "r") as f:
                    self._stateMachineSpec = yaml.safe_load(f)
            else:
                self._stateMachineSpec = {}  # for incremental construction

        self._input = {}
        self._output = {}
        self._state = {}

        # make instances of all inputs, outputs, and states
        for input in self._stateMachineSpec["Input"]:
            self._input[input] = Input(
                self._stateMachineSpec["Input"]
                [input])  # construct an instance with the document node
        for output in self._stateMachineSpec["Output"]:
            self._output[output] = Output(
                self._stateMachineSpec["Output"]
                [output])  # construct an instance with the document node
        for state in self._stateMachineSpec["State"]:
            self._state[state] = State(
                state, self._stateMachineSpec["State"][state],
                self._stateMachine
            )  # construct an instance with the name, the document node, and the state machine instance

        self._currentState = self._state[self._stateMachineSpec[
            "CurrentState"]]  # initialize the state machine to the provided state

        self._currentTime = 0
        self._intervalStartTime = self._currentTime
        self._intervalTime = self._currentTime - self._intervalStartTime  # wrap- and sign-safe interval compare
Exemple #34
0
 def load(self, file, configFormat = formats["detect"]):
     """Load config from an open file object, detecting the format from
     the file name's extension when configFormat is "detect"."""
     self.config = {}
     self.format = configFormat
     self.file = file
     content = file.read()
     if self.format == self.formats["detect"]:
         baseName = os.path.basename(self.file.name)
         extension = baseName.rsplit(".", 1)[1]
         if extension in self.formats:
             self.format = self.formats[extension]
     if self.format == self.formats["json"]:
         self.config = json.loads(content)
     elif self.format == self.formats["yaml"]:
         self.fixYamlIndexes(content)
         # PyYAML has no `loads`; `safe_load` parses a string safely.
         self.config = yaml.safe_load(content)
     elif self.format == self.formats["properties"]:
         self.config = Properties.loads(content)
     elif self.format == self.formats["toml"]:
         self.config = toml.loads(content)
     elif self.format == self.formats["ini"]:
         # NOTE(review): ini content is parsed with toml.loads — looks like
         # a copy/paste; confirm whether configparser was intended.
         self.config = toml.loads(content)
Exemple #35
0
    def __init__(self, filename, title='Tk', prefer_tk=True, file_type='json'):
        """
        Initialize a Tk root and created the UI from a JSON file.

        Returns the Tk root.
        """
        # Needs to be done this way - because base class do not derive from
        # object :-(
        tkinter.Tk.__init__(self)
        self.prefer_tk = prefer_tk
        self.title(title)

        if file_type == 'json':
            if os.path.isfile(filename):
                # Close the handle instead of leaking it via json.load(open(...)).
                with open(filename) as f:
                    user_interface = json.load(f)
            else:
                user_interface = json.loads(filename)
        elif file_type == 'yaml':
            # safe_load handles both streams and strings; yaml has no
            # `loads`, and bare yaml.load is unsafe without a Loader.
            if os.path.isfile(filename):
                with open(filename) as f:
                    user_interface = yaml.safe_load(f)
            else:
                user_interface = yaml.safe_load(filename)
        else:
            # Fixed message: the two literals previously concatenated to
            # "aresupported.".
            raise ValueError('Only json or yaml file definitions are '
                             'supported.')

        self.create_widgets(self, user_interface)
Exemple #36
0
import threading
import RPi.GPIO as GPIO
from cPickle import loads as load
from cPickle import dumps as dump


## Settings
# safe_load replaces the unsafe bare yaml.load and the nonexistent
# yaml.loads; it reads directly from the open stream.
with open('/etc/hydrobot/settings.yml','r') as f:
  settings = yaml.safe_load(f)


## Stored Procedures
# The hub-specified procedures file, if present, overrides the default one.
procedures = {}
if os.path.isfile('/etc/hydrobot/procedures.yml'):
  with open('/etc/hydrobot/procedures.yml','r') as f:
    procedures = yaml.safe_load(f)
if os.path.isfile(settings['hub']['procedures file']):
  with open(settings['hub']['procedures file'],'r') as f:
    procedures = yaml.safe_load(f)


## Node Role
def node():
  # Publish this node's settings to the hub's redis under node::<id>,
  # then print the redis key pattern for each configured output pin.
  root_pattern = 'output::'+settings['node']['id']+'::'
  r = redis.StrictRedis(host=settings['hub']['host'],port=settings['hub']['port'])
  # `dump` is cPickle.dumps (aliased at the top of this chunk).
  r.set('node::'+settings['node']['id'],dump(settings['node']))
  pins = []
  for i in settings['node']['pins']:
    # NOTE(review): `i.keys()[0]` (subscripting dict.keys) and the bare
    # `print` statement below are Python 2 only.
    pins.append(int(i.keys()[0]))
  for i in pins:
    print root_pattern+str(i)
 def comment_data(self, data, verified=None, code_review=None):
     """Parse `data` as YAML and post it as a review comment on this change."""
     # PyYAML has no `loads`; `safe_load` parses a string safely.
     comment_message = yaml.safe_load(data)
     self.remote.comment_change(self.number, self.patchset_number, comment_message, verified=verified, code_review=code_review)
        if extra_columns:
            logger.error("Your configuration names columns that don't exists in the database.")
            logger.error("These columns are: %s" % (extra_columns,))
            sys.exit(1)


def anonymize(config):
    # Emit the anonymization SQL (truncates, then deletes, then updates) to
    # stdout, bracketed by FOREIGN_KEY_CHECKS=0/1 so FK constraints do not
    # block the statements.
    # NOTE(review): the bare `print` statements are Python 2 only.
    sql = []
    sql.extend(get_truncates(config))
    sql.extend(get_deletes(config))
    sql.extend(get_updates(config))
    print "SET FOREIGN_KEY_CHECKS=0;"
    for stmt in sql:
        print stmt + ";"
    print "SET FOREIGN_KEY_CHECKS=1;"


if __name__ == "__main__":
    # Anonymize each database config file given on the command line.
    db_file_names = sys.argv[1:]
    if not db_file_names:
        logger.error("Usage: %s config_file [config_file ...]", sys.argv[0])

    for db_file_name in db_file_names:
        logger.info("Processing %s", db_file_name)
        with open(db_file_name) as db_file:
            # PyYAML has no `loads`; `safe_load` reads from the open stream.
            cfg = yaml.safe_load(db_file)

        # Fixed typo: the loaded config is `cfg`, not the undefined `fg`.
        check_config(cfg)
        anonymize(cfg)
Exemple #39
0
 def loads(obj: str):
     """Parse a YAML string into a Python object."""
     # The original body was `return loads(obj)` — unbounded recursion.
     # Delegate to PyYAML's safe_load instead (the intent throughout this
     # file); yaml has no `loads`.
     return yaml.safe_load(obj)
Exemple #40
0
 def load(file="testyaml.yaml"):
     """Read the given YAML file and return its parsed content."""
     with open(file, 'r+') as fr:
         # Parse the file *contents*; the original passed the filename
         # string straight to loads().
         return loads(fr.read())
	def loads(cls, s, *args, **kwargs):
		"""Decode `s` via the YAML backend, wrapping any failure in
		TranscoderLoadException."""
		try:
			# yamllib (PyYAML) has no `loads`; safe_load parses a string.
			return yamllib.safe_load(s, *args, **kwargs)
		except Exception as e:
			# `except Exception, e` was Python-2-only syntax.
			raise TranscoderLoadException(e)
Exemple #42
0
import json
import redis
import threading
import RPi.GPIO as GPIO
from cPickle import loads as load
from cPickle import dumps as dump

## Settings
# safe_load replaces the unsafe bare yaml.load and the nonexistent
# yaml.loads; it reads directly from the open stream.
with open('/etc/hydrobot/settings.yml', 'r') as f:
    settings = yaml.safe_load(f)

## Stored Procedures
# The hub-specified procedures file, if present, overrides the default one.
procedures = {}
if os.path.isfile('/etc/hydrobot/procedures.yml'):
    with open('/etc/hydrobot/procedures.yml', 'r') as f:
        procedures = yaml.safe_load(f)
if os.path.isfile(settings['hub']['procedures file']):
    with open(settings['hub']['procedures file'], 'r') as f:
        procedures = yaml.safe_load(f)


## Node Role
def node():
    root_pattern = 'output::' + settings['node']['id'] + '::'
    r = redis.StrictRedis(host=settings['hub']['host'],
                          port=settings['hub']['port'])
    r.set('node::' + settings['node']['id'], dump(settings['node']))
    pins = []
    for i in settings['node']['pins']:
        pins.append(int(i.keys()[0]))
    for i in pins:
Exemple #43
0
 def from_yaml(cls, yml):
     """Build an instance from YAML: treat `yml` as a file path unless it
     contains a newline, in which case it is literal YAML text."""
     if '\n' not in yml:
         with open(os.path.expanduser(yml)) as f:
             # safe_load replaces the unsafe bare yaml.load.
             return cls.from_dict(yaml.safe_load(f))
     else:
         # PyYAML has no `loads`; safe_load parses a string directly.
         return cls.from_dict(yaml.safe_load(yml))
def optimize_variational_circuit(
    ansatz_specs,
    backend_specs,
    optimizer_specs,
    cost_function_specs,
    qubit_operator,
    initial_parameters="None",
    fixed_parameters="None",
    noise_model="None",
    device_connectivity="None",
    parameter_grid="None",
    parameter_values_list=None,
    constraint_operator="None",
    prior_expectation_values: Optional[str] = None,
    keep_history=False,
    thetas=None,
):
    """Optimize an ansatz-based variational circuit and save the results.

    Each ``*_specs`` argument may be either a YAML string or an
    already-parsed dict.  String arguments whose value is the literal
    ``"None"`` are treated as absent.  Results are written to
    ``optimization-results.json`` and ``optimized-parameters.json``.
    """
    warnings.warn(
        # Fixed typo: "depreciated" -> "deprecated".
        "optimize_variational_circuit will be deprecated in favor of optimize_ansatz_based_cost_function in steps/optimize.py in z-quantum-core.",
        DeprecationWarning,
    )
    if initial_parameters != "None":
        initial_params = load_array(initial_parameters)
    else:
        initial_params = None

    if fixed_parameters != "None":
        fixed_params = load_array(fixed_parameters)
    else:
        fixed_params = None

    # Load qubit operator
    operator = load_qubit_operator(qubit_operator)

    if isinstance(ansatz_specs, str):
        ansatz_specs_dict = yaml.load(ansatz_specs, Loader=yaml.SafeLoader)
    else:
        ansatz_specs_dict = ansatz_specs
    if "WarmStartQAOAAnsatz" in ansatz_specs_dict["function_name"]:
        thetas = np.array(load_list(thetas))
        ansatz = create_object(ansatz_specs_dict,
                               cost_hamiltonian=operator,
                               thetas=thetas)
    elif "QAOA" in ansatz_specs_dict["function_name"]:
        ansatz = create_object(ansatz_specs_dict, cost_hamiltonian=operator)
    else:
        ansatz = create_object(ansatz_specs_dict)

    # Load parameter grid
    if parameter_grid != "None":
        grid = load_parameter_grid(parameter_grid)
    else:
        grid = None

    # Load parameter values list
    if parameter_values_list is not None:
        parameter_values_list = load_array(parameter_values_list)

    # Load optimizer specs
    if isinstance(optimizer_specs, str):
        optimizer_specs_dict = yaml.load(optimizer_specs,
                                         Loader=yaml.SafeLoader)
    else:
        optimizer_specs_dict = optimizer_specs

    if (grid is not None and optimizer_specs_dict["function_name"]
            == "GridSearchOptimizer"):
        optimizer = create_object(optimizer_specs_dict, grid=grid)
    elif (parameter_values_list is not None and
          optimizer_specs_dict["function_name"] == "SearchPointsOptimizer"):
        optimizer = create_object(optimizer_specs_dict,
                                  parameter_values_list=parameter_values_list)
    else:
        optimizer = create_object(optimizer_specs_dict)

    # Load backend specs
    if isinstance(backend_specs, str):
        backend_specs_dict = yaml.load(backend_specs, Loader=yaml.SafeLoader)
    else:
        backend_specs_dict = backend_specs
    if noise_model != "None":
        backend_specs_dict["noise_model"] = load_noise_model(noise_model)
    if device_connectivity != "None":
        backend_specs_dict["device_connectivity"] = load_circuit_connectivity(
            device_connectivity)
    backend = create_object(backend_specs_dict)

    # Load cost function specs
    if isinstance(cost_function_specs, str):
        cost_function_specs_dict = yaml.load(cost_function_specs,
                                             Loader=yaml.SafeLoader)
    else:
        cost_function_specs_dict = cost_function_specs
    estimation_method_specs = cost_function_specs_dict.pop(
        "estimation_method_specs", None)

    if estimation_method_specs is not None:
        if isinstance(estimation_method_specs, str):
            # Was ``yaml.loads`` (nonexistent); parse like the other specs.
            estimation_method_specs = yaml.load(estimation_method_specs,
                                                Loader=yaml.SafeLoader)
        estimation_method = create_object(estimation_method_specs)
    else:
        estimation_method = estimate_expectation_values_by_averaging
    cost_function_specs_dict["estimation_method"] = estimation_method

    estimation_preprocessors_specs_list = cost_function_specs_dict.pop(
        "estimation_preprocessors_specs", None)
    if estimation_preprocessors_specs_list is not None:
        estimation_preprocessors = []
        for estimation_preprocessor_specs in estimation_preprocessors_specs_list:
            if isinstance(estimation_preprocessor_specs, str):
                # Was ``yaml.loads`` (nonexistent); parse like the other specs.
                estimation_preprocessor_specs = yaml.load(
                    estimation_preprocessor_specs, Loader=yaml.SafeLoader)
            estimation_preprocessors.append(
                create_object(estimation_preprocessor_specs))
        cost_function_specs_dict[
            "estimation_preprocessors"] = estimation_preprocessors

    cost_function_specs_dict["target_operator"] = operator
    cost_function_specs_dict["ansatz"] = ansatz
    cost_function_specs_dict["backend"] = backend
    cost_function_specs_dict["fixed_parameters"] = fixed_params
    cost_function = create_object(cost_function_specs_dict)

    if prior_expectation_values is not None:
        if isinstance(prior_expectation_values, str):
            cost_function.estimator.prior_expectation_values = load_expectation_values(
                prior_expectation_values)

    if constraint_operator != "None":
        constraint_op = load_qubit_operator(constraint_operator)
        # NOTE(review): this assumes cost_function_specs was passed as a
        # YAML string; a dict argument would fail here — confirm callers.
        constraints_cost_function_specs = yaml.load(cost_function_specs,
                                                    Loader=yaml.SafeLoader)
        constraints_estimator_specs = constraints_cost_function_specs.pop(
            "estimator-specs", None)
        if constraints_estimator_specs is not None:
            constraints_cost_function_specs["estimator"] = create_object(
                constraints_estimator_specs)
        constraints_cost_function_specs["ansatz"] = ansatz
        constraints_cost_function_specs["backend"] = backend
        constraints_cost_function_specs["target_operator"] = constraint_op
        constraint_cost_function = create_object(
            constraints_cost_function_specs)
        constraint_cost_function_wrapper = (
            lambda params: constraint_cost_function.evaluate(params).value)
        constraint_functions = ({
            "type": "eq",
            "fun": constraint_cost_function_wrapper
        }, )
        optimizer.constraints = constraint_functions

    opt_results = optimizer.minimize(cost_function, initial_params,
                                     keep_history)

    save_optimization_results(opt_results, "optimization-results.json")
    save_array(opt_results.opt_params, "optimized-parameters.json")
 def validate(self):
     """Return the parse error if ``self._content`` is not valid YAML.

     Returns None implicitly when the content parses cleanly.
     """
     try:
         # ``yaml.loads`` does not exist in PyYAML; use safe_load.
         yaml.safe_load(self._content)
     except yaml.YAMLError as err:
         # PyYAML signals parse failures with YAMLError, not ValueError,
         # so the original except clause could never fire.
         return err
# NOTE(review): everything below looks like a raw IPython session transcript
# (iterative experiments converting inline YAML comments into "_comment"
# keys via re.sub); it is not runnable as a module and redefines names.
# Note: '$1' is Perl/JavaScript backreference syntax — re.sub inserts it
# literally; the '\\1' form used in the next lines is the Python one.
print(re.sub(r'\#(.*)', '_comment: $1', am))
print(re.sub(r'\#(.*)', '_comment: \\1', am))
print(re.sub(r'\#\s*(.*)', '_comment: \\1', am))
get_ipython().magic('cd')
get_ipython().magic('cd')
get_ipython().magic('cd Desktop/')
# NOTE(review): the next line is a mangled paste (a with-statement fused
# with a print call) and is a SyntaxError as written.
with open('print(re.sub(r'\#\s*(.*)', '_comment: \\1', am))
with open('automoderator.yaml', 'w') as f:
    f.write(re.sub(r'\#\s*(.*)', '_comment: \\1', am))
    
with open('automoderator.yaml', 'w', encoding='utf8') as f:
    f.write(re.sub(r'\#\s*(.*)', '_comment: \\1', am))
    
# NOTE(review): yaml.loads does not exist in PyYAML (AttributeError);
# the session switches to yaml.load_all below.
am_obj = yaml.loads(re.sub(r'\#\s*(.*)', '_comment: \\1', am))
import yaml
am_obj = yaml.loads(re.sub(r'\#\s*(.*)', '_comment: \\1', am))
am_obj = yaml.load_all(re.sub(r'\#\s*(.*)', '_comment: \\1', am))
am_obj
list(am_obj)
am_obj = yaml.load_all(re.sub(r'\#\s*(.*)', '_comment: "\\1"', am))
list(am_obj)
am_obj = yaml.load_all(re.sub(r'\#\s*(.*)', '_comment: \'\\1\'', am))
list(am_obj)
am_obj = list(yaml.load_all(re.sub(r'\#\s*(.*)', '_comment: \'\\1\'', am.replace('"', '\uff02'))))
# NOTE(review): make_comment_tag is used here before its first definition
# (ordinary in a live session, broken as a script).
am_obj = yaml.load_all(re.sub(r'\#\s*(.*)', make_comment_tag, am))
# First attempt: prints the match and returns None, so re.sub substitutes
# the empty string; superseded by the version below.
def make_comment_tag(matchobj):
    print(matchobj)
am_obj = yaml.load_all(re.sub(r'\#\s*(.*)', make_comment_tag, am))
def make_comment_tag(matchobj):
    return '_comment: ' + matchobj.group(1)
am_obj = yaml.load_all(re.sub(r'\#\s*(.*)', make_comment_tag, am))