def simulate_eapol_flow_install(self, ldev_id, olt_id, onu_ids):

        # emulate the flow mod requests that shall arrive from the SDN
        # controller, one for each ONU
        lports = self.get(
            '/api/v1/logical_devices/{}/ports'.format(ldev_id)
        )['items']

        # device_id -> logical port map, which we will use to construct
        # our flows
        lport_map = dict((lp['device_id'], lp) for lp in lports)
        for onu_id in onu_ids:
            # if eth_type == 0x888e => send to controller
            _in_port = lport_map[onu_id]['ofp_port']['port_no']
            req = ofp.FlowTableUpdate(
                id=ldev_id,
                flow_mod=mk_simple_flow_mod(
                    match_fields=[
                        in_port(_in_port),
                        vlan_vid(ofp.OFPVID_PRESENT | 0),
                        eth_type(0x888e)],
                    actions=[
                        output(ofp.OFPP_CONTROLLER)
                    ],
                    priority=1000
                )
            )
            res = self.post('/api/v1/logical_devices/{}/flows'.format(ldev_id),
                            MessageToDict(req,
                                          preserving_proto_field_name=True),
                            expected_http_code=200)

        # for sanity, verify that flows are in flow table of logical device
        flows = self.get(
            '/api/v1/logical_devices/{}/flows'.format(ldev_id))['items']
        self.assertGreaterEqual(len(flows), 4)
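A note on the ``preserving_proto_field_name=True`` flag used in the POST body above: by default ``MessageToDict`` renames fields to their lowerCamelCase JSON names, and the flag keeps the original snake_case proto names, which is presumably what the VOLTHA REST endpoint expects. A minimal sketch of the difference, using ``descriptor_pb2`` (bundled with protobuf) purely for illustration:

from google.protobuf import descriptor_pb2
from google.protobuf.json_format import MessageToDict

fdp = descriptor_pb2.FileDescriptorProto()
fdp.message_type.add(name='Foo')

# Default behaviour: JSON (lowerCamelCase) names, e.g. {'messageType': [{'name': 'Foo'}]}
print(MessageToDict(fdp))

# With the flag: original proto names, e.g. {'message_type': [{'name': 'Foo'}]}
print(MessageToDict(fdp, preserving_proto_field_name=True))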
Example #2
    def sink_update(self, project, sink_name, filter_, destination,
                    unique_writer_identity=False):
        """API call:  update a sink resource.

        :type project: str
        :param project: ID of the project containing the sink.

        :type sink_name: str
        :param sink_name: the name of the sink

        :type filter_: str
        :param filter_: the advanced logs filter expression defining the
                        entries exported by the sink.

        :type destination: str
        :param destination: destination URI for the entries exported by
                            the sink.

        :type unique_writer_identity: bool
        :param unique_writer_identity: (Optional) determines the kind of
                                       IAM identity returned as
                                       writer_identity in the new sink.

        :rtype: dict
        :returns: The sink resource returned from the API (converted from a
                  protobuf to a dictionary).
        """
        path = 'projects/%s/sinks/%s' % (project, sink_name)
        sink_pb = LogSink(name=path, filter=filter_, destination=destination)
        sink_pb = self._gapic_api.update_sink(
            path,
            sink_pb,
            unique_writer_identity=unique_writer_identity)
        # NOTE: LogSink message type does not have an ``Any`` field,
        #       so ``MessageToDict`` can safely be used.
        return MessageToDict(sink_pb)
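The NOTE above matters because ``MessageToDict`` resolves ``Any`` payloads through the descriptor pool and fails when the packed type is not registered there. A small sketch of the ``Any`` case, assuming the default descriptor pool (which knows the well-known types):

from google.protobuf import any_pb2, struct_pb2
from google.protobuf.json_format import MessageToDict

payload = struct_pb2.Struct(fields={'ok': struct_pb2.Value(bool_value=True)})
wrapper = any_pb2.Any()
wrapper.Pack(payload)

# Conversion succeeds because Struct is a well-known type in the default pool;
# an Any packing a type the pool cannot resolve fails during conversion.
print(MessageToDict(wrapper))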
Example #3
 def update_product(
     self,
     product,
     location=None,
     product_id=None,
     update_mask=None,
     project_id=None,
     retry=None,
     timeout=None,
     metadata=None,
 ):
     """
     For the documentation see:
     :class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator`
     """
     client = self.get_conn()
     product = self.product_name_determiner.get_entity_with_name(product, product_id, location, project_id)
      self.log.info('Updating Product: %s', product.name)
     response = client.update_product(
         product=product, update_mask=update_mask, retry=retry, timeout=timeout, metadata=metadata
     )
     self.log.info('Product updated: %s', response.name if response else '')
     self.log.debug('Product updated:\n%s', response)
     return MessageToDict(response)
Example #4
def pull_mta_bus():
    ssl._create_default_https_context = ssl._create_unverified_context
    busfeed = gtfs_realtime_pb2.FeedMessage()
    tripfeed = gtfs_realtime_pb2.FeedMessage()
    api_key = 'ab069f16-0419-41d9-903f-2020458dbc42'

    # Fetch the vehicle positions feed
    busresponse = requests.get(
        f'http://gtfsrt.prod.obanyc.com/vehiclePositions?key={api_key}',
        allow_redirects=True)
    busfeed.ParseFromString(busresponse.content)

    # Fetch the trip updates feed
    tripresponse = requests.get(
        f'http://gtfsrt.prod.obanyc.com/tripUpdates?key={api_key}',
        allow_redirects=True)
    tripfeed.ParseFromString(tripresponse.content)

    # Check if request is working
    #print('There are {} buses in the dataset.'.format(len(busfeed.entity)))
    #print('There are {} trip updates in the dataset.'.format(len(tripfeed.entity)))

    if len(tripfeed.entity) == 0:
        return

    dict_obj_bus = MessageToDict(busfeed)
    dict_obj_trip = MessageToDict(tripfeed)

    # Create dataframe of all vehicles that are currently on the network
    collector = []
    for bus in dict_obj_bus['entity']:
        row = OrderedDict()
        row['id'] = bus['id']
        row['route_id'] = bus['vehicle'].get('trip', {}).get('routeId')
        row['trip_id'] = bus['vehicle'].get('trip', {}).get('tripId')
        row['latitude'] = bus['vehicle']['position'].get('latitude', '')
        row['longitude'] = bus['vehicle']['position'].get('longitude', '')
        row['bearing'] = bus['vehicle']['position'].get('bearing', '')
        row['timestamp'] = bus['vehicle'].get('timestamp', '')
        row['bus_stop_id'] = bus['vehicle'].get('stopId', '')
        row['vehicle_id'] = bus['vehicle']['vehicle'].get('id', '')
        row['label'] = bus['vehicle']['vehicle'].get('label', '')
        collector.append(row)

    df_bus = pd.DataFrame(collector)
    df_bus['humantime'] = df_bus.apply(
        lambda row: datetime.datetime.fromtimestamp(int(row['timestamp'])),
        axis=1)
    feedtime = int(dict_obj_bus['header']['timestamp'])

    # Create dataframe of all trips updates
    collector = []
    for trip in dict_obj_trip['entity']:
        if trip['tripUpdate'].get('stopTimeUpdate') is not None:
            for i in range(len(trip['tripUpdate']['stopTimeUpdate'])):
                row = OrderedDict()
                row['trip_id'] = trip['tripUpdate'].get('trip',
                                                        {}).get('tripId')
                row['id'] = trip.get('id')
                row['vehicle_id'] = trip['tripUpdate'].get('vehicle',
                                                           {}).get('id')
                row['trip_startday'] = trip['tripUpdate'].get(
                    'trip', {}).get('startDate')
                row['route_id'] = trip['tripUpdate'].get('trip',
                                                         {}).get('routeId')
                row['delay'] = trip['tripUpdate'].get('delay')
                row['arrival_time'] = trip['tripUpdate']['stopTimeUpdate'][
                    i].get('arrival', {}).get('time')
                row['departure_time'] = trip['tripUpdate']['stopTimeUpdate'][
                    i].get('departure', {}).get('time')
                row['trip_stop_id'] = trip['tripUpdate']['stopTimeUpdate'][
                    i].get('stopId')
                row['trip_stop_seq'] = trip['tripUpdate']['stopTimeUpdate'][
                    i].get('stopSequence')
                collector.append(row)

    df_trip = pd.DataFrame(collector)
    # Merge the two feeds together to get the delay times of each bus
    merged_rt = df_bus.merge(df_trip, on=['trip_id', 'vehicle_id'], how='left')
    merged_rt = merged_rt.drop_duplicates(subset=['trip_id', 'vehicle_id'])
    # Clean the format of the dataframe
    merged_rt = merged_rt.drop(columns=['id_y', 'route_id_y'])
    merged_rt = merged_rt.rename(columns={
        "id_x": "id",
        "route_id_x": "route_id"
    })
    merged_rt['mode'] = "Bus"
    merged_rt['agency'] = "MTA Bus"
    merged_rt['region'] = 'New York'
    return merged_rt
Example #5
def _change_message(message):
    return MessageToDict(message, preserving_proto_field_name=True)
Example #6
def get_domain_assets():
    query = iroha.query('GetDomainAssets')
    ic.sign_query(query, user_private_key)
    response = net.send_query(query)
    data = MessageToDict(response)
    pprint(data, indent=2)
Example #7
 def add_device(self):
     device = Device(type='simulated_olt', )
     device = self.post('/api/v1/devices',
                        MessageToDict(device),
                        expected_http_code=200)
     return device
Example #8
 def add_vm(self):
     spec = self._translate()
     response = dict()
     response["changed"] = False
     sec_disk = self.params.get("secondary_disks")
     if sec_disk:
         schema = {
             "type": "array",
             "items": {
                 "type":
                 "object",
                 "oneOf": [
                     {
                         "properties": {
                             "autodelete": {
                                 "type": "boolean"
                             },
                             "type": {
                                 "type": "string",
                                 "enum":
                                 ["ssd", "hdd", "ssd-nonreplicated"],
                             },
                             "size": {
                                 "type": "number"
                             },
                             "description": {
                                 "type": "string"
                             },
                             "image_id": {
                                 "type": "string"
                             },
                             "snapshot_id": {
                                 "type": "string"
                             },
                         },
                         "required": ["size"],
                         "additionalProperties": False,
                     },
                     {
                         "properties": {
                             "autodelete": {
                                 "type": "boolean"
                             },
                             "description": {
                                 "type": "string"
                             },
                             "disk_id": {
                                 "type": "string"
                             },
                         },
                         "required": ["disk_id"],
                         "additionalProperties": False,
                     },
                 ],
             },
         }
         validate(instance=sec_disk, schema=schema)
     name = self.params.get("name")
     folder_id = self.params.get("folder_id")
     instance = self._get_instance(name, folder_id)
     if instance:
         compare_result = self._is_same(instance, spec)
         if compare_result:
             response["failed"] = True
             response["msg"] = ("Instance already exits and %s"
                                " request params are different" %
                                ", ".join(compare_result))
         else:
             response["response"] = instance
             response["failed"] = False
             response["changed"] = False
     else:
         params = self._get_instance_params(spec)
         operation = self.active_op_limit_timeout(
             self.params.get("active_operations_limit_timeout"),
             self.instance_service.Create,
             CreateInstanceRequest(**params),
         )
         cloud_response = self.waiter(operation)
         response.update(MessageToDict(cloud_response))
         response = response_error_check(response)
     return response
Example #9
def ProtoMessageToDict(protobuf_msg, **kwargs):
    if 'preserving_proto_field_name' not in kwargs:
        kwargs['preserving_proto_field_name'] = True

    return MessageToDict(protobuf_msg, **kwargs)
Example #10
    def test_update_server_by_collector(self, name=None):
        """ Create Server by Collector
        """
        self.test_create_server_by_collector()

        self.server = self.inventory_v1.Server.update(
            {
                'server_id': self.server.server_id,
                'os_type': 'WINDOWS',
                'cloud_service_group': 'ComputeEngine',
                'cloud_service_type': 'Instance',
                'data': {
                    'hardware': {
                        'core': 8,
                        'memory': 16
                    },
                    'os': {
                        'os_distro': 'windows2012',
                        'os_details': 'Windows 2012 ENT SP2'
                    },
                    'iam': {
                        'profile': {
                            'k1': 'v1',
                            'k2': 'v2'
                        }
                    },
                    'lv1': {
                        'lv2': {
                            'lv3': {
                                'k1': 'v1',
                                'k2': 'v2'
                            }
                        }
                    }
                },
                'metadata': {
                    'view': {
                        'sub_data': {
                            'layouts': [{
                                'name': 'Hardware',
                                'type': 'item',
                                'options': {
                                    'fields': [{
                                        'key': 'data.hardware.core',
                                        'name': 'Core'
                                    }, {
                                        'key': 'data.hardware.memory',
                                        'name': 'Memory'
                                    }]
                                }
                            }, {
                                'name': 'Hardware3',
                                'type': 'item',
                                'options': {
                                    'fields': [{
                                        'key': 'data.hardware.core',
                                        'name': 'Core2'
                                    }, {
                                        'key': 'data.hardware.memory',
                                        'name': 'Memory'
                                    }]
                                }
                            }, {
                                'name': 'Compute',
                                'type': 'item',
                                'options': {
                                    'fields': [{
                                        'key': 'data.compute.instance_id',
                                        'name': 'Instance ID2'
                                    }, {
                                        'key': 'data.platform.type',
                                        'name': 'Platform Type',
                                        'view_type': 'badge',
                                        'background_color': 'yellow'
                                    }]
                                }
                            }]
                        }
                    }
                },
                "reference": {
                    "resource_id": "resource-yyyy",
                    "external_link": "https://ddd.eee.fff"
                },
                'domain_id': self.domain.domain_id
            },
            metadata=(
                ('token', self.owner_token),
                ('job_id', utils.generate_id('job')),
                # ('collector_id', utils.generate_id('collector')),
                # ('plugin_id', utils.generate_id('plugin')),
                ('secret.secret_id', utils.generate_id('secret')),
                ('secret.service_account_id', utils.generate_id('sa')),
                ('secret.project_id', self.project.project_id),
                ('secret.provider', 'aws')
            ))

        self._print_data(self.server, 'test_update_server_by_collector_1')

        self.server = self.inventory_v1.Server.update(
            {
                'server_id': self.server.server_id,
                'data': {
                    'hardware': {
                        'core': 12
                    },
                    'route': {
                        'default_gateway': '192.168.0.1'
                    },
                    'compute': {
                        'instance_id': 'i-' + utils.random_string(),
                        'changed_key': 'changed_value'
                    },
                    'softwares': [{
                        'name': 'mysql',
                        'version': '1.0.0'
                    }, {
                        'name': 'apache',
                        'version': '2.0.0'
                    }],
                    'platform': {
                        'type': 'AZURE'
                    },
                    'iam': {
                        'profile': {
                            'k2': 'v2',
                            'k3': 'v3'
                        }
                    },
                    'lv1': {
                        'lv2': {
                            'lv3': {
                                'k2': 'v2',
                                'k3': 'v3'
                            }
                        }
                    }
                },
                'server_type': 'BAREMETAL',
                "metadata": {
                    'view': {
                        'sub_data': {
                            'layouts': [{
                                'name': 'Compute',
                                'type': 'item',
                                'options': {
                                    'fields': [{
                                        'key': 'data.compute.instance_id',
                                        'name': 'Instance ID2'
                                    }, {
                                        'key': 'data.platform.type',
                                        'name': 'Platform Type2',
                                        'view_type': 'badge',
                                        'background_color': 'yellow'
                                    }]
                                }
                            }]
                        }
                    }
                },
                'domain_id': self.domain.domain_id
            },
            metadata=(
                ('token', self.owner_token),
                ('job_id', utils.generate_id('job')),
                # ('plugin_id', utils.generate_id('plugin')),
                # ('collector_id', utils.generate_id('collector')),
                ('secret.secret_id', utils.generate_id('secret')),
                ('secret.service_account_id', utils.generate_id('sa')),
                ('secret.project_id', self.project.project_id),
                ('secret.provider', 'aws'),
                ('update_mode', 'MERGE')
            ))

        server_data = MessageToDict(self.server, preserving_proto_field_name=True)

        self._print_data(self.server, 'test_update_server_by_collector_2')
Example #11
    def test_to_api_repr_proto_explicit(self):
        import datetime
        from google.protobuf.json_format import MessageToDict
        from google.cloud.logging_v2.resource import Resource
        from google.cloud._helpers import _datetime_to_rfc3339
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        LOG_NAME = "test.log"
        message = Struct(fields={"foo": Value(bool_value=True)})
        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {
            "requestMethod": METHOD,
            "requestUrl": URI,
            "status": STATUS
        }
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(type="gae_app",
                            labels={
                                "module_id": "default",
                                "version_id": "test"
                            })
        TRACE = "12345678-1234-5678-1234-567812345678"
        SPANID = "000000000000004a"
        FILE = "my_file.py"
        LINE = 123
        FUNCTION = "my_function"
        SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION}
        OP_ID = "OP_ID"
        PRODUCER = "PRODUCER"
        OPERATION = {
            "id": OP_ID,
            "producer": PRODUCER,
            "first": True,
            "last": False
        }
        expected = {
            "logName": LOG_NAME,
            "protoPayload": MessageToDict(message),
            "labels": LABELS,
            "insertId": IID,
            "severity": SEVERITY,
            "httpRequest": REQUEST,
            "timestamp": _datetime_to_rfc3339(TIMESTAMP),
            "resource": RESOURCE._to_dict(),
            "trace": TRACE,
            "spanId": SPANID,
            "traceSampled": True,
            "sourceLocation": {
                "file": FILE,
                "line": str(LINE),
                "function": FUNCTION
            },
            "operation": OPERATION,
        }

        entry = self._make_one(
            log_name=LOG_NAME,
            payload=message,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
            source_location=SOURCE_LOCATION,
            operation=OPERATION,
        )

        self.assertEqual(entry.to_api_repr(), expected)
Example #12
                        datefmt='%d-%m-%y %H:%M:%S')
    logging.info(f"Args: {args}")

    stats1 = tfdv.load_stats_text(input_path=args.stats_file_1)
    stats2 = tfdv.load_stats_text(input_path=args.stats_file_2)

    schema1 = tfdv.infer_schema(statistics=stats1)

    # Custom rules; tweak as required. This is just an example.
    tfdv.get_feature(
        schema1,
        'I1').drift_comparator.jensen_shannon_divergence.threshold = 0.06

    # Calculate drift between the reference statistics (stats1) and the statistics from the new data (stats2)
    drift_anomalies = tfdv.validate_statistics(statistics=stats2,
                                               schema=schema1,
                                               previous_statistics=stats1)

    # Convert the protobuf anomalies message to a dict
    drift = MessageToDict(drift_anomalies)

    value = drift['driftSkewInfo'][0]['driftMeasurements'][0]['value']
    threshold = drift['driftSkewInfo'][0]['driftMeasurements'][0]['threshold']
    logging.info(
        f"JS divergence value: {value}, and JS divergence threshold: {threshold}"
    )
    drift_detected = True
    if value < threshold:
        drift_detected = False
    logging.info(f"Drift detected: {drift_detected}")
Example #13
def pb2dict(pb_msg):
    d = MessageToDict(pb_msg, including_default_value_fields=True,
                      preserving_proto_field_name=True)
    return d
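This helper turns on both flags so that fields left at their default values still appear in the dict, keyed by their original proto names. A short illustration, using ``type_pb2.Field`` (shipped with protobuf) purely as an example; note that newer protobuf releases rename ``including_default_value_fields``, so pin the library version if you depend on it:

from google.protobuf import type_pb2
from google.protobuf.json_format import MessageToDict

field = type_pb2.Field(name='count')

# Without the flag only the populated field appears: {'name': 'count'}
print(MessageToDict(field, preserving_proto_field_name=True))

# pb2dict also emits the unset scalar fields with their defaults,
# e.g. number: 0, json_name: '', kind: 'TYPE_UNKNOWN'.
print(pb2dict(field))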
Example #14
    def test_add_schedule(self, mock_parse_request, *args):
        params = {
            'topic': utils.random_string(),
            'options': {
                'aggregate': [
                    {
                        'query': {
                            'resource_type': 'identity.Project',
                            'query': {
                                'aggregate': [
                                    {
                                        'group': {
                                            'keys': [
                                                {
                                                    'key': 'project_id',
                                                    'name': 'project_id'
                                                },
                                                {
                                                    'key': 'name',
                                                    'name': 'project_name'
                                                }
                                            ]
                                        }
                                    }
                                ]
                            }
                        }
                    },
                    {
                        'join': {
                            'resource_type': 'inventory.Server',
                            'type': 'LEFT',
                            'keys': ['project_id'],
                            'query': {
                                'aggregate': [
                                    {
                                        'group': {
                                            'keys': [
                                                {
                                                    'key': 'project_id',
                                                    'name': 'project_id'
                                                }
                                            ],
                                            'fields': [
                                                {
                                                    'operator': 'count',
                                                    'name': 'server_count'
                                                }
                                            ]
                                        }
                                    }
                                ]
                            }
                        }
                    },
                    {
                        'join': {
                            'resource_type': 'inventory.CloudService',
                            'type': 'LEFT',
                            'keys': ['project_id'],
                            'query': {
                                'aggregate': [
                                    {
                                        'group': {
                                            'keys': [
                                                {
                                                    'key': 'project_id',
                                                    'name': 'project_id'
                                                }
                                            ],
                                            'fields': [
                                                {
                                                    'operator': 'count',
                                                    'name': 'cloud_service_count'
                                                }
                                            ]
                                        }
                                    }
                                ]
                            }
                        }
                    },
                    {
                        'formula': {
                            'eval': 'resource_count = server_count + cloud_service_count'
                        }
                    },
                    {
                        'sort': {
                            'key': 'resource_count',
                            'desc': True
                        }
                    }
                ],
                'page': {
                    'limit': 5
                }
            },
            'schedule': {
                'cron': '*/5 * * * *',
                'interval': 5,
                'minutes': [0, 10, 20, 30, 40, 50],
                'hours': [0, 6, 12, 18]
            },
            'tags': {
                utils.random_string(): utils.random_string()
            },
            'domain_id': utils.generate_id('domain')
        }
        mock_parse_request.return_value = (params, {})

        schedule_servicer = Schedule()
        schedule_info = schedule_servicer.add({}, {})

        print_message(schedule_info, 'test_add_schedule')
        schedule_data = MessageToDict(schedule_info, preserving_proto_field_name=True)

        self.assertIsInstance(schedule_info, schedule_pb2.ScheduleInfo)
        self.assertEqual(schedule_info.topic, params['topic'])
        self.assertEqual(schedule_info.state, schedule_pb2.ScheduleInfo.State.ENABLED)
        self.assertEqual(schedule_data['options'], params['options'])
        self.assertDictEqual(schedule_data['schedule'], params['schedule'])
        self.assertDictEqual(schedule_data['tags'], params['tags'])
        self.assertEqual(schedule_info.domain_id, params['domain_id'])
        self.assertIsNotNone(getattr(schedule_info, 'created_at', None))
Example #15
import tensorflow as tf
from google.protobuf.json_format import MessageToDict
import base64

flags = tf.flags

flags.DEFINE_string(
    'example_path',
    '/Users/sanqiang/git/ts/text_simplification_data/example_v7_val/shard_wikilarge_1976.example',
    'The path for examples.')

FLAGS = flags.FLAGS

if __name__ == '__main__':
    for example in tf.python_io.tf_record_iterator(FLAGS.example_path):
        obj = MessageToDict(tf.train.Example.FromString(example))

        output = ""
        for feature_name in obj["features"]["feature"]:
            for field in obj["features"]["feature"][feature_name]:
                if field == "bytesList":
                    val = base64.b64decode(obj["features"]["feature"]
                                           [feature_name][field]["value"][0])
                else:
                    val = obj["features"]["feature"][feature_name][field][
                        "value"]
                # if feature_name in ("trg_wds", "src_wds"):
                output += "%s:\t\t\t%s\n" % (feature_name, val)
        print(output)
        print("======")
Example #16
File: app.py Project: trtin/jina
 async def get_result_in_json(req_iter):
     return [
         MessageToDict(k)
         async for k in servicer.Call(request_iterator=req_iter,
                                      context=None)
     ]
Example #17
def pb_to_yaml(message):
    message_dict = MessageToDict(message)
    return dump_to_yaml_str(message_dict)
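``dump_to_yaml_str`` is a project-local helper that is not shown here; an equivalent sketch with PyYAML (an assumed stand-in) shows the same convert-then-dump pattern:

import yaml
from google.protobuf import struct_pb2
from google.protobuf.json_format import MessageToDict

def pb_to_yaml_sketch(message):
    # Same idea as above, with PyYAML standing in for dump_to_yaml_str.
    return yaml.safe_dump(MessageToDict(message), default_flow_style=False)

msg = struct_pb2.Struct(fields={'name': struct_pb2.Value(string_value='demo')})
print(pb_to_yaml_sketch(msg))   # name: demo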
Example #18
 def _generate_dicts(self, count):
     return [MessageToDict(m) for m in self._generate_messages(count)]
Example #19
 def _list_by_name(self, name, folder_id):
     instances = self.instance_service.List(
         ListInstancesRequest(folder_id=folder_id,
                              filter='name="%s"' % name))
     return MessageToDict(instances)
Example #20
def asset_deserialization(asset_state_data):
    asset = asset_pb2.Asset()
    asset.ParseFromString(asset_state_data)
    return MessageToDict(asset, preserving_proto_field_name=True)
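``ParseFromString`` followed by ``MessageToDict`` is the usual wire-bytes-to-dict round trip. Since ``asset_pb2`` is specific to this project, here is a self-contained sketch of the same pattern using a well-known type:

from google.protobuf import struct_pb2
from google.protobuf.json_format import MessageToDict

original = struct_pb2.Struct(fields={'asset_id': struct_pb2.Value(string_value='a-1')})
wire_bytes = original.SerializeToString()      # what would sit in the state store

decoded = struct_pb2.Struct()
decoded.ParseFromString(wire_bytes)
print(MessageToDict(decoded, preserving_proto_field_name=True))  # {'asset_id': 'a-1'}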
Example #21
def test_init(status_pb):
    assert StatusMessage(status_object=None)
    assert StatusMessage(status_object=status_pb)
    assert StatusMessage(status_object=MessageToDict(status_pb))
    assert StatusMessage(status_object=MessageToJson(status_pb))
Example #22
 def add_device(self):
     return self.post('/api/v1/devices',
                      MessageToDict(
                          Device(type=device_type,
                                 host_and_port=host_and_port)),
                      expected_code=200)
Example #23
    def dataPublish(self, message, context):
        global options
        grpcPeer = {}
        grpcPeerStr = context.peer()
        (grpcPeer['telemetry_proto'], grpcPeer['telemetry_node'],
         grpcPeer['telemetry_node_port']) = grpcPeerStr.split(":")
        grpcPeer['vendor'] = 'Huawei'

        if options.debug:
            metadata = dict(context.invocation_metadata())
            grpcPeer['user-agent'] = metadata['user-agent']
            #Example of grpcPeerStr -> 'ipv4:10.215.133.23:57775'
            grpcPeer['grpc_processing'] = 'huawei_grpc_dialout_pb2_grpc'
            grpcPeer['grpc_ulayer'] = 'GPB Telemetry'
            jsonTelemetryNode = json.dumps(grpcPeer, indent=2, sort_keys=True)
            logging.debug(jsonTelemetryNode)

        for new_msg in message:
            if options.verbose:
                logging.info('Huawei: Received GRPC-Data')
            elif options.debug:
                logging.debug('Huawei: Received GRPC-Data')
                logging.debug(new_msg.data)

            telemetry_msg = huawei_telemetry_pb2.Telemetry()
            telemetry_msg.ParseFromString(new_msg.data)

            telemetry_msg_dict = MessageToDict(
                telemetry_msg,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True)

            if options.debug:
                logging.debug("Huawei: Received GPB-Data as JSON")
                logging.debug(
                    json.dumps(telemetry_msg_dict, indent=2, sort_keys=True))

            message_header_dict = telemetry_msg_dict.copy()

            if 'data_gpb' in message_header_dict:
                del message_header_dict['data_gpb']

            (proto, path) = message_header_dict['sensor_path'].split(":")

            if options.debug:
                logging.debug("PROTOTYP=%s" % proto)

            for new_row in telemetry_msg.data_gpb.row:
                new_row_header_dict = MessageToDict(
                    new_row,
                    including_default_value_fields=True,
                    preserving_proto_field_name=True,
                    use_integers_for_enums=True)

                if 'content' in new_row_header_dict:
                    del new_row_header_dict['content']

                msg = select_gbp_methode(proto)

                msg.ParseFromString(new_row.content)
                content = MessageToDict(msg,
                                        including_default_value_fields=True,
                                        preserving_proto_field_name=True,
                                        use_integers_for_enums=True)

                message_dict = {}
                message_dict.update(
                    {'grpc': {
                        'grpcPeer': grpcPeer['telemetry_node']
                    }})
                message_dict.update({'data': {'content': content}})
                message_dict['data'].update(message_header_dict)
                message_dict['data'].update(new_row_header_dict)

                if options.verbose:
                    allkeys = parse_dict(content, ret='', level=0)
                    logging.info("Huawei: %s: %s" % (proto, allkeys))

                sendJsonTelemetryData(message_dict, grpcPeer['vendor'],
                                      message_header_dict['sensor_path'])
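``use_integers_for_enums=True`` above makes ``MessageToDict`` emit enum fields as their numeric values rather than their symbolic names, which keeps the resulting JSON compact and stable for downstream storage. A minimal illustration with ``descriptor_pb2``:

from google.protobuf import descriptor_pb2
from google.protobuf.json_format import MessageToDict

fd = descriptor_pb2.FieldDescriptorProto(
    name='status', type=descriptor_pb2.FieldDescriptorProto.TYPE_STRING)

print(MessageToDict(fd))                               # {'name': 'status', 'type': 'TYPE_STRING'}
print(MessageToDict(fd, use_integers_for_enums=True))  # {'name': 'status', 'type': 9}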
Example #24
 def modify(self, type, req, name):
     self.post(self.get_path(type, name, '/modify'),
               MessageToDict(req, preserving_proto_field_name=True),
               expected_code=200)
     return self.verify(type)
Example #25
    def list_alert_policies(self,
                            project_id: Optional[str] = None,
                            format_: Optional[str] = None,
                            filter_: Optional[str] = None,
                            order_by: Optional[str] = None,
                            page_size: Optional[int] = None,
                            retry: Optional[str] = DEFAULT,
                            timeout: Optional[float] = DEFAULT,
                            metadata: Optional[str] = None) -> Any:
        """
        Fetches all the alert policies identified by the filter passed as the
        ``filter_`` parameter. The desired return type can be specified by the
        ``format_`` parameter; the supported formats are "dict", "json" and
        None, which return a Python dictionary, stringified JSON and protobuf
        respectively.

        :param format_: (Optional) Desired output format of the result. The
            supported formats are "dict", "json" and None, which return a
            Python dictionary, stringified JSON and protobuf respectively.
        :type format_: str
        :param filter_:  If provided, this field specifies the criteria that
            must be met by alert policies to be included in the response.
            For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
        :type filter_: str
        :param order_by: A comma-separated list of fields by which to sort the result.
            Supports the same set of field references as the ``filter`` field. Entries
            can be prefixed with a minus sign to sort by the field in descending order.
            For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
        :type order_by: str
        :param page_size: The maximum number of resources contained in the
            underlying API response. If page streaming is performed per-
            resource, this parameter does not affect the return value. If page
            streaming is performed per-page, this determines the maximum number
            of resources in a page.
        :type page_size: int
        :param retry: A retry object used to retry requests. If ``None`` is
            specified, requests will be retried using a default configuration.
        :type retry: str
        :param timeout: The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        :type timeout: float
        :param metadata: Additional metadata that is provided to the method.
        :type metadata: str
        :param project_id: The project to fetch alerts from.
        :type project_id: str
        """
        client = self._get_policy_client()
        policies_ = client.list_alert_policies(
            name='projects/{project_id}'.format(project_id=project_id),
            filter_=filter_,
            order_by=order_by,
            page_size=page_size,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
        if format_ == "dict":
            return [MessageToDict(policy) for policy in policies_]
        elif format_ == "json":
            return [MessageToJson(policy) for policy in policies_]
        else:
            return policies_
Example #26
    def __init__(
        self,
        region,
        role_arn,
        resource_config,
        algorithm_specification=None,
        stopping_condition=None,
        vpc_config=None,
        enable_spot_training=False,
        interruptible=False,
        retries=0,
        cacheable=False,
        cache_version="",
    ):
        """
        :param Text region: The region in which to run the SageMaker job.
        :param Text role_arn: The ARN of the role to run in the SageMaker job.
        :param dict[Text,T] algorithm_specification: https://docs.aws.amazon.com/sagemaker/latest/dg/API_AlgorithmSpecification.html
        :param dict[Text,T] resource_config: https://docs.aws.amazon.com/sagemaker/latest/dg/API_ResourceConfig.html
        :param dict[Text,T] stopping_condition: https://docs.aws.amazon.com/sagemaker/latest/dg/API_StoppingCondition.html
        :param dict[Text,T] vpc_config: https://docs.aws.amazon.com/sagemaker/latest/dg/API_VpcConfig.html
        :param bool enable_spot_training: https://docs.aws.amazon.com/sagemaker/latest/dg/API_HyperParameterTrainingJobDefinition.html
        :param int retries: Number of times to retry.
        :param bool cacheable: Whether or not to use Flyte's caching system.
        :param Text cache_version: Update this to notify a behavioral change requiring the cache to be invalidated.
        """

        algorithm_specification = algorithm_specification or {}
        algorithm_specification["TrainingImage"] = (
            algorithm_specification.get("TrainingImage")
            or "825641698319.dkr.ecr.us-east-2.amazonaws.com/xgboost:1")
        algorithm_specification["TrainingInputMode"] = "File"

        job_config = ParseDict(
            {
                "Region": region,
                "ResourceConfig": resource_config,
                "StoppingCondition": stopping_condition,
                "VpcConfig": vpc_config,
                "AlgorithmSpecification": algorithm_specification,
                "RoleArn": role_arn,
            },
            sagemaker_pb2.SagemakerHPOJob(),
        )
        print(MessageToDict(job_config))

        # TODO: Optionally, pull timeout behavior from stopping condition and pass to Flyte task def.
        timeout = _datetime.timedelta(seconds=0)

        # TODO: The FlyteKit type engine is extensible so we can create a SagemakerInput type with custom
        # TODO:     parsing/casting logic. For now, we will use the Generic type since there is a little that needs
        # TODO:     to be done on Flyte side to unlock this cleanly.
        # TODO: This call to the super-constructor will be less verbose in future versions of Flytekit following a
        # TODO:     refactor.
        # TODO: Add more configurations to the custom dict. These are things that are necessary to execute the task,
        # TODO:     but might not affect the outputs (i.e. Running on a bigger machine). These are currently static for
        # TODO:     a given definition of a task, but will be more dynamic in the future. Also, it is possible to
        # TODO:     make it dynamic by using our @dynamic_task.
        # TODO: You might want to inherit the role ARN from the execution at runtime.
        super(SagemakerXgBoostOptimizer, self).__init__(
            type=_TASK_TYPE,
            metadata=_task_models.TaskMetadata(
                discoverable=cacheable,
                runtime=_task_models.RuntimeMetadata(0, "0.1.0b0",
                                                     "sagemaker"),
                timeout=timeout,
                retries=_literal_models.RetryStrategy(retries=retries),
                interruptible=interruptible,
                discovery_version=cache_version,
                deprecated_error_message="",
            ),
            interface=_interface.TypedInterface({}, {}),
            custom=MessageToDict(job_config),
        )

        # TODO: Add more inputs that we expect to change the outputs of the task.
        # TODO: We can add outputs too!
        # We use helper methods for adding to interface, thus overriding the one set above. This will be simplified post
        # refactor.
        self.add_inputs({
            "static_hyperparameters":
            _interface_model.Variable(
                _sdk_types.Types.Generic.to_flyte_literal_type(), ""),
            "train":
            _interface_model.Variable(
                _sdk_types.Types.MultiPartCSV.to_flyte_literal_type(), ""),
            "validation":
            _interface_model.Variable(
                _sdk_types.Types.MultiPartCSV.to_flyte_literal_type(), ""),
        })
        self.add_outputs({
            "model":
            _interface_model.Variable(
                _sdk_types.Types.Blob.to_flyte_literal_type(), "")
        })
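The ``ParseDict``/``MessageToDict`` round trip above validates the raw configuration against the ``SagemakerHPOJob`` schema before storing it as a plain dict in ``custom``. A minimal sketch of the same round trip; ``Struct`` is used here only because it is bundled with protobuf, so the keys and values are illustrative:

from google.protobuf import struct_pb2
from google.protobuf.json_format import MessageToDict, ParseDict

# Round trip: plain dict -> proto message -> plain dict. Struct accepts
# arbitrary keys; a concrete message type such as SagemakerHPOJob would
# additionally raise ParseError for keys it does not define.
config = ParseDict({'Region': 'us-east-2', 'RoleArn': 'arn:aws:iam::123:role/demo'},
                   struct_pb2.Struct())
print(MessageToDict(config))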
Example #27
def pull_mta_lirr():
    ssl._create_default_https_context = ssl._create_unverified_context
    # Parse the realtime subway feed
    api_key = 'CK3pt1u1k71RlZ7OzDnUm43Qa74inIjp7NkFOAxp'
    headers = {"x-api-key": api_key}

    feed = gtfs_realtime_pb2.FeedMessage()
    url = 'https://api-endpoint.mta.info/Dataservice/mtagtfsfeeds/lirr%2Fgtfs-lirr'  # TODO: turn this into a parameter later
    response = urllib.request.Request(url, headers=headers)
    xml = urllib.request.urlopen(response)
    feed.ParseFromString(xml.read())

    if len(feed.entity) == 0:
        return

    dict_obj_feed = MessageToDict(feed)

    collector = []
    for message in dict_obj_feed['entity']:
        # Only get trains that are currently active in the system
        if message.get('vehicle') is not None:
            row = OrderedDict()
            row['trip_id'] = message['vehicle']['trip'].get('tripId')
            train_id = message['vehicle']['trip'].get('tripId')
            stop_id = message['vehicle'].get('stopId')
            row['schedule_relation'] = message['vehicle']['trip'].get(
                'scheduleRelationship')
            row['stop_id'] = message['vehicle'].get('stopId')
            row['current_status'] = message['vehicle'].get('currentStatus')
            row['timestamp'] = dict_obj_feed['header']['timestamp']
            # loop through the trip feeds
            for message in dict_obj_feed['entity']:
                if message.get('tripUpdate') is not None and message[
                        'tripUpdate'].get('stopTimeUpdate') is not None:
                    # Only get if the trip_ids match
                    if train_id == message['tripUpdate']['trip'].get('tripId'):
                        #print(train_id)
                        # Loop through the list until stop_id matches. Otherwise take the most recent one, which is the last one
                        for i in range(
                                len(message['tripUpdate']['stopTimeUpdate'])):
                            if message['tripUpdate']['stopTimeUpdate'][i].get(
                                    'arrival', {}
                            ).get('delay') is not None and stop_id == message[
                                    'tripUpdate']['stopTimeUpdate'][i].get(
                                        'stopId'):
                                row['delay'] = message['tripUpdate'][
                                    'stopTimeUpdate'][i].get('arrival',
                                                             {}).get('delay')
                                break
                            if message['tripUpdate']['stopTimeUpdate'][i].get(
                                    'departure', {}
                            ).get('delay') is not None and stop_id == message[
                                    'tripUpdate']['stopTimeUpdate'][i].get(
                                        'stopId'):
                                row['delay'] = message['tripUpdate'][
                                    'stopTimeUpdate'][i].get('departure',
                                                             {}).get('delay')
                                break
                            else:
                                if message['tripUpdate']['stopTimeUpdate'][
                                        -1].get('arrival',
                                                {}).get('delay') is not None:
                                    row['delay'] = message['tripUpdate'][
                                        'stopTimeUpdate'][-1].get(
                                            'arrival', {}).get('delay')
                                elif message['tripUpdate']['stopTimeUpdate'][
                                        -1].get('departure',
                                                {}).get('delay') is not None:
                                    row['delay'] = message['tripUpdate'][
                                        'stopTimeUpdate'][-1].get(
                                            'departure', {}).get('delay')
                        continue
                    continue
            collector.append(row)
    df = pd.DataFrame(collector)
    df['humantime'] = df.apply(
        lambda row: datetime.datetime.fromtimestamp(int(row['timestamp'])),
        axis=1)
    df['mode'] = "Rail"
    df['agency'] = 'Long Island Rail Road'
    df['region'] = 'New York'
    return df
Example #28
    def _print_data(self, message, description=None):
        print()
        if description:
            print(f'[ {description} ]')

        self.pp.pprint(MessageToDict(message, preserving_proto_field_name=True))
Example #29
                            }
                        ],
                        "volumes": [
                            {
                                "persistentVolumeClaim": {
                                    "claimName": "pvc-fedlearner-default"
                                },
                                "name": "data"
                            }
                        ]
                    }
                },
                "pair": true,
                "replicas": 1
            }
        }
    }
}
                ''')
        ])

    return workflow


if __name__ == '__main__':
    print(
        json.dumps(
            MessageToDict(make_workflow_template(),
                          preserving_proto_field_name=True,
                          including_default_value_fields=True)))
Example #30
File: app.py Project: trtin/jina
 async def result_in_stream(req_iter):
     async for k in servicer.Call(request_iterator=req_iter, context=None):
         yield MessageToDict(k)