Example #1
    def test_init_rest(self):
        request = utils.common.FakeRequest(
            args={}, data=simdjson.dumps({"name": "bucket"})
        )
        bucket, projection = gcs.bucket.Bucket.init(request, None)
        assert bucket.metadata.name == "bucket"
        assert projection == "noAcl"

        request = utils.common.FakeRequest(
            args={},
            data=simdjson.dumps(
                {
                    "name": "bucket",
                    "acl": [
                        json_format.MessageToDict(acl)
                        for acl in utils.acl.compute_predefined_bucket_acl(
                            "bucket", "authenticatedRead", None
                        )
                    ],
                }
            ),
        )
        bucket, projection = gcs.bucket.Bucket.init(request, None)
        assert bucket.metadata.name == "bucket"
        assert projection == "full"
        assert list(bucket.metadata.acl) == utils.acl.compute_predefined_bucket_acl(
            "bucket", "authenticatedRead", None
        )
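
As a side note, `json_format.MessageToDict` (from `google.protobuf`) converts a protobuf message into a JSON-ready dict, which is what lets the ACL entries above be embedded in the request body. A minimal self-contained illustration using a `Struct` message (the testbench's ACL types are not needed for this):

from google.protobuf import json_format
from google.protobuf.struct_pb2 import Struct

s = Struct()
s.update({"name": "bucket"})         # Struct behaves like a JSON object
print(json_format.MessageToDict(s))  # {'name': 'bucket'}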
Example #2
def get_neighbors_attr(graph, n, pred=False):
    """Get the neighbors attr of node in graph.

    Parameters
    ----------
    graph:
        the graph to query.
    n: node
        the node to get neighbors.
    report_type:
        the report type of report graph operation,
            types_pb2.SUCC_ATTR_BY_NODE: get the successors attr of node,
            types_pb2.PRED_ATTR_BY_NODE: get the predecessors attr of node,

    Returns
    -------
    attr: tuple
    """
    if graph.graph_type == graph_def_pb2.ARROW_PROPERTY:
        n = graph._convert_to_label_id_tuple(n)
    report_t = types_pb2.PRED_ATTR_BY_NODE if pred else types_pb2.SUCC_ATTR_BY_NODE
    op = dag_utils.report_graph(graph,
                                report_t,
                                node=simdjson.dumps(n).encode("utf-8"))
    archive = op.eval()
    return simdjson.loads(archive.get_bytes())
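
To make the wire step concrete: the node key is serialized to UTF-8 JSON bytes before being handed to the engine. A minimal sketch, assuming the pysimdjson package; the `(label_id, id)` tuple shape is an assumption based on `_convert_to_label_id_tuple`:

import simdjson

node = (0, 42)                                  # hypothetical (label_id, id) pair
payload = simdjson.dumps(node).encode("utf-8")  # tuples serialize as JSON arrays
print(payload)                                  # b'[0, 42]'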
Example #3
    def insert_test_bucket(self, context):
        bucket_name = os.environ.get(
            "GOOGLE_CLOUD_CPP_STORAGE_TEST_BUCKET_NAME", "bucket")
        if self.buckets.get(bucket_name) is None:
            if context is not None:
                request = storage_pb2.InsertBucketRequest(
                    bucket={"name": bucket_name})
            else:
                request = utils.common.FakeRequest(
                    args={}, data=simdjson.dumps({"name": bucket_name}))
            bucket_test, _ = gcs.bucket.Bucket.init(request, context)
            self.insert_bucket(request, bucket_test, context)
            bucket_test.metadata.metageneration = 4
            bucket_test.metadata.versioning.enabled = True
Example #4
def csek(context, rest_code=400, grpc_code=grpc.StatusCode.INVALID_ARGUMENT):
    msg = "Missing a SHA256 hash of the encryption key, or it is not"
    msg += " base64 encoded, or it does not match the encryption key."
    link = "https://cloud.google.com/storage/docs/encryption#customer-supplied_encryption_keys"
    error_msg = {
        "error": {
            "errors": [{
                "domain": "global",
                "reason": "customerEncryptionKeySha256IsInvalid",
                "message": msg,
                "extendedHelp": link,
            }],
            "code":
            rest_code,
            "message":
            msg,
        }
    }
    generic(simdjson.dumps(error_msg), rest_code, grpc_code, context)
Example #5
def get_node_data(graph, n):
    """Returns the attribute dictionary of node n.

    This is identical to `G[n]`.

    Parameters
    ----------
    n : nodes

    Returns
    -------
    node_dict : dictionary
        The node attribute dictionary.

    Examples
    --------
    >>> G = nx.path_graph(4)  # or DiGraph etc
    >>> G[0]
    {}

    Warning: Assigning to `G[n]` is not permitted.
    But it is safe to assign attributes `G[n]['foo']`

    >>> G[0]['weight'] = 7
    >>> G[0]['weight']
    7

    >>> G = nx.path_graph(4)  # or DiGraph etc
    >>> G.get_node_data(0, 1)
    {}

    """
    if graph.graph_type == graph_def_pb2.ARROW_PROPERTY:
        n = graph._convert_to_label_id_tuple(n)
    op = dag_utils.report_graph(graph,
                                types_pb2.NODE_DATA,
                                node=simdjson.dumps(n).encode("utf-8"))
    archive = op.eval()
    return msgpack.loads(archive.get_bytes(), use_list=False)
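
A side note on the decoding step: `use_list=False` makes msgpack decode arrays as tuples, matching the tuple-based contract of these helpers. A minimal self-contained illustration, assuming the msgpack package:

import msgpack

packed = msgpack.dumps(("a", [1, 2]))         # tuples and lists both pack as arrays
print(msgpack.loads(packed, use_list=False))  # ('a', (1, 2))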
Example #6
import simdjson


def writel_simdjson(filepath: str, data):
    # Write one JSON document per line (JSON Lines format).
    with open(filepath, "w") as fp:
        for line in data:
            fp.write(simdjson.dumps(line) + "\n")
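
A companion reader is not shown in the source; here is a minimal sketch, assuming the pysimdjson package (which provides `loads` alongside `dumps`):

import simdjson


def readl_simdjson(filepath: str):
    # Yield one parsed object per non-empty line of a JSON Lines file.
    with open(filepath, "r") as fp:
        for line in fp:
            if line.strip():
                yield simdjson.loads(line)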
Example #7
    def test_patch(self):
        # Updating requires full metadata, so we don't test it here.
        request = storage_pb2.InsertBucketRequest(
            bucket={
                "name": "bucket",
                "labels": {
                    "init": "true",
                    "patch": "false"
                },
                "website": {
                    "not_found_page": "notfound.html"
                },
            })
        bucket, projection = gcs.bucket.Bucket.init(request, "")
        assert bucket.metadata.labels.get("init") == "true"
        assert bucket.metadata.labels.get("patch") == "false"
        assert bucket.metadata.labels.get("method") is None
        assert bucket.metadata.website.main_page_suffix == ""
        assert bucket.metadata.website.not_found_page == "notfound.html"

        request = storage_pb2.PatchBucketRequest(
            bucket="bucket",
            metadata={
                "labels": {
                    "patch": "true",
                    "method": "grpc"
                },
                "website": {
                    "main_page_suffix": "bucket",
                    "not_found_page": "404"
                },
            },
            update_mask={"paths": ["labels", "website.main_page_suffix"]},
        )
        bucket.patch(request, "")
        # gRPC cannot update part of a map field.
        assert bucket.metadata.labels.get("init") is None
        assert bucket.metadata.labels.get("patch") == "true"
        assert bucket.metadata.labels.get("method") == "grpc"
        assert bucket.metadata.website.main_page_suffix == "bucket"
        # `update_mask` does not include `website.not_found_page`, so it is unchanged.
        assert bucket.metadata.website.not_found_page == "notfound.html"

        request = utils.common.FakeRequest(
            args={},
            data=simdjson.dumps({
                "name": "new_bucket",
                "labels": {
                    "method": "rest"
                },
                "website": {
                    "notFoundPage": "404.html"
                },
            }),
        )
        bucket.patch(request, None)
        # REST should only update modifiable fields.
        assert bucket.metadata.name == "bucket"
        # REST can update part of a map field.
        assert bucket.metadata.labels.get("init") is None
        assert bucket.metadata.labels.get("patch") == "true"
        assert bucket.metadata.labels.get("method") == "rest"
        assert bucket.metadata.website.main_page_suffix == "bucket"
        assert bucket.metadata.website.not_found_page == "404.html"
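
The map-replacement behavior asserted above comes from field-mask semantics: a path like "labels" selects the whole map field, so a gRPC patch replaces the map wholesale rather than merging keys. A minimal illustration of building such a mask with the protobuf runtime:

from google.protobuf import field_mask_pb2

mask = field_mask_pb2.FieldMask(paths=["labels", "website.main_page_suffix"])
print(list(mask.paths))  # ['labels', 'website.main_page_suffix']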
Example #8
                         username=args.username,
                         password=args.password,
                         verify=False,
                         timeout=5)
    except spi.SocratesConnectError as err:
        print('failed to connect to socrates: ' + str(err))
        sys.exit(1)

    timestamp_format = '%Y-%m-%d %H:%M:%S'

    push_before = datetime.now()
    status, response = s.push_raw_data(
        name='test',
        records=simdjson.dumps([{
            "test_key": "integration",
            "timestamp": datetime.now().strftime(timestamp_format),
        }]))
    if status is False:
        s.log(level='ERROR',
              app='test',
              procedure='s.push_raw_data',
              detail=response,
              message='failed to push raw data (str)')
        sys.exit(1)
    status, response = s.push_raw_data(
        name='test',
        records=[{
            "test_key": "integration",
            "timestamp": datetime.now().strftime(timestamp_format)
        }])
Example #9
    @classmethod
    def jldump(cls, data, f):
        # Write `data` as one JSON document followed by a newline (JSON Lines).
        f.write(json.dumps(data, ensure_ascii=False))
        f.write('\n')
Example #10
    @classmethod
    def jsondumps(cls, pdict, ensure_ascii=False):
        # Serialize to JSON, preserving non-ASCII characters by default.
        return json.dumps(pdict, ensure_ascii=ensure_ascii)
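
A minimal usage sketch for the two helpers above, assuming they are classmethods on some utility class; the `JsonUtil` name is hypothetical:

import io
import json


class JsonUtil:
    @classmethod
    def jldump(cls, data, f):
        f.write(json.dumps(data, ensure_ascii=False))
        f.write('\n')

    @classmethod
    def jsondumps(cls, pdict, ensure_ascii=False):
        return json.dumps(pdict, ensure_ascii=ensure_ascii)


buf = io.StringIO()
JsonUtil.jldump({"msg": "héllo"}, buf)
print(buf.getvalue(), end="")        # {"msg": "héllo"}
print(JsonUtil.jsondumps({"k": 1}))  # {"k": 1}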
Example #11
    def get(self,
            metric="tpu_mxu",
            node_id=None,
            interval=None,
            filters=None,
            raw=False,
            when=None,
            full_names=False):
        if when is None:
            when = utc()

        if '/' not in metric:
            metric = metrics[metric]

        if interval is None:
            now = time.time()
            seconds = int(now)
            nanos = int((now - seconds) * 10**9)
            interval = monitoring_v3.TimeInterval({
                "end_time": {"seconds": seconds, "nanos": nanos},
                "start_time": {"seconds": seconds - 1200, "nanos": nanos},
            })

        if filters is None:
            filters = []
        filters = filters[:]
        if node_id is not None:
            filters += [['resource.labels.node_id', node_id]]
        filters += [['metric.type', metric]]
        filters = ' AND '.join(
            ['{} = {}'.format(k, json.dumps(v)) for k, v in filters])

        results = self.client.list_time_series(
            request={
                "name": "projects/{project_id}".format(project_id=self.project_id),
                "filter": filters,
                "interval": interval,
                "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL,
            })
        if raw:
            return results
        points = collections.defaultdict(list)
        for timeSeries in results:
            key = get_time_series_label(timeSeries, short=not full_names)
            for point in timeSeries.points:
                point_utc = point.interval.start_time.timestamp()
                seconds_ago = int(when - point_utc)
                if timeSeries.value_type == 2:  # 2 == MetricDescriptor.ValueType.INT64
                    value = point.value.int64_value
                else:
                    value = point.value.double_value
                points[key].append([seconds_ago, value])
        return dict(points)
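
One detail worth noting: `json.dumps` is used here only to double-quote (and escape) the filter values in the Cloud Monitoring filter expression. A minimal self-contained illustration:

import json

filters = [['resource.labels.node_id', 'my-node'], ['metric.type', 'tpu_mxu']]
print(' AND '.join('{} = {}'.format(k, json.dumps(v)) for k, v in filters))
# resource.labels.node_id = "my-node" AND metric.type = "tpu_mxu"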