Example #1
 def test_get_tweets(self):
     tweet_db = models.Tweet(
         username="******",
         content="Test Content",
         posted_at=datetime.datetime.now(),
         last_edited_at=datetime.datetime.now(),
     )
     for tag in ["tag_1", "tag_2"]:
         tweet_db.tags.append(models.Tag(tag=tag))
     tweets = [tweet_db, tweet_db]
     self.twitter_clone_db.get_tweets.return_value = tweets
     all_tweets = []
     t1 = Timestamp()
     t2 = Timestamp()
     for tweet in tweets:
         if tweet.posted_at is not None:
             t1.FromDatetime(tweet.posted_at)
         if tweet.last_edited_at is not None:
             t2.FromDatetime(tweet.last_edited_at)
         else:
             t2 = t1
         ret_tweet = Tweet(
             id=tweet.id,
             username=tweet.username,
             content=tweet.content,
             posted_at=t1,
             last_edited_at=t2,
         )
         for tag in tweet.tags:
             ret_tweet.tag.append(tag.tag)
         all_tweets.append(ret_tweet)
     helper = Helper(self.twitter_clone_db)
     assert helper.get_tweets(self.tweet) == all_tweets
Example #2
def list_findings_at_time(source_name):
    # [START list_findings_at_a_time]
    from google.cloud import securitycenter
    from google.protobuf.timestamp_pb2 import Timestamp
    from datetime import timedelta, datetime

    # Create a new client.
    client = securitycenter.SecurityCenterClient()

    # source_name is the resource path for a source that has been
    # created previously (you can use list_sources to find a specific one).
    # Its format is:
    # source_name = "organizations/{organization_id}/sources/{source_id}"
    # e.g.:
    # source_name = "organizations/111122222444/sources/1234"
    # You can also use a wildcard "-" for all sources:
    #   source_name = "organizations/111122222444/sources/-"
    five_days_ago = Timestamp()
    five_days_ago.FromDatetime(datetime.now() - timedelta(days=5))
    # [END list_findings_at_a_time]
    i = -1
    five_days_ago.FromDatetime(datetime(2019, 3, 5, 0, 0, 0))
    # [START list_findings_at_a_time]

    finding_result_iterator = client.list_findings(source_name,
                                                   read_time=five_days_ago)
    for i, finding_result in enumerate(finding_result_iterator):
        print("{}: name: {} resource: {}".format(
            i, finding_result.finding.name,
            finding_result.finding.resource_name))
    # [END list_findings_at_a_time]
    return i
Example #3
    def ListenCurrentTime(self, request, context):
        title = 'ListenCurrentTime'
        client_list = self.current_time_subscribe_clients
        q = Queue()
        client_list.append(q)
        _LOGGER.info('%s %d', title, len(client_list))

        current = Timestamp()
        if self.current_datetime is None:
            current.FromDatetime(datetime.now() - timedelta(hours=9))
        else:
            current.FromDatetime(self.current_datetime)

        yield current

        while True:
            try:
                data = q.get(True, 1)
                yield data
            except gevent.queue.Empty:
                if not context.is_active():
                    break

        client_list.remove(q)
        _LOGGER.info('Done %s %d', title, len(client_list))
Example #4
 def get_timestamp(self, dt=None):
     """Makes timestamp out of a datatime (or returns timestamp
     for current time)""" 
     ts = Timestamp()
     if dt:
         ts.FromDatetime(dt)
     else:
         ts.FromDatetime(datetime.utcnow())
     return ts
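FromDatetime treats a naive datetime as UTC (an aware datetime is converted to UTC first), and ToDatetime is its inverse. A minimal round-trip sketch, assuming only the standard protobuf well-known types:

from datetime import datetime
from google.protobuf.timestamp_pb2 import Timestamp

ts = Timestamp()
ts.FromDatetime(datetime(2021, 1, 1, 12, 0, 0))            # naive datetime, interpreted as UTC
assert ts.ToDatetime() == datetime(2021, 1, 1, 12, 0, 0)   # round-trips to a naive UTC datetime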
Example #5
    def to_internal_job(self, data_store):
        # There should never be more than one active lease for a job. If we
        # have more than one for some reason, just take the first one.
        # TODO(SotK): Log some information here if there are multiple active
        # (ie. not completed or cancelled) leases.
        lease = (self.active_leases[0].to_protobuf()
                 if self.active_leases else None)
        q_timestamp = Timestamp()
        if self.queued_timestamp:
            q_timestamp.FromDatetime(self.queued_timestamp)
        q_time_duration = Duration()
        if self.queued_time_duration:
            q_time_duration.FromSeconds(self.queued_time_duration)
        ws_timestamp = Timestamp()
        if self.worker_start_timestamp:
            ws_timestamp.FromDatetime(self.worker_start_timestamp)
        wc_timestamp = Timestamp()
        if self.worker_completed_timestamp:
            wc_timestamp.FromDatetime(self.worker_completed_timestamp)

        requirements = {}
        for req in self.platform_requirements:
            values = requirements.setdefault(req.key, set())
            values.add(req.value)

        if self.name in data_store.response_cache:
            result = data_store.response_cache[self.name]
        elif self.result is not None:
            result_digest = string_to_digest(self.result)
            result = data_store.storage.get_message(result_digest,
                                                    ExecuteResponse)
        else:
            result = None

        return job.Job(self.do_not_cache,
                       string_to_digest(self.action_digest),
                       platform_requirements=requirements,
                       priority=self.priority,
                       name=self.name,
                       operations=[op.to_protobuf() for op in self.operations],
                       cancelled_operations=set(op.name
                                                for op in self.operations
                                                if op.cancelled),
                       lease=lease,
                       stage=self.stage,
                       cancelled=self.cancelled,
                       queued_timestamp=q_timestamp,
                       queued_time_duration=q_time_duration,
                       worker_start_timestamp=ws_timestamp,
                       worker_completed_timestamp=wc_timestamp,
                       done=all(op.done for op in self.operations)
                       and len(self.operations) > 0,
                       result=result,
                       worker_name=self.active_leases[0].worker_name
                       if self.active_leases else None,
                       n_tries=self.n_tries)
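Besides Timestamp.FromDatetime, the snippet above pairs it with Duration.FromSeconds for queued_time_duration. A minimal sketch of that pairing, with assumed values:

from datetime import datetime
from google.protobuf.duration_pb2 import Duration
from google.protobuf.timestamp_pb2 import Timestamp

queued = Timestamp()
queued.FromDatetime(datetime(2021, 5, 1, 10, 0, 0))  # when the job was queued (assumed value)
waited = Duration()
waited.FromSeconds(42)                               # time spent in the queue (assumed value)
print(queued.ToJsonString(), waited.ToJsonString())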
Example #6
 def setUpTestData(cls):
     cls.serializer_class = BillingRequestSerializer
     org_uuid = uuid.uuid4()
     before = tz.now()
     after = before - tz.timedelta(minutes=1)
     before_message = TimestampMessage()
     after_message = TimestampMessage()
     before_message.FromDatetime(before)
     after_message.FromDatetime(after)
     cls.valid_data = dict(org_uuid=org_uuid, before=before, after=after)
     cls.valid_message = pb2.BillingRequest(org_uuid=str(org_uuid), before=before_message, after=after_message)
Example #7
    def StartStreamToOnlineIngestionJob(
            self, request: StartStreamToOnlineIngestionJobRequest, context):
        """Start job to ingest data from stream into online store"""

        job_submission_count.labels("streaming", request.project,
                                    request.table_name).inc()

        if not self.is_whitelisted(request.project):
            raise ValueError(
                f"Project {request.project} is not whitelisted. Please contact your Feast administrator to whitelist it."
            )

        feature_table = self.client.feature_store.get_feature_table(
            request.table_name, request.project)

        if self.client.config.getboolean(opt.JOB_SERVICE_ENABLE_CONTROL_LOOP):
            # If the control loop is enabled, return existing stream ingestion job id instead of starting a new one
            params = get_stream_to_online_ingestion_params(
                self.client, request.project, feature_table, [])
            job_hash = params.get_job_hash()
            for job in list_jobs(include_terminated=True, client=self.client):
                if isinstance(
                        job,
                        StreamIngestionJob) and job.get_hash() == job_hash:
                    job_start_timestamp = Timestamp()
                    job_start_timestamp.FromDatetime(job.get_start_time())
                    return StartStreamToOnlineIngestionJobResponse(
                        id=job.get_id(),
                        job_start_time=job_start_timestamp,
                        table_name=job.get_feature_table(),
                        log_uri=job.get_log_uri(),  # type: ignore
                    )
            raise RuntimeError(
                "Feast Job Service has control loop enabled, "
                "but couldn't find the existing stream ingestion job for the given FeatureTable"
            )

        # TODO: add extra_jars to request
        job = start_stream_to_online_ingestion(
            client=self.client,
            project=request.project,
            feature_table=feature_table,
            extra_jars=[],
        )

        job_start_timestamp = Timestamp()
        job_start_timestamp.FromDatetime(job.get_start_time())
        return StartStreamToOnlineIngestionJobResponse(
            id=job.get_id(),
            job_start_time=job_start_timestamp,
            table_name=request.table_name,
            log_uri=job.get_log_uri(),  # type: ignore
        )
Example #8
def prepare_start(event):
    tmp_date = datetime.strptime(from_ts.get(), '%Y-%m-%d %H:%M')
    print(str(tmp_date))
    ts1 = Timestamp()
    ts1.FromDatetime(tmp_date)
    print(str(ts1))
    tmp_date1 = datetime.strptime(to_ts.get(), '%Y-%m-%d %H:%M')
    print(str(tmp_date1))
    ts2 = Timestamp()
    ts2.FromDatetime(tmp_date1)
    print(str(ts2))
    deveui = eui.get()
    print("cli.start")
    cli.start(ts1, ts2, deveui)
Example #9
    def create_message(cls, org_uuid=None, before=None, after=None):
        if not org_uuid:
            org_uuid = uuid.uuid4()
        if not before:
            before = tz.now()
        if not after:
            after = tz.now() - tz.timedelta(minutes=1)

        before_message = TimestampMessage()
        before_message.FromDatetime(before)
        after_message = TimestampMessage()
        after_message.FromDatetime(after)

        return pb2.BillingRequest(org_uuid=str(org_uuid), before=before_message, after=after_message)
Example #10
    async def send(self, product_id, app_id, space_id, payload, msg_type, from_id, job_filter, level, message,
                   expiration_at_hours=48, additional_params=None, schedule_at=None, request_id=None):
        """

        :param product_id:
        :param app_id:
        :param space_id:
        :param payload:
        :param msg_type: im (reserved), im_group (reserved), feed, system
        :param from_id:
        :param job_filter:
        :param level:
        :param message:
        :param expiration_at_hours:
        :param additional_params:
        :param schedule_at:
        :param request_id:
        :return:
        """
        msg_object = pbpush_pb2.MessageObject(msg_type=msg_type, message_body=message, from_id=from_id,
                                              space_id=space_id)
        expiration_at_stamp = Timestamp()
        expiration_at_stamp.FromDatetime(datetime.datetime.now() + datetime.timedelta(hours=expiration_at_hours))
        msg = pbpush_pb2.PushJob(product_id=product_id, app_id=app_id, job_filter=job_filter, level=level,
                                 payload=payload, schedule_at=schedule_at, expiration_at=expiration_at_stamp,
                                 msg_object=msg_object, additional_params=additional_params)

        push_job = await MServer().call("push-server", "Send", msg, request_id)

        if push_job:
            return push_job.job_id
Example #11
    async def get_birth_record(self, space_id, creator_id, account_id,
                               record_at, request_id):
        """

        :param space_id:
        :param creator_id:
        :param account_id:
        :param record_at:
        :param request_id:
        :return:
        """
        try:
            date = Timestamp()
            # Drop sub-second precision (and any tzinfo) from record_at
            # before converting it to a protobuf Timestamp.
            date.FromDatetime(
                datetime.datetime.strptime(
                    record_at.strftime("%Y-%m-%d %H:%M:%S"),
                    "%Y-%m-%d %H:%M:%S"))
            msg = pbrecord_pb2.GetBirthRecordRequest(space_id=space_id,
                                                     creator_id=creator_id,
                                                     account_id=account_id,
                                                     record_at=date)
            result = await MServer().call("record-server", "GetBirthRecord",
                                          msg, request_id)

            return result

        except grpc.RpcError as e:
            # except grpc._channel._Rendezvous as e:
            msg = "update record. code: {}, details:{}".format(
                e.code(), e.details())
            logging.warning(msg)

            return None
Example #12
    def handle_bidask_tick(self, code, data_arr):
        if len(data_arr) != 1:
            return

        if '_BA' in code:
            code = code[:code.index('_')]

        data = data_arr[0]
        tick_date = Timestamp()
        tick_date.FromDatetime(data['date'])
        bidask = stock_provider_pb2.CybosBidAskTickData(
            tick_date=tick_date,
            code=code,
            time=data['time'],
            volume=data['volume'],
            bid_prices=data['bid_prices'],
            ask_prices=data['ask_prices'],
            bid_remains=data['bid_remains'],
            ask_remains=data['ask_remains'],
            total_ask_remain=data['total_ask_remain'],
            total_bid_remain=data['total_bid_remain'],
            out_time_total_ask_remain=data['uni_ask_remain'],
            out_time_total_bid_remain=data['uni_bid_remain'])

        for q in self.bidask_subscribe_clients:
            q.put_nowait(bidask)
Example #13
 def protobuf_timestamp(x):
     dt = pd.to_datetime(x)
     if dt.tzinfo:
         dt = dt.tz_convert('utc').tz_localize(None)
     ts = Timestamp()
     ts.FromDatetime(dt.to_pydatetime())
     return ts
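A hedged usage sketch of the helper above (the input strings are illustrative only): a timezone-aware input is normalized to naive UTC before FromDatetime, so it produces the same Timestamp as the equivalent naive UTC input.

# Assumed usage of protobuf_timestamp defined above.
ts_aware = protobuf_timestamp("2021-06-01T09:00:00+09:00")   # +09:00, i.e. 00:00 UTC
ts_naive = protobuf_timestamp("2021-06-01T00:00:00")          # naive, treated as UTC
assert ts_aware == ts_naive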
Example #14
    def handle_stock_tick(self, code, data_arr):
        if len(data_arr) != 1:
            return

        data = data_arr[0]
        tick_date = Timestamp()
        tick_date.FromDatetime(data['date'] - timedelta(hours=9))

        tick_data = stock_provider_pb2.CybosTickData(
            tick_date=tick_date,
            code=code,
            company_name=data['1'],
            yesterday_diff=data['2'],
            time=data['3'],
            start_price=int(data['4']),
            highest_price=int(data['5']),
            lowest_price=int(data['6']),
            ask_price=int(data['7']),
            bid_price=int(data['8']),
            cum_volume=data['9'],
            cum_amount=data['10'],
            current_price=int(data['13']),
            buy_or_sell=(data['14'] == ord('1')),
            cum_sell_volume_by_price=data['15'],
            cum_buy_volume_by_price=data['16'],
            volume=data['17'],
            time_with_sec=data['18'],
            market_type_exp=data['19'],
            market_type=data['20'],
            out_time_volume=data['21'],
            cum_sell_volume=data['27'],
            cum_buy_volume=data['28'],
            is_kospi=preload.is_kospi(code))
        for q in self.stock_subscribe_clients:
            q.put_nowait(tick_data)
Example #15
def list_assets_with_filters_and_read_time(organization_id):
    """Demonstrate listing assets with a filter."""
    i = 0
    # [START demo_list_assets_with_filter_and_time]
    from datetime import datetime, timedelta

    from google.protobuf.timestamp_pb2 import Timestamp

    from google.cloud import securitycenter

    client = securitycenter.SecurityCenterClient()

    # organization_id is the numeric ID of the organization.
    # organization_id = "1234567777"
    org_name = "organizations/{org_id}".format(org_id=organization_id)

    project_filter = ("security_center_properties.resource_type=" +
                      '"google.cloud.resourcemanager.Project"')

    # Lists assets as of yesterday.
    read_time = datetime.utcnow() - timedelta(days=1)
    timestamp_proto = Timestamp()
    timestamp_proto.FromDatetime(read_time)

    # Call the API and print results.
    asset_iterator = client.list_assets(org_name,
                                        filter_=project_filter,
                                        read_time=timestamp_proto)
    for i, asset_result in enumerate(asset_iterator):
        print(i, asset_result)
    # [END demo_list_assets_with_filter_and_time]
    return i
Example #16
    def setUpTestData(cls):
        cls.serializer_class = ActiveContactDetailSerializer
        contact_uuid = uuid.uuid4()
        contact_name = "Joe"
        msg_uuid = uuid.uuid4()
        msg_text = "Hi, Joe!"

        msg_sent_on = tz.now()
        ts = TimestampMessage()
        ts.FromDatetime(msg_sent_on)
        msg_direction = "O"
        channel_uuid = uuid.uuid4()
        channel_name = "Test Channel"
        cls.data = dict(
            uuid=contact_uuid,
            name=contact_name,
            msg__uuid=msg_uuid,
            msg__text=msg_text,
            msg__sent_on=msg_sent_on,
            msg__direction=msg_direction,
            channel__uuid=channel_uuid,
            channel__name=channel_name,
        )
        cls.message = pb2.ActiveContactDetail(
            uuid=str(contact_uuid),
            name=contact_name,
            msg=pb2.Msg(
                uuid=str(msg_uuid),
                text=msg_text,
                sent_on=ts,
                direction=pb2.OUTPUT,
            ),
            channel=pb2.Channel(uuid=str(channel_uuid), name=channel_name),
        )
Example #17
def tick_to_grpc(tick):
    tick_date = Timestamp()
    tick_date.FromDatetime(tick['date'] - timedelta(hours=9))
    code = tick['code']

    tick_data = stock_provider_pb2.CybosTickData(
        tick_date=tick_date,
        code=code,
        company_name=tick['1'],
        yesterday_diff=tick['2'],
        time=tick['3'],
        start_price=int(tick['4']),
        highest_price=int(tick['5']),
        lowest_price=int(tick['6']),
        ask_price=int(tick['7']),
        bid_price=int(tick['8']),
        cum_volume=tick['9'],
        cum_amount=tick['10'],
        current_price=int(tick['13']),
        buy_or_sell=(tick['14'] == ord('1')),
        cum_sell_volume_by_price=tick['15'],
        cum_buy_volume_by_price=tick['16'],
        volume=tick['17'],
        time_with_sec=tick['18'],
        market_type_exp=tick['19'],
        market_type=tick['20'],
        out_time_volume=tick['21'],
        cum_sell_volume=tick['27'],
        cum_buy_volume=tick['28'])
    # let stock_service handle is_kospi
    return tick_data
Example #18
 def json_date_timestamp_to_proto(json_date_timestamp):
     """Parse String -Timestamp Format found in Twitter json"""
     date_time_record = dt.datetime.fromtimestamp(json_date_timestamp)
     protobuf_timestamp_record = Timestamp()
     # Convert to a protobuf Timestamp
     protobuf_timestamp_record.FromDatetime(date_time_record)
     return protobuf_timestamp_record
Example #19
    def handle_subject_tick(self, code, data_arr):
        if len(data_arr) != 1:
            return

        if '_' in code:
            code = code[:code.index('_')]

        data = data_arr[0]

        tick_date = Timestamp()
        tick_date.FromDatetime(data['date'] - timedelta(hours=9))
        tick_data = stock_provider_pb2.CybosSubjectTickData(
            tick_date=tick_date,
            time=data['0'],
            name=data['1'],
            code=code,
            company_name=data['3'],
            buy_or_sell=(data['4'] == ord('2')),
            volume=data['5'],
            total_volume=data['6'],
            foreigner_total_volume=data['8'])

        for q in self.subject_subscribe_clients:
            q.put_nowait(tick_data)
        gevent.sleep(SLEEP_DURATION)
Example #20
 def parse_timestamp_string_to_protobuf(timestamp_string):
     """Converts timestamp string to protobuf object"""
     # Parse from RFC 3339 date string to Timestamp.
     time_date = dt.datetime.fromtimestamp(int(timestamp_string))
     protobuf_timestamp_record = Timestamp()
     protobuf_timestamp_record.FromDatetime(time_date)
     return protobuf_timestamp_record
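Note that the helper above parses epoch seconds. If the input were an actual RFC 3339 string, Timestamp.FromJsonString can parse it directly; a minimal sketch with a hypothetical value:

from google.protobuf.timestamp_pb2 import Timestamp

ts = Timestamp()
ts.FromJsonString("2021-03-05T00:00:00Z")  # hypothetical RFC 3339 input
print(ts.seconds)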
Example #21
def group_findings_at_time(source_name):
    """Demonstrates grouping all findings across an organization as of
    a specific time."""
    i = -1
    # [START group_findings_at_time]
    from datetime import datetime, timedelta
    from google.cloud import securitycenter
    from google.protobuf.timestamp_pb2 import Timestamp

    # Create a client.
    client = securitycenter.SecurityCenterClient()

    # source_name is the resource path for a source that has been
    # created previously (you can use list_sources to find a specific one).
    # Its format is:
    # source_name = "organizations/{organization_id}/sources/{source_id}"
    # e.g.:
    # source_name = "organizations/111122222444/sources/1234"

    # Group findings as of yesterday.
    read_time = datetime.utcnow() - timedelta(days=1)
    timestamp_proto = Timestamp()
    timestamp_proto.FromDatetime(read_time)

    group_result_iterator = client.group_findings(source_name,
                                                  group_by="category",
                                                  read_time=timestamp_proto)
    for i, group_result in enumerate(group_result_iterator):
        print((i + 1), group_result)
    # [END group_findings_at_time]
    return i
Example #22
def search_change_history_events(account_id: str, property_id: str):
    """Lists the change history events for the Google Analytics 4 property
  within the specified date range."""
    client = AnalyticsAdminServiceClient()
    # Create a timestamp object and subtract 7 days from the current date/time.
    earliest_change_time = Timestamp()
    earliest_change_time.FromDatetime(datetime.now() - timedelta(days=7))

    results = client.search_change_history_events(
        SearchChangeHistoryEventsRequest(
            account=f"accounts/{account_id}",
            property=f"properties/{property_id}",
            action=["CREATED", "UPDATED"],
            earliest_change_time=earliest_change_time,
        ))

    print("Result:")
    for event in results:
        print(f"Event ID: {event.id}")
        print(f"Change time: {event.change_time}")
        print(f"Actor type: {ActorType(event.actor_type).name}")
        print(f"User actor e-mail: {event.user_actor_email}")
        print(f"Changes filtered: {event.changes_filtered}")
        for change in event.changes:
            print(" Change details")
            print(f"  Resource name: {change.resource}")
            print(f"  Action: {ActionType(change.action).name}")
            print("  Resource before change: ")
            print_resource(change.resource_before_change)
            print("  Resource after change: ")
            print_resource(change.resource_after_change)
        print()
Example #23
    def GetHistoricalFeatures(self, request: GetHistoricalFeaturesRequest,
                              context):
        """Produce a training dataset, return a job id that will provide a file reference"""

        if not self.is_whitelisted(request.project):
            raise ValueError(
                f"Project {request.project} is not whitelisted. Please contact your Feast administrator to whitelist it."
            )

        job = start_historical_feature_retrieval_job(
            client=self.client,
            project=request.project,
            entity_source=DataSource.from_proto(request.entity_source),
            feature_tables=self.client._get_feature_tables_from_feature_refs(
                list(request.feature_refs), request.project),
            output_format=request.output_format,
            output_path=request.output_location,
        )

        output_file_uri = job.get_output_file_uri(block=False)

        job_start_timestamp = Timestamp()
        job_start_timestamp.FromDatetime(job.get_start_time())

        return GetHistoricalFeaturesResponse(
            id=job.get_id(),
            output_file_uri=output_file_uri,
            job_start_time=job_start_timestamp,
        )
Example #24
    def StartOfflineToOnlineIngestionJob(
            self, request: StartOfflineToOnlineIngestionJobRequest, context):
        """Start job to ingest data from offline store into online store"""

        if not self.is_whitelisted(request.project):
            raise ValueError(
                f"Project {request.project} is not whitelisted. Please contact your Feast administrator to whitelist it."
            )

        feature_table = self.client.feature_store.get_feature_table(
            request.table_name, request.project)
        job = start_offline_to_online_ingestion(
            client=self.client,
            project=request.project,
            feature_table=feature_table,
            start=request.start_date.ToDatetime(),
            end=request.end_date.ToDatetime(),
        )

        job_start_timestamp = Timestamp()
        job_start_timestamp.FromDatetime(job.get_start_time())

        return StartOfflineToOnlineIngestionJobResponse(
            id=job.get_id(),
            job_start_time=job_start_timestamp,
            table_name=request.table_name,
            log_uri=job.get_log_uri(),  # type: ignore
        )
Example #25
    async def get_count_daily(self, space_id, account_id, filetype, date,
                              request_id):
        """
        
        :param space_id: 
        :param type: 
        :param date: 
        :return: 
        """
        try:
            date_stamp = Timestamp()
            date_stamp.FromDatetime(
                datetime.datetime.strptime(date, '%Y-%m-%d'))
            msg = pbfeed_pb2.GetCountDailyRequest(space_id=space_id,
                                                  account_id=account_id,
                                                  filetype=filetype,
                                                  date=date_stamp)
            pbcount_daily = await MServer().call("feed-server",
                                                 "GetCountDaily", msg,
                                                 request_id)
            return pbcount_daily

        except grpc.RpcError as e:
            msg = "code: {}, details:{}".format(e.code(), e.details())
            logging.warning(msg)

        return None
Example #26
def test_list_assets_with_filters_and_read_time():
    """Demonstrate listing assets with a filter."""
    from datetime import datetime, timedelta
    from google.cloud import securitycenter_v1beta1 as securitycenter
    from google.protobuf.timestamp_pb2 import Timestamp

    # [START demo_list_assets_with_filter_and_time]
    client = securitycenter.SecurityCenterClient()

    # ORGANIZATION_ID is the numeric ID of the organization (e.g. 123213123121)
    org_name = "organizations/{org_id}".format(org_id=ORGANIZATION_ID)

    project_filter = ("security_center_properties.resource_type=" +
                      '"google.cloud.resourcemanager.Project"')

    # Lists assets as of yesterday.
    read_time = datetime.utcnow() - timedelta(days=1)
    timestamp_proto = Timestamp()
    timestamp_proto.FromDatetime(read_time)

    # Call the API and print results.
    asset_iterator = client.list_assets(org_name,
                                        filter_=project_filter,
                                        read_time=timestamp_proto)
    for i, asset_result in enumerate(asset_iterator):
        print(i, asset_result)
    # [END demo_list_assets_with_filter_and_time]
    assert i > 0
Example #27
def datetime_to_timestamp(_datetime):
    # _datetime = datetime.datetime.now() - datetime.timedelta(days=180)
    if _datetime is None:
        return None
    else:
        timestamp = Timestamp()
        timestamp.FromDatetime(_datetime)
        return timestamp
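A brief usage sketch of datetime_to_timestamp (assumed values, not from the original module): the None guard lets optional datetime fields pass through unchanged, and ToDatetime recovers the original naive value.

import datetime

assert datetime_to_timestamp(None) is None                    # optional fields pass through
ts = datetime_to_timestamp(datetime.datetime(2020, 7, 1, 8, 30))
assert ts.ToDatetime() == datetime.datetime(2020, 7, 1, 8, 30)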
Example #28
 def json_date_string_to_proto(json_date_string: str):
     """Parse String -Date Format found in Twitter json"""
     date_time_record = dt.datetime.strptime(json_date_string,
                                             '%a %b %d %H:%M:%S +0000 %Y')
     protobuf_timestamp_record = Timestamp()
     # Convert to a protobuf Timestamp
     protobuf_timestamp_record.FromDatetime(date_time_record)
     return protobuf_timestamp_record
Example #29
 def get_tweets(self, user_id, offset, limit, created_at):
     time = Timestamp()
     time.FromDatetime(created_at)
     request = GetTweetsRequest(user_id=user_id,
                                offset=offset,
                                limit=limit,
                                created_at=time)
     return self.service.GetTweets(request)
Example #30
 def response_messages():
     for record in raw_data:
         # Convert the datetime format into timestamp for transmission
         timestamp = Timestamp()
         timestamp.FromDatetime(record[0])
         response = meterusage_pb2.AllRecordsResponse(
             time=timestamp, meterusage=record[1])
         # print(response)
         yield response