Example #1
    def create_direct_ingest_scheduler_queue_task(
        self,
        region: Region,
        just_finished_job: bool,
        delay_sec: int,
    ):
        # Schedule the task to run delay_sec seconds from now.
        schedule_time = datetime.datetime.now() + datetime.timedelta(
            seconds=delay_sec)

        # Convert to whole epoch seconds for the protobuf Timestamp.
        schedule_time_sec = datetime_helpers.to_milliseconds(
            schedule_time) // 1000
        schedule_timestamp = timestamp_pb2.Timestamp(seconds=schedule_time_sec)

        task_name = self._build_task_name_for_queue_and_region(
            DIRECT_INGEST_SCHEDULER_QUEUE, region.region_code, None)
        task = tasks.types.Task(
            name=task_name,
            schedule_time=schedule_timestamp,
            app_engine_http_request={
                'relative_uri':
                f'/direct/scheduler?region={region.region_code}&'
                f'just_finished_job={just_finished_job}',
                'body': json.dumps({}).encode()
            })
        self._queue_task(DIRECT_INGEST_SCHEDULER_QUEUE, task)
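The conversion idiom above (epoch milliseconds from to_milliseconds(), floor-divided to whole seconds, then wrapped in a protobuf Timestamp) recurs throughout these examples. A minimal standalone sketch of just that step; the datetime value is an arbitrary example:

import datetime

from google.api_core import datetime_helpers
from google.protobuf import timestamp_pb2

# An arbitrary timezone-aware datetime for illustration.
dt = datetime.datetime(2021, 1, 1, 12, 0, tzinfo=datetime.timezone.utc)

# to_milliseconds() returns milliseconds since the Unix epoch;
# Timestamp(seconds=...) takes whole seconds, hence the floor division.
millis = datetime_helpers.to_milliseconds(dt)         # 1609502400000
ts = timestamp_pb2.Timestamp(seconds=millis // 1000)  # seconds=1609502400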
Example #2
def undelete_table(client, table_id, recovered_table_id):
    # [START bigquery_undelete_table]
    import time

    # TODO(developer): Import the client library.
    # from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # TODO(developer): Choose a table to recover.
    # table_id = "your-project.your_dataset.your_table"

    # TODO(developer): Choose a new table ID for the recovered table data.
    # recovered_table_id = "your-project.your_dataset.your_table_recovered"

    # TODO(developer): Choose an appropriate snapshot point as epoch
    # milliseconds. For this example, we choose the current time as we're about
    # to delete the table immediately afterwards.
    snapshot_epoch = int(time.time() * 1000)

    # [START_EXCLUDE]
    from google.api_core import datetime_helpers

    # Because the table is deleted almost immediately after creation in this
    # sample, make sure the snapshot time is not earlier than the table's
    # creation time (clock drift between backend and client could otherwise
    # push it earlier).
    table = client.get_table(table_id)
    created_epoch = datetime_helpers.to_milliseconds(table.created)
    if created_epoch > snapshot_epoch:
        snapshot_epoch = created_epoch
    # [END_EXCLUDE]

    # "Accidentally" delete the table.
    client.delete_table(table_id)  # API request

    # Construct the restore-from table ID using a snapshot decorator.
    snapshot_table_id = "{}@{}".format(table_id, snapshot_epoch)

    # Construct and run a copy job.
    job = client.copy_table(
        snapshot_table_id,
        recovered_table_id,
        # Location must match that of the source and destination tables.
        location="US",
    )  # API request

    job.result()  # Wait for job to complete.

    print(
        "Copied data from deleted table {} to {}".format(table_id, recovered_table_id)
    )
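The recovery trick in this sample is BigQuery's snapshot decorator: "table_id@epoch_millis" names the table as it existed at that instant. A minimal sketch of the clamping logic above pulled into a helper; snapshot_decorator is a hypothetical name, not a library function:

import datetime
import time

from google.api_core import datetime_helpers


def snapshot_decorator(table_id: str, created: datetime.datetime) -> str:
    # Pick "now" as the snapshot point, but never earlier than the table's
    # creation time, then build the "table@epoch_millis" decorator string.
    snapshot_epoch = int(time.time() * 1000)
    created_epoch = datetime_helpers.to_milliseconds(created)
    return "{}@{}".format(table_id, max(snapshot_epoch, created_epoch))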
Example #3
import datetime
import time

import mock
import pytz
from six.moves import queue
from google.protobuf import timestamp_pb2

from google.api_core import datetime_helpers
from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.subscriber import message
from google.cloud.pubsub_v1.subscriber._protocol import requests


RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc)
RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000
PUBLISHED_MICROS = 123456
PUBLISHED = RECEIVED + datetime.timedelta(
    days=1, microseconds=PUBLISHED_MICROS)
PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000


def create_message(data, ack_id='ACKID', **attrs):
    with mock.patch.object(message.Message, 'lease') as lease:
        with mock.patch.object(time, 'time') as time_:
            time_.return_value = RECEIVED_SECONDS
            msg = message.Message(types.PubsubMessage(
                attributes=attrs,
                data=data,
                message_id='message_id',
                publish_time=timestamp_pb2.Timestamp(
                    seconds=PUBLISHED_SECONDS,
                    nanos=PUBLISHED_MICROS * 1000),
            ), ack_id, queue.Queue())
            return msg
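A hypothetical use of this fixture (payload and attribute values are arbitrary); publish_time round-trips to PUBLISHED because the Timestamp is rebuilt from PUBLISHED_SECONDS and PUBLISHED_MICROS:

msg = create_message(b'hello', ack_id='ACK-1', origin='test')
assert msg.ack_id == 'ACK-1'
assert msg.publish_time == PUBLISHED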
Example #4
import datetime
import queue
import time

import mock

from google.api_core import datetime_helpers
from google.cloud.pubsub_v1.subscriber import message
from google.cloud.pubsub_v1.subscriber._protocol import requests
from google.protobuf import timestamp_pb2
from google.pubsub_v1 import types as gapic_types

RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc)
RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000
PUBLISHED_MICROS = 123456
PUBLISHED = RECEIVED + datetime.timedelta(days=1,
                                          microseconds=PUBLISHED_MICROS)
PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000


def create_message(data,
                   ack_id="ACKID",
                   delivery_attempt=0,
                   ordering_key="",
                   **attrs):
    with mock.patch.object(time, "time") as time_:
        time_.return_value = RECEIVED_SECONDS
        gapic_pubsub_message = gapic_types.PubsubMessage(
            attributes=attrs,
            data=data,
            message_id="message_id",
            publish_time=timestamp_pb2.Timestamp(
                seconds=PUBLISHED_SECONDS,
                nanos=PUBLISHED_MICROS * 1000),
            ordering_key=ordering_key,
        )
        return message.Message(
            message=gapic_pubsub_message._pb,
            ack_id=ack_id,
            delivery_attempt=delivery_attempt,
            request_queue=queue.Queue(),
        )
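Example #4 is the same fixture as Example #3, ported to the 2.x client: the hand-written pubsub_v1.types is replaced by the generated google.pubsub_v1 types, pytz.utc by datetime.timezone.utc, six.moves.queue by the standard-library queue, and Message is constructed from the raw protobuf (gapic_pubsub_message._pb) with keyword arguments, including the newer delivery_attempt and ordering_key fields.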
Example #5
def test_to_milliseconds():
    dt = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=pytz.utc)
    assert datetime_helpers.to_milliseconds(dt) == 1000
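to_milliseconds() floor-divides microseconds down to milliseconds, so sub-millisecond precision is dropped. A sketch of an additional assertion along the same lines (assuming that flooring behavior, and the same imports as the test above):

def test_to_milliseconds_floors_microseconds():
    # 1500 microseconds past the epoch floors to 1 millisecond.
    dt = datetime.datetime(1970, 1, 1, 0, 0, 0, 1500, tzinfo=pytz.utc)
    assert datetime_helpers.to_milliseconds(dt) == 1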
Example #6
    def test_purge_docket_and_session(self, mock_sessions, mock_remove, mock_purge):
        scrape_key = ScrapeKey("us_va", constants.ScrapeType.BACKGROUND)
        mock_sessions.return_value = ["us_va_1", "us_va_2"]

        tracker.purge_docket_and_session(scrape_key)

        mock_purge.assert_called_with(scrape_key)
        mock_remove.assert_has_calls([call("us_va_1"), call("us_va_2")])


def get_payload():
    return [{"name": "Jacoby, Mackenzie"}, {"name": "Jacoby, Clementine"}]


PUBLISHED_SECONDS = (
    datetime_helpers.to_milliseconds(datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc))
    // 1000
)


def create_pubsub_message(
    content, ack_id="ACKID", published=PUBLISHED_SECONDS, **attrs
):
    return types.ReceivedMessage(
        message=types.PubsubMessage(
            attributes=attrs,
            data=json.dumps(content).encode(),
            message_id="message_id",
            publish_time=timestamp_pb2.Timestamp(seconds=published),
        ),
        ack_id=ack_id,
    )
Example #7
    def test_purge_docket_and_session(self, mock_sessions, mock_remove,
                                      mock_purge):
        scrape_key = ScrapeKey("us_va", constants.ScrapeType.BACKGROUND)
        mock_sessions.return_value = ['us_va_1', 'us_va_2']

        tracker.purge_docket_and_session(scrape_key)

        mock_purge.assert_called_with(scrape_key)
        mock_remove.assert_has_calls([call('us_va_1'), call('us_va_2')])


def get_payload():
    return [{'name': 'Jacoby, Mackenzie'}, {'name': 'Jacoby, Clementine'}]


PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(
    datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc)) // 1000


def create_pubsub_message(content,
                          ack_id="ACKID",
                          published=PUBLISHED_SECONDS,
                          **attrs):
    return types.ReceivedMessage(
        message=types.PubsubMessage(
            attributes=attrs,
            data=json.dumps(content).encode(),
            message_id="message_id",
            publish_time=timestamp_pb2.Timestamp(seconds=published),
        ),
        ack_id=ack_id,
    )
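A hypothetical use of this fixture in a test (the attribute is arbitrary; json and types are assumed to be imported as elsewhere in the snippet):

received = create_pubsub_message(get_payload(), ack_id='ACK-1',
                                 region='us_va')
assert received.ack_id == 'ACK-1'
assert received.message.publish_time.seconds == PUBLISHED_SECONDS
assert json.loads(received.message.data) == get_payload()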