Code example #1
0
    def test_create_bq_dataset(self, mock_dataset, mock_client):
        """Verify create_bq_dataset creates the dataset built from the client's ref.

        Both the Dataset class and the BigQuery client are mocked, so no real
        API calls are made.
        """
        # Stub the plumbing: the dataset reference and the Dataset object
        # constructed from it are stand-in sentinels.
        mock_dataset.return_value = 'dataset_obj'
        mock_client.dataset.return_value = 'dataset_ref'

        utils.create_bq_dataset('project', 'dataset_base_name')

        # The client instance must be asked to create exactly the Dataset
        # object that was built.
        mock_client.return_value.create_dataset.assert_called_with(
            'dataset_obj')
Code example #2
0
    def setUp(self):
        """Create the integration-test pipeline and a BigQuery output dataset."""
        pipeline = TestPipeline(is_integration_test=True)
        self.test_pipeline = pipeline
        self.project = pipeline.get_option('project')

        # A dedicated dataset keeps this test's BigQuery output isolated.
        self.dataset_ref = utils.create_bq_dataset(
            self.project, self.OUTPUT_DATASET)
Code example #3
0
  def setUp(self):
    """Prepare the integration-test pipeline and its BigQuery output dataset."""
    self.test_pipeline = TestPipeline(is_integration_test=True)
    project = self.test_pipeline.get_option('project')
    self.project = project

    # BigQuery: dataset dedicated to this test's output.
    self.dataset_ref = utils.create_bq_dataset(project, self.OUTPUT_DATASET)
Code example #4
0
    def setUp(self):
        """Provision Pub/Sub topics/subscriptions and a BigQuery output table."""
        self.test_pipeline = TestPipeline(is_integration_test=True)
        self.project = self.test_pipeline.get_option('project')

        # --- Pub/Sub environment: one input and one output topic, each with
        # a subscription attached. ---
        from google.cloud import pubsub
        self.pub_client = pubsub.PublisherClient()
        self.pubsub_setup_client = PubSubSetupClient(project=self.project)

        self.input_topic = self.pubsub_setup_client.create_topic(INPUT_TOPIC)
        self.output_topic = self.pubsub_setup_client.create_topic(OUTPUT_TOPIC)

        self.input_sub = self.pubsub_setup_client.create_subscription(
            self.input_topic, INPUT_SUB)
        self.output_sub = self.pubsub_setup_client.create_subscription(
            self.output_topic, OUTPUT_SUB)

        # --- BigQuery environment ---
        self.dataset_ref = utils.create_bq_dataset(self.project,
                                                   OUTPUT_DATASET)
        self.bq_wrapper = BigQueryWrapper()
        schema = parse_table_schema_from_json(schemas.get_test_schema())

        # Create (or reuse) the output table; WRITE_APPEND lets reruns add
        # rows rather than fail on an existing table.
        self.table_ref = self.bq_wrapper.get_or_create_table(
            project_id=self.project,
            dataset_id=self.dataset_ref.dataset_id,
            table_id=OUTPUT_TABLE,
            schema=schema,
            create_disposition='CREATE_IF_NEEDED',
            write_disposition='WRITE_APPEND')
Code example #5
0
  def setUp(self):
    """Create the Pub/Sub input channel and the BigQuery output dataset."""
    # Pub/Sub: topic and subscription names get a per-run UUID suffix so
    # concurrent test runs cannot collide.
    self.test_pipeline = TestPipeline(is_integration_test=True)
    self.runner_name = type(self.test_pipeline.runner).__name__
    self.project = self.test_pipeline.get_option('project')
    self.uuid = str(uuid.uuid4())
    from google.cloud import pubsub
    self.pub_client = pubsub.PublisherClient()
    topic_path = self.pub_client.topic_path(
        self.project, self.INPUT_TOPIC + self.uuid)
    self.input_topic = self.pub_client.create_topic(topic_path)
    self.sub_client = pubsub.SubscriberClient()
    sub_path = self.sub_client.subscription_path(
        self.project, self.INPUT_SUB + self.uuid)
    self.input_sub = self.sub_client.create_subscription(
        sub_path, self.input_topic.name)

    # BigQuery: dataset holding the job's output table.
    self.dataset_ref = utils.create_bq_dataset(
        self.project, self.BIG_QUERY_DATASET_ID)
    self.output_table = "%s.output_table" % self.dataset_ref.dataset_id
Code example #6
0
  def setUp(self):
    """Create a uniquely-named Pub/Sub channel and a BigQuery dataset."""
    self.test_pipeline = TestPipeline(is_integration_test=True)
    self.project = self.test_pipeline.get_option('project')
    unique_suffix = str(uuid.uuid4())

    # Pub/Sub: per-run topic/subscription names prevent cross-test clashes.
    from google.cloud import pubsub
    self.pub_client = pubsub.PublisherClient()
    topic_name = self.pub_client.topic_path(
        self.project, self.INPUT_TOPIC + unique_suffix)
    self.input_topic = self.pub_client.create_topic(topic_name)

    self.sub_client = pubsub.SubscriberClient()
    sub_name = self.sub_client.subscription_path(
        self.project, self.INPUT_SUB + unique_suffix)
    self.input_sub = self.sub_client.create_subscription(
        sub_name, self.input_topic.name)

    # BigQuery: fresh dataset for this test's output.
    self.dataset_ref = utils.create_bq_dataset(
        self.project, self.OUTPUT_DATASET)

    # Millisecond timestamp marking the start of the test run.
    self._test_timestamp = int(time.time() * 1000)
Code example #7
0
  def setUp(self):
    """Set up per-run Pub/Sub resources and a BigQuery output dataset."""
    self.test_pipeline = TestPipeline(is_integration_test=True)
    self.project = self.test_pipeline.get_option('project')
    run_id = str(uuid.uuid4())

    # Pub/Sub: suffix resource names with a UUID so parallel runs never
    # contend for the same topic or subscription.
    from google.cloud import pubsub
    self.pub_client = pubsub.PublisherClient()
    self.input_topic = self.pub_client.create_topic(
        self.pub_client.topic_path(self.project, self.INPUT_TOPIC + run_id))

    self.sub_client = pubsub.SubscriberClient()
    subscription_path = self.sub_client.subscription_path(
        self.project, self.INPUT_SUB + run_id)
    self.input_sub = self.sub_client.create_subscription(
        subscription_path, self.input_topic.name)

    # BigQuery: dedicated dataset for this test's output.
    self.dataset_ref = utils.create_bq_dataset(
        self.project, self.OUTPUT_DATASET)

    # Start-of-test timestamp in milliseconds.
    self._test_timestamp = int(time.time() * 1000)