def test_total_initially_correct(self) -> None:
     """A fresh drawer holds ten of each denomination, so its total is 10x one of each."""
     drawer: CashDrawer = CashDrawer()
     # One of every denomination adds up to $187.91.
     total: float = sum(denom.amount for denom in CashDenomination)
     assert_that(total, is_(187.91))
     assert_that(drawer.get_total(), is_(10 * total))
 def test_must_open_drawer_to_modify(self) -> None:
     """Both mutators must raise RuntimeError while the drawer is closed."""
     drawer: CashDrawer = CashDrawer()
     expected = "Cash drawer must be open to modify."
     for mutator in (drawer.add_count, drawer.remove_count):
         with pytest.raises(RuntimeError) as exc_info:
             mutator(CashDenomination.PENNY, 1)
         assert_that(str(exc_info.value), is_(expected))
示例#3
0
 def test_ge(self) -> None:
     """>= against itself, another column, and a scalar yields boolean frames."""
     left = Column[int]("test", [1, 2, 3])
     right = Column[int]("test", [4, 2, 5])
     expected_self = DataFrame({"test": [True, True, True]})
     expected_other = DataFrame({"test": [False, True, False]})
     expected_scalar = DataFrame({"test": [False, False, True]})
     assert_that(expected_self.equals(left >= left), is_(True))
     assert_that(expected_other.equals(left >= right), is_(True))
     assert_that(expected_scalar.equals(left >= 3), is_(True))
 def test_must_close_drawer_to_count(self) -> None:
     """Counting operations must raise RuntimeError while the drawer is open."""
     drawer: CashDrawer = CashDrawer()
     drawer.open(0.0)
     expected = "Cash drawer must be closed to count."
     for counter in (lambda: drawer.get_count(CashDenomination.PENNY),
                     drawer.get_total):
         with pytest.raises(RuntimeError) as exc_info:
             counter()
         assert_that(str(exc_info.value), is_(expected))
 def test_remove_count_negative(self) -> None:
     """remove_count must reject a negative count with ValueError."""
     drawer: CashDrawer = CashDrawer()
     drawer.open(0.0)
     with pytest.raises(ValueError) as exc_info:
         drawer.remove_count(CashDenomination.PENNY, -1)
     assert_that(str(exc_info.value), is_("Count must not be negative."))
示例#6
0
    def test_records_traverse_transform_with_mocks(self):
        """End-to-end check of BigQueryBatchFileLoads against a mocked client.

        Asserts that exactly one file, one destination, and one load job are
        produced for a single destination table.
        """
        destination = 'project1:dataset1.table1'

        # Fake a completed ('DONE', no errors) load job so the transform
        # observes a successful insert when it polls the client.
        job_reference = bigquery_api.JobReference()
        job_reference.projectId = 'project1'
        job_reference.jobId = 'job_name1'
        result_job = bigquery_api.Job()
        result_job.jobReference = job_reference

        mock_job = mock.Mock()
        mock_job.status.state = 'DONE'
        mock_job.status.errorResult = None
        mock_job.jobReference = job_reference

        bq_client = mock.Mock()
        bq_client.jobs.Get.return_value = mock_job

        bq_client.jobs.Insert.return_value = result_job

        transform = bqfl.BigQueryBatchFileLoads(
            destination,
            custom_gcs_temp_location=self._new_tempdir(),
            test_client=bq_client,
            validate=False,
            coder=CustomRowCoder())

        # Need to test this with the DirectRunner to avoid serializing mocks
        with TestPipeline('DirectRunner') as p:
            outputs = p | beam.Create(_ELEMENTS) | transform

            dest_files = outputs[
                bqfl.BigQueryBatchFileLoads.DESTINATION_FILE_PAIRS]
            dest_job = outputs[
                bqfl.BigQueryBatchFileLoads.DESTINATION_JOBID_PAIRS]

            jobs = dest_job | "GetJobs" >> beam.Map(lambda x: x[1])

            files = dest_files | "GetFiles" >> beam.Map(lambda x: x[1])
            # Reduce (destination, file) pairs to the distinct destinations.
            destinations = (
                dest_files
                | "GetDests" >>
                beam.Map(lambda x:
                         (bigquery_tools.get_hashable_destination(x[0]), x[1]))
                | "GetUniques" >> beam.combiners.Count.PerKey()
                | "GetFinalDests" >> beam.Keys())

            # All files exist
            _ = (files | beam.Map(
                lambda x: hamcrest_assert(os.path.exists(x), is_(True))))

            # One file per destination
            assert_that(files | beam.combiners.Count.Globally(),
                        equal_to([1]),
                        label='CountFiles')

            assert_that(destinations,
                        equal_to([destination]),
                        label='CheckDestinations')

            assert_that(jobs, equal_to([job_reference]), label='CheckJobs')
示例#7
0
        def check_many_files(output_pcs):
            """Assert spilling: 3 records spill, and only table1/table3 get
            one written file each (table2's records are all spilled)."""
            dest_file_pc = output_pcs[bqfl.WriteRecordsToFile.WRITTEN_FILE_TAG]
            spilled_records_pc = output_pcs[
                bqfl.WriteRecordsToFile.UNWRITTEN_RECORD_TAG]

            spilled_records_count = (spilled_records_pc
                                     | beam.combiners.Count.Globally())
            assert_that(spilled_records_count,
                        equal_to([3]),
                        label='spilled count')

            # Count written files per destination key.
            files_per_dest = (dest_file_pc
                              | beam.Map(lambda x: x).with_output_types(
                                  beam.typehints.KV[str, str])
                              | beam.combiners.Count.PerKey())
            files_per_dest = (
                files_per_dest
                | "GetDests" >> beam.Map(lambda x: (
                    bigquery_tools.get_hashable_destination(x[0]), x[1])))

            # Only table1 and table3 get files. table2 records get spilled.
            assert_that(files_per_dest,
                        equal_to([('project1:dataset1.table1', 1),
                                  ('project1:dataset1.table3', 1)]),
                        label='file count')

            # Check that the files exist
            _ = dest_file_pc | beam.Map(lambda x: x[1]) | beam.Map(
                lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
    def check_many_files(output_pcs):
      """Assert spilling: 3 records spill, and only table1/table3 get one
      written file each (table2's records are all spilled)."""
      dest_file_pc = output_pcs[bqfl.WriteRecordsToFile.WRITTEN_FILE_TAG]
      spilled_records_pc = output_pcs[
          bqfl.WriteRecordsToFile.UNWRITTEN_RECORD_TAG]

      spilled_records_count = (spilled_records_pc |
                               beam.combiners.Count.Globally())
      assert_that(spilled_records_count, equal_to([3]), label='spilled count')

      # Count written files per destination key.
      files_per_dest = (dest_file_pc
                        | beam.Map(lambda x: x).with_output_types(
                            beam.typehints.KV[str, str])
                        | beam.combiners.Count.PerKey())
      files_per_dest = (
          files_per_dest
          | "GetDests" >> beam.Map(
              lambda x: (bigquery_tools.get_hashable_destination(x[0]),
                         x[1])))

      # Only table1 and table3 get files. table2 records get spilled.
      assert_that(files_per_dest,
                  equal_to([('project1:dataset1.table1', 1),
                            ('project1:dataset1.table3', 1)]),
                  label='file count')

      # Check that the files exist
      _ = dest_file_pc | beam.Map(lambda x: x[1]) | beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
  def test_records_traverse_transform_with_mocks(self):
    """End-to-end check of BigQueryBatchFileLoads against a mocked client.

    Asserts that exactly one file, one destination, and one load job are
    produced for a single destination table.
    """
    destination = 'project1:dataset1.table1'

    # Fake a completed ('DONE', no errors) load job so the transform observes
    # a successful insert when it polls the client.
    job_reference = bigquery_api.JobReference()
    job_reference.projectId = 'project1'
    job_reference.jobId = 'job_name1'
    result_job = bigquery_api.Job()
    result_job.jobReference = job_reference

    mock_job = mock.Mock()
    mock_job.status.state = 'DONE'
    mock_job.status.errorResult = None
    mock_job.jobReference = job_reference

    bq_client = mock.Mock()
    bq_client.jobs.Get.return_value = mock_job

    bq_client.jobs.Insert.return_value = result_job

    transform = bqfl.BigQueryBatchFileLoads(
        destination,
        custom_gcs_temp_location=self._new_tempdir(),
        test_client=bq_client,
        validate=False)

    # Need to test this with the DirectRunner to avoid serializing mocks
    with TestPipeline('DirectRunner') as p:
      outputs = p | beam.Create(_ELEMENTS) | transform

      dest_files = outputs[bqfl.BigQueryBatchFileLoads.DESTINATION_FILE_PAIRS]
      dest_job = outputs[bqfl.BigQueryBatchFileLoads.DESTINATION_JOBID_PAIRS]

      jobs = dest_job | "GetJobs" >> beam.Map(lambda x: x[1])

      files = dest_files | "GetFiles" >> beam.Map(lambda x: x[1])
      # Reduce (destination, file) pairs to the distinct destinations.
      destinations = (
          dest_files
          | "GetDests" >> beam.Map(
              lambda x: (
                  bigquery_tools.get_hashable_destination(x[0]), x[1]))
          | "GetUniques" >> beam.combiners.Count.PerKey()
          | "GetFinalDests" >>beam.Keys())

      # All files exist
      _ = (files | beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True))))

      # One file per destination
      assert_that(files | beam.combiners.Count.Globally(),
                  equal_to([1]),
                  label='CountFiles')

      assert_that(destinations,
                  equal_to([destination]),
                  label='CheckDestinations')

      assert_that(jobs,
                  equal_to([job_reference]), label='CheckJobs')
 def test_remove_too_many_throws_exception(self) -> None:
     """Removing more of any denomination than is present must raise."""
     drawer: CashDrawer = CashDrawer()
     drawer.open(0.0)
     expected = "Cannot remove more than are present."
     for denomination in CashDenomination:
         with pytest.raises(ValueError) as exc_info:
             drawer.remove_count(denomination, 11)
         assert_that(str(exc_info.value), is_(expected))
 def test_cash_amount_changed_wrong_throws(self) -> None:
     """Closing the drawer with contents off by one coin must raise."""
     drawer: CashDrawer = CashDrawer()
     drawer.open(187.91)
     for denomination in CashDenomination:
         drawer.add_count(denomination, 1)
     # One extra penny makes the contents disagree with the declared amount.
     drawer.add_count(CashDenomination.PENNY, 1)
     with pytest.raises(RuntimeError) as exc_info:
         drawer.close()
     assert_that(str(exc_info.value), is_("Cash drawer contents incorrect."))
示例#12
0
    def check_files_created(output_pc):
      """Assert three files were written, all exist on disk, and destinations
      match the expected distinct set."""
      files = output_pc | "GetFiles" >> beam.Map(lambda x: x[1])
      file_count = files | "CountFiles" >> beam.combiners.Count.Globally()

      _ = files | "FilesExist" >> beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
      assert_that(file_count, equal_to([3]), label='check file count')

      destinations = output_pc | "GetDests" >> beam.Map(lambda x: x[0])
      assert_that(destinations, equal_to(list(_DISTINCT_DESTINATIONS)),
                  label='check destinations ')
示例#13
0
    def check_files_created(output_pc):
      """Assert three files were written, all exist on disk, and destinations
      match the expected distinct set."""
      files = output_pc | "GetFiles" >> beam.Map(lambda x: x[1])
      file_count = files | "CountFiles" >> beam.combiners.Count.Globally()

      _ = files | "FilesExist" >> beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
      assert_that(file_count, equal_to([3]), label='check file count')

      destinations = output_pc | "GetDests" >> beam.Map(lambda x: x[0])
      assert_that(destinations, equal_to(list(_DISTINCT_DESTINATIONS)),
                  label='check destinations ')
示例#14
0
    def check_many_files(output_pcs):
      """Assert the expected written-file counts per destination table and
      that every written file exists on disk."""
      dest_file_pc = output_pcs[bqfl.WriteRecordsToFile.WRITTEN_FILE_TAG]

      # Count written files per destination key.
      files_per_dest = (dest_file_pc
                        | beam.Map(lambda x: x).with_output_types(
                            beam.typehints.KV[str, str])
                        | beam.combiners.Count.PerKey())
      assert_that(files_per_dest,
                  equal_to([('project1:dataset1.table1', 4),
                            ('project1:dataset1.table2', 2),
                            ('project1:dataset1.table3', 1)]))

      # Check that the files exist
      _ = dest_file_pc | beam.Map(lambda x: x[1]) | beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
示例#15
0
    def check_many_files(output_pcs):
      """Assert the expected written-file counts per destination table and
      that every written file exists on disk."""
      dest_file_pc = output_pcs[bqfl.WriteRecordsToFile.WRITTEN_FILE_TAG]

      # Count written files per destination key.
      files_per_dest = (dest_file_pc
                        | beam.Map(lambda x: x).with_output_types(
                            beam.typehints.KV[str, str])
                        | beam.combiners.Count.PerKey())
      assert_that(files_per_dest,
                  equal_to([('project1:dataset1.table1', 4),
                            ('project1:dataset1.table2', 2),
                            ('project1:dataset1.table3', 1)]))

      # Check that the files exist
      _ = dest_file_pc | beam.Map(lambda x: x[1]) | beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
示例#16
0
    def check_files_created(output_pcs):
      """Assert three files were written under the WRITTEN_FILE_TAG output,
      all exist on disk, and destinations match the expected distinct set."""
      dest_file_pc = output_pcs[bqfl.WriteRecordsToFile.WRITTEN_FILE_TAG]

      # Here the file value is a (path, size) tuple, hence x[1][0] for path.
      files = dest_file_pc | "GetFiles" >> beam.Map(lambda x: x[1][0])
      file_count = files | "CountFiles" >> combiners.Count.Globally()

      _ = files | "FilesExist" >> beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
      assert_that(file_count, equal_to([3]), label='check file count')

      destinations = (
          dest_file_pc
          | "GetDests" >> beam.Map(
              lambda x: bigquery_tools.get_hashable_destination(x[0])))
      assert_that(destinations, equal_to(list(_DISTINCT_DESTINATIONS)),
                  label='check destinations ')
示例#17
0
def with_(*args, **kwargs):
    """Create a 'with' matcher spec from positional and/or keyword arguments.

    Each positional argument is wrapped in a hamcrest ``is_`` matcher; each
    keyword value is recursively wrapped via ``with_``.

    Returns:
        - the kwargs dict alone when no positional args were given,
        - a ``(tuple_of_matchers, kwargs)`` pair when both were given,
        - a single matcher when exactly one positional arg was given,
        - otherwise a tuple of matchers.

    Fixes for Python 3 (the old body was marked "#this is broken!"):
    ``dict.iterkeys()`` no longer exists, and ``map(...) or None`` never
    yields None because a map object is always truthy (and has no ``len``).
    """
    for key in kwargs:
        # Recursion bottoms out: a single value returns is_(value).
        kwargs[key] = with_(kwargs[key])
    arg_spec = [is_(arg) for arg in args] or None
    kwarg_spec = kwargs or None
    if arg_spec is None:
        return kwarg_spec
    if kwarg_spec is not None:
        return (tuple(arg_spec), kwarg_spec)
    if len(arg_spec) == 1:
        return arg_spec[0]
    return tuple(arg_spec)
    def check_files_created(output_pcs):
      """Assert three files were written under the WRITTEN_FILE_TAG output,
      all exist on disk, and destinations match the expected distinct set."""
      dest_file_pc = output_pcs[bqfl.WriteRecordsToFile.WRITTEN_FILE_TAG]

      files = dest_file_pc | "GetFiles" >> beam.Map(lambda x: x[1])
      file_count = files | "CountFiles" >> beam.combiners.Count.Globally()

      _ = files | "FilesExist" >> beam.Map(
          lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
      assert_that(file_count, equal_to([3]), label='check file count')

      destinations = (
          dest_file_pc
          | "GetDests" >> beam.Map(
              lambda x: bigquery_tools.get_hashable_destination(x[0])))
      assert_that(destinations, equal_to(list(_DISTINCT_DESTINATIONS)),
                  label='check destinations ')
示例#19
0
        def check_many_files(output_pcs):
            """Assert the expected written-file counts per destination table
            and that every written file exists on disk."""
            dest_file_pc = output_pcs[bqfl.WriteRecordsToFile.WRITTEN_FILE_TAG]

            # Count written files per destination key; values here are
            # (file_path, size) tuples.
            files_per_dest = (dest_file_pc
                              | beam.Map(lambda x: x).with_output_types(
                                  beam.typehints.KV[str, Tuple[str, int]])
                              | combiners.Count.PerKey())
            files_per_dest = (
                files_per_dest
                | "GetDests" >> beam.Map(lambda x: (
                    bigquery_tools.get_hashable_destination(x[0]), x[1])))
            assert_that(
                files_per_dest,
                equal_to([('project1:dataset1.table1', 4),
                          ('project1:dataset1.table2', 2),
                          ('project1:dataset1.table3', 1)]))

            # Check that the files exist
            _ = dest_file_pc | beam.Map(lambda x: x[1][0]) | beam.Map(
                lambda x: hamcrest_assert(os.path.exists(x), is_(True)))
示例#20
0
 def test_run_card_approved(self) -> None:
     """Every roll from 4 through 9 must produce an APPROVED result."""
     reader: CardReader = CardReader()
     for roll in range(4, 10):
         # Bind roll as a default so each patched randint returns this value.
         with mock.patch('random.randint', lambda x, y, r=roll: r):
             assert_that(reader.run_card(), is_(CardTransactionResult.APPROVED))
示例#21
0
 def test_twenty_dollar_bill(self) -> None:
     """The twenty dollar bill renders as '$20 Bill' and is worth 20.0."""
     denom = CashDenomination.TWENTY_DOLLAR_BILL
     assert_that(str(denom), is_("$20 Bill"))
     assert_that(denom.amount, close_to(20.0, 0.00001))
 def test_open_drawer_amount_negative(self) -> None:
     """Opening the drawer with a negative amount must raise ValueError."""
     drawer: CashDrawer = CashDrawer()
     with pytest.raises(ValueError) as exc_info:
         drawer.open(-0.01)
     assert_that(str(exc_info.value), is_("Amount must not be negative."))
 def ItShouldRaiseAnExceptionWhenLinkIsNotFound(self):
     """The matcher must reject markup whose link target does not match.

     Fixed: ``except (AssertionError, ), e:`` is Python 2-only syntax and a
     SyntaxError under Python 3; use ``except AssertionError:``.
     """
     try:
         assert_that( "<a href='/rotato'></a>", is_(containingALinkTowards("/potato")))
         # assert_that did not raise -> the matcher wrongly accepted the
         # markup; escalate with a non-AssertionError so it is not caught.
         raise Exception
     except AssertionError:
         pass
 def ItShouldNotRaiseAnExceptionWhenLinkIsFound(self):
     """The matcher accepts markup that contains the expected link."""
     markup = "<a href='/potato'></a>"
     assert_that(markup, is_(containingALinkTowards("/potato")))
 def ItShouldContainAFormWithTheNewBlogPostUrlAsItsAction(self):
     """Rendered page must contain a form posting to the new-blog URL."""
     newBlogPostPage = self.__getNewBlogPostPage()
     action_url = reverse("newBlog")
     selector = "form[action='%s'] " % (action_url)
     assert_that(newBlogPostPage, is_(containining_dom_object(selector)))
 def test_constructor_populates_drawer(self) -> None:
     """A newly constructed drawer holds ten of every denomination."""
     drawer: CashDrawer = CashDrawer()
     counts = [drawer.get_count(denom) for denom in CashDenomination]
     for count in counts:
         assert_that(count, is_(10))
示例#27
0
 def test_one_dollar_bill(self) -> None:
     """The one dollar bill renders as '$1 Bill' and is worth 1.0."""
     denom = CashDenomination.ONE_DOLLAR_BILL
     assert_that(str(denom), is_("$1 Bill"))
     assert_that(denom.amount, close_to(1.0, 0.00001))
示例#28
0
 def verify(self, name):
     """Fail with a descriptive message unless this expectation was fulfilled."""
     message = "expected %s to be called %s %s times, but was %s" % (
         name, self.checktype(), self.expected_ccount, self.ccount)
     assert_that(self.fulfilled, is_(True), message)
示例#29
0
 def test_ten_dollar_bill(self) -> None:
     """The ten dollar bill renders as '$10 Bill' and is worth 10.0."""
     denom = CashDenomination.TEN_DOLLAR_BILL
     assert_that(str(denom), is_("$10 Bill"))
     assert_that(denom.amount, close_to(10.0, 0.00001))
示例#30
0
 def test_approved(self) -> None:
     """APPROVED renders as 'Approved'."""
     result = CardTransactionResult.APPROVED
     assert_that(str(result), is_("Approved"))
示例#31
0
 def ItShouldContainTheSubmitBlogPostButton(self):
     """Rendered template must contain the submit-blog-post button element."""
     rendered = self.__getNewBlogPostTemplateRendered()
     assert_that(rendered, is_(containining_dom_object("#id_create_blog_post")))
示例#32
0
 def test_read_error(self) -> None:
     """READ_ERROR renders as 'Card Read Error'."""
     result = CardTransactionResult.READ_ERROR
     assert_that(str(result), is_("Card Read Error"))
示例#33
0
 def test_incorrect_pin(self) -> None:
     """INCORRECT_PIN renders as 'Incorrect PIN'."""
     result = CardTransactionResult.INCORRECT_PIN
     assert_that(str(result), is_("Incorrect PIN"))
示例#34
0
 def test_declined(self) -> None:
     """DECLINED renders as 'Declined'."""
     result = CardTransactionResult.DECLINED
     assert_that(str(result), is_("Declined"))
示例#35
0
 def test_fifty_dollar_bill(self) -> None:
     """The fifty dollar bill renders as '$50 Bill' and is worth 50.0."""
     denom = CashDenomination.FIFTY_DOLLAR_BILL
     assert_that(str(denom), is_("$50 Bill"))
     assert_that(denom.amount, close_to(50.0, 0.00001))
示例#36
0
 def test_insufficient_funds(self) -> None:
     """INSUFFICIENT_FUNDS renders as 'Insufficient Funds'."""
     result = CardTransactionResult.INSUFFICIENT_FUNDS
     assert_that(str(result), is_("Insufficient Funds"))
示例#37
0
 def test_hundred_dollar_bill(self) -> None:
     """The hundred dollar bill renders as '$100 Bill' and is worth 100.0."""
     denom = CashDenomination.HUNDRED_DOLLAR_BILL
     assert_that(str(denom), is_("$100 Bill"))
     assert_that(denom.amount, close_to(100.0, 0.00001))
示例#38
0
 def test_enum_size(self) -> None:
     """CardTransactionResult defines exactly five members."""
     member_count = len(CardTransactionResult)
     assert_that(member_count, is_(5))
示例#39
0
 def test_enum_size(self) -> None:
     """CashDenomination defines exactly twelve members."""
     member_count = len(CashDenomination)
     assert_that(member_count, is_(12))
示例#40
0
    def test_multiple_partition_files(self):
        """Check partitioning: small size/count limits force multiple files,
        multiple load jobs, and copy jobs into the final destination.

        With max_file_size=45, max_partition_size=80 and
        max_files_per_partition=2, the input is split into 6 files and
        6 load jobs plus 6 copy jobs for the single destination.
        """
        destination = 'project1:dataset1.table1'

        # Fake a completed ('DONE', no errors) job so every insert succeeds.
        job_reference = bigquery_api.JobReference()
        job_reference.projectId = 'project1'
        job_reference.jobId = 'job_name1'
        result_job = mock.Mock()
        result_job.jobReference = job_reference

        mock_job = mock.Mock()
        mock_job.status.state = 'DONE'
        mock_job.status.errorResult = None
        mock_job.jobReference = job_reference

        bq_client = mock.Mock()
        bq_client.jobs.Get.return_value = mock_job

        bq_client.jobs.Insert.return_value = result_job
        bq_client.tables.Delete.return_value = None

        with TestPipeline('DirectRunner') as p:
            outputs = (p
                       | beam.Create(_ELEMENTS)
                       | bqfl.BigQueryBatchFileLoads(
                           destination,
                           custom_gcs_temp_location=self._new_tempdir(),
                           test_client=bq_client,
                           validate=False,
                           coder=CustomRowCoder(),
                           max_file_size=45,
                           max_partition_size=80,
                           max_files_per_partition=2))

            dest_files = outputs[
                bqfl.BigQueryBatchFileLoads.DESTINATION_FILE_PAIRS]
            dest_load_jobs = outputs[
                bqfl.BigQueryBatchFileLoads.DESTINATION_JOBID_PAIRS]
            dest_copy_jobs = outputs[
                bqfl.BigQueryBatchFileLoads.DESTINATION_COPY_JOBID_PAIRS]

            load_jobs = dest_load_jobs | "GetLoadJobs" >> beam.Map(
                lambda x: x[1])
            copy_jobs = dest_copy_jobs | "GetCopyJobs" >> beam.Map(
                lambda x: x[1])

            files = dest_files | "GetFiles" >> beam.Map(lambda x: x[1][0])
            # Reduce (destination, file) pairs to the distinct destinations.
            destinations = (
                dest_files
                | "GetDests" >>
                beam.Map(lambda x:
                         (bigquery_tools.get_hashable_destination(x[0]), x[1]))
                | "GetUniques" >> combiners.Count.PerKey()
                | "GetFinalDests" >> beam.Keys())

            # All files exist
            _ = (files | beam.Map(
                lambda x: hamcrest_assert(os.path.exists(x), is_(True))))

            # One file per destination
            assert_that(files | "CountFiles" >> combiners.Count.Globally(),
                        equal_to([6]),
                        label='CheckFileCount')

            assert_that(destinations,
                        equal_to([destination]),
                        label='CheckDestinations')

            assert_that(load_jobs
                        | "CountLoadJobs" >> combiners.Count.Globally(),
                        equal_to([6]),
                        label='CheckLoadJobCount')
            assert_that(copy_jobs
                        | "CountCopyJobs" >> combiners.Count.Globally(),
                        equal_to([6]),
                        label='CheckCopyJobCount')
示例#41
0
    def test_triggering_frequency(self, is_streaming, with_auto_sharding):
        """Check load-job triggering in batch vs. streaming mode.

        In streaming mode the triggering frequency splits the input into two
        triggered batches, so two files and two load jobs are expected; in
        batch mode a single file and job are expected.
        """
        destination = 'project1:dataset1.table1'

        # Fake a completed ('DONE', no errors) job so every insert succeeds.
        job_reference = bigquery_api.JobReference()
        job_reference.projectId = 'project1'
        job_reference.jobId = 'job_name1'
        result_job = bigquery_api.Job()
        result_job.jobReference = job_reference

        mock_job = mock.Mock()
        mock_job.status.state = 'DONE'
        mock_job.status.errorResult = None
        mock_job.jobReference = job_reference

        bq_client = mock.Mock()
        bq_client.jobs.Get.return_value = mock_job
        bq_client.jobs.Insert.return_value = result_job

        # Insert a fake clock to work with auto-sharding which needs a processing
        # time timer.
        class _FakeClock(object):
            # NOTE(review): the default 'now' is evaluated once at class
            # definition time; callers below always pass 'now' explicitly.
            def __init__(self, now=time.time()):
                self._now = now

            def __call__(self):
                return self._now

        start_time = timestamp.Timestamp(0)
        bq_client.test_clock = _FakeClock(now=start_time)

        triggering_frequency = 20 if is_streaming else None
        transform = bqfl.BigQueryBatchFileLoads(
            destination,
            custom_gcs_temp_location=self._new_tempdir(),
            test_client=bq_client,
            validate=False,
            temp_file_format=bigquery_tools.FileFormat.JSON,
            is_streaming_pipeline=is_streaming,
            triggering_frequency=triggering_frequency,
            with_auto_sharding=with_auto_sharding)

        # Need to test this with the DirectRunner to avoid serializing mocks
        with TestPipeline(
                runner='BundleBasedDirectRunner',
                options=StandardOptions(streaming=is_streaming)) as p:
            if is_streaming:
                # NOTE(review): 'fisrt_batch' is a typo for 'first_batch'
                # (kept as-is; it is a local name only).
                _SIZE = len(_ELEMENTS)
                fisrt_batch = [
                    TimestampedValue(value, start_time + i + 1)
                    for i, value in enumerate(_ELEMENTS[:_SIZE // 2])
                ]
                second_batch = [
                    TimestampedValue(value, start_time + _SIZE // 2 + i + 1)
                    for i, value in enumerate(_ELEMENTS[_SIZE // 2:])
                ]
                # Advance processing time between batches of input elements to fire the
                # user triggers. Intentionally advance the processing time twice for the
                # auto-sharding case since we need to first fire the timer and then
                # fire the trigger.
                test_stream = (
                    TestStream().advance_watermark_to(start_time).add_elements(
                        fisrt_batch).advance_processing_time(30).
                    advance_processing_time(30).add_elements(second_batch).
                    advance_processing_time(30).advance_processing_time(
                        30).advance_watermark_to_infinity())
                input = p | test_stream
            else:
                input = p | beam.Create(_ELEMENTS)
            outputs = input | transform

            dest_files = outputs[
                bqfl.BigQueryBatchFileLoads.DESTINATION_FILE_PAIRS]
            dest_job = outputs[
                bqfl.BigQueryBatchFileLoads.DESTINATION_JOBID_PAIRS]

            files = dest_files | "GetFiles" >> beam.Map(lambda x: x[1][0])
            # Reduce (destination, file) pairs to the distinct destinations.
            destinations = (
                dest_files
                | "GetDests" >>
                beam.Map(lambda x:
                         (bigquery_tools.get_hashable_destination(x[0]), x[1]))
                | "GetUniques" >> combiners.Count.PerKey()
                | "GetFinalDests" >> beam.Keys())
            jobs = dest_job | "GetJobs" >> beam.Map(lambda x: x[1])

            # Check that all files exist.
            _ = (files
                 | beam.Map(
                     lambda x: hamcrest_assert(os.path.exists(x), is_(True))))

            # Expect two load jobs are generated in the streaming case due to the
            # triggering frequency. Grouping is per trigger so we expect two entries
            # in the output as opposed to one.
            file_count = files | combiners.Count.Globally().without_defaults()
            expected_file_count = [1, 1] if is_streaming else [1]
            expected_destinations = [destination, destination
                                     ] if is_streaming else [destination]
            expected_jobs = [job_reference, job_reference
                             ] if is_streaming else [job_reference]
            assert_that(file_count,
                        equal_to(expected_file_count),
                        label='CountFiles')
            assert_that(destinations,
                        equal_to(expected_destinations),
                        label='CheckDestinations')
            assert_that(jobs, equal_to(expected_jobs), label='CheckJobs')
示例#42
0
 def test_absolute(self):
     """The absolute view keeps path, query, and fragment but drops scheme/host."""
     full = "http://www.google.com/segment1/segment2?param1=value1&param2=value2#fragment"
     expected = "/segment1/segment2?param1=value1&param2=value2#fragment"
     assert_that(uri(full).absolute, is_(expected))
示例#43
0
 def ItShouldContainTheContentInputField(self):
     """Rendered template must contain the content input field."""
     rendered = self.__getNewBlogPostTemplateRendered()
     assert_that(rendered, is_(containining_dom_object("#id_content")))