Example #1
    def test_relative_datetimes(self):
        mapping = MappingStep(
            sf_object="Account", fields=["Some_Datetime__c"], anchor_date="2020-07-01"
        )

        input_dt = datetime_from_salesforce("2020-07-08T09:37:57.373+0000")
        target = datetime.combine(date.today() + timedelta(days=7), input_dt.time())
        assert (
            adjust_relative_dates(
                mapping,
                ([], [0], date.today()),
                [salesforce_from_datetime(input_dt)],
                DataOperationType.INSERT,
            )
            == [salesforce_from_datetime(target)]
        )

        now = datetime.combine(mapping.anchor_date, datetime.now().time())
        assert (
            adjust_relative_dates(
                mapping,
                ([], [0], date.today()),
                [salesforce_from_datetime(now)],
                DataOperationType.INSERT,
            )
            == [salesforce_from_datetime(datetime.combine(date.today(), now.time()))]
        )

        assert adjust_relative_dates(
            mapping, ([], [0], date.today()), [""], DataOperationType.INSERT
        ) == [""]
Example #2
File: load.py Project: xapfrom/CumulusCI
    def _stream_queried_data(self, mapping, local_ids, query):
        """Get data from the local db"""

        statics = self._get_statics(mapping)
        total_rows = 0

        if mapping.anchor_date:
            date_context = mapping.get_relative_date_context(self.org_config)

        for row in query.yield_per(10000):
            total_rows += 1
            # Add static values to row
            pkey = row[0]
            row = list(row[1:]) + statics
            if mapping.anchor_date and (date_context[0] or date_context[1]):
                row = adjust_relative_dates(
                    mapping, date_context, row, DataOperationType.INSERT
                )
            if mapping.action is DataOperationType.UPDATE:
                if len(row) > 1 and all(f is None for f in row[1:]):
                    # Skip update rows that contain no values
                    total_rows -= 1
                    continue

            local_ids.append(pkey)
            yield row

        self.logger.info(
            f"Prepared {total_rows} rows for {mapping['action']} to {mapping['sf_object']}"
        )
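
Here date_context comes from mapping.get_relative_date_context(); the tests in the other examples pass it literally as ([0], [], date.today()) or ([], [0], date.today()), which suggests a (date column indices, datetime column indices, today) triple. A minimal sketch of the load-direction (INSERT) rule these examples exercise, under that reading and with a hypothetical helper name:

    from datetime import date, timedelta

    # Hypothetical sketch: on load, a stored date N days after the mapping's
    # anchor_date becomes N days after today, preserving relative spacing.
    def shift_on_load(stored: date, anchor: date, today: date) -> date:
        return today + (stored - anchor)

    # 2020-07-08 is 7 days past the 2020-07-01 anchor, so it lands a week out.
    assert shift_on_load(
        date(2020, 7, 8), date(2020, 7, 1), date.today()
    ) == date.today() + timedelta(days=7)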
Example #3
    def test_relative_dates(self):
        mapping = MappingStep(
            sf_object="Account", fields=["Some_Date__c"], anchor_date="2020-07-01"
        )

        target = date.today() + timedelta(days=7)
        assert adjust_relative_dates(
            mapping, ([0], [], date.today()), ["2020-07-08"], DataOperationType.INSERT
        ) == [target.isoformat()]

        assert adjust_relative_dates(
            mapping, ([0], [], date.today()), ["2020-07-01"], DataOperationType.INSERT
        ) == [date.today().isoformat()]

        assert adjust_relative_dates(
            mapping, ([0], [], date.today()), [""], DataOperationType.INSERT
        ) == [""]
Example #4
    def test_relative_dates__extract(self):
        mapping = MappingStep(
            sf_object="Account", fields=["Some_Date__c"], anchor_date="2020-07-01"
        )

        target = mapping.anchor_date + timedelta(days=7)
        input_date = (date.today() + timedelta(days=7)).isoformat()
        assert (
            adjust_relative_dates(
                mapping,
                ([0], [], date.today()),
                ["001000000000000", input_date],
                DataOperationType.QUERY,
            )
            == ["001000000000000", target.isoformat()]
        )

        assert (
            adjust_relative_dates(
                mapping,
                ([0], [], date.today()),
                ["001000000000000", date.today().isoformat()],
                DataOperationType.QUERY,
            )
            == ["001000000000000", mapping.anchor_date.isoformat()]
        )

        assert (
            adjust_relative_dates(
                mapping,
                ([0], [], date.today()),
                ["001000000000000", ""],
                DataOperationType.QUERY,
            )
            == ["001000000000000", ""]
        )
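
Extraction (DataOperationType.QUERY) runs the opposite way: dates relative to today in the org are rewritten to be relative to anchor_date in the dataset. A sketch of that inverse, again with a hypothetical helper name:

    from datetime import date, timedelta

    # Hypothetical extract-direction rule: the inverse of the load shift.
    def unshift_on_extract(stored: date, anchor: date, today: date) -> date:
        return anchor + (stored - today)

    # A date a week from today extracts as a week past the anchor, as asserted above.
    week_out = date.today() + timedelta(days=7)
    assert unshift_on_extract(week_out, date(2020, 7, 1), date.today()) == date(2020, 7, 8)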
Example #5
    def test_relative_datetimes_extract(self):
        mapping = MappingStep(
            sf_object="Account", fields=["Some_Datetime__c"], anchor_date="2020-07-01"
        )

        input_dt = datetime.now() + timedelta(days=7)
        target = datetime.combine(
            mapping.anchor_date + timedelta(days=7), input_dt.time()
        )
        assert (
            adjust_relative_dates(
                mapping,
                ([], [0], date.today()),
                ["001000000000000", salesforce_from_datetime(input_dt)],
                DataOperationType.QUERY,
            )
            == ["001000000000000", salesforce_from_datetime(target)]
        )
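
For datetime columns the test combines a shifted date with the original time of day, so only the date component moves. A sketch of that behavior with a hypothetical helper:

    from datetime import date, datetime, timedelta

    # Hypothetical datetime variant: shift the date part back to anchor-relative
    # while preserving the time of day, as the target computation above does.
    def unshift_datetime(value: datetime, anchor: date, today: date) -> datetime:
        return datetime.combine(anchor + (value.date() - today), value.time())

    now_plus_week = datetime.now() + timedelta(days=7)
    assert unshift_datetime(
        now_plus_week, date(2020, 7, 1), date.today()
    ) == datetime.combine(date(2020, 7, 8), now_plus_week.time())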
Example #6
    def _import_results(self, mapping, step):
        """Ingest results from the Bulk API query."""
        conn = self.session.connection()

        # Map SF field names to local db column names
        field_map = mapping.get_complete_field_map(include_id=True)
        columns = [field_map[f] for f in field_map]  # Get values in insertion order.

        record_type = mapping.record_type
        if record_type:
            columns.append("record_type")

        # TODO: log_progress needs to know our batch size, when made configurable.
        record_iterator = log_progress(step.get_results(), self.logger)
        if record_type:
            record_iterator = (record + [record_type] for record in record_iterator)

        # Convert relative dates to stable dates.
        if mapping.anchor_date:
            date_context = mapping.get_relative_date_context(self.org_config)
            if date_context[0] or date_context[1]:
                record_iterator = (
                    adjust_relative_dates(
                        mapping, date_context, record, DataOperationType.QUERY
                    )
                    for record in record_iterator
                )

        # Set Name field as blank for Person Account "Account" records.
        if (mapping.sf_object == "Account" and "Name" in field_map
                and self.org_config.is_person_accounts_enabled):
            # The record's Id is the first column, and field_map was built with
            # include_id=True, so columns.index already accounts for that offset.
            Name_index = columns.index(mapping.fields["Name"])
            IsPersonAccount_index = columns.index(mapping.fields["IsPersonAccount"])

            def strip_name_field(record):
                nonlocal Name_index, IsPersonAccount_index
                if record[IsPersonAccount_index].lower() == "true":
                    record[Name_index] = ""
                return record

            record_iterator = (strip_name_field(record)
                               for record in record_iterator)

        if mapping.get_oid_as_pk():
            self._sql_bulk_insert_from_records(
                connection=conn,
                table=mapping.table,
                columns=columns,
                record_iterable=record_iterator,
            )
        else:
            # If using the autogenerated id field, split out the returned records
            # into two separate streams and load into the main table and the sf_id_table
            values, ids = itertools.tee(record_iterator)
            f_values = (row[1:] for row in values)
            f_ids = (row[:1] for row in ids)

            values_chunks = self._sql_bulk_insert_from_records_incremental(
                connection=conn,
                table=mapping.table,
                columns=columns[1:],  # Strip off the Id column
                record_iterable=f_values,
            )
            ids_chunks = self._sql_bulk_insert_from_records_incremental(
                connection=conn,
                table=mapping.get_sf_id_table(),
                columns=["sf_id"],
                record_iterable=f_ids,
            )

            # Do the inserts one chunk at a time, driving the two
            # generators nested above in lockstep.
            consume(zip(values_chunks, ids_chunks))

        if "RecordTypeId" in mapping.fields:
            self._extract_record_types(
                mapping.sf_object, mapping.get_source_record_type_table(), conn
            )

        self.session.commit()
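
The autogenerated-id branch splits one record stream into two with itertools.tee and advances both insert generators in lockstep via consume(zip(...)). A minimal sketch of that pattern; consume here follows the standard itertools recipe, which may or may not be what CumulusCI imports:

    import collections
    import itertools

    def consume(iterator):
        # Standard itertools recipe: drain an iterator, discarding its values.
        collections.deque(iterator, maxlen=0)

    records = iter([["001A", "Acme"], ["001B", "Globex"]])
    values, ids = itertools.tee(records)
    f_values = (row[1:] for row in values)  # non-Id columns for the main table
    f_ids = (row[:1] for row in ids)        # Id column for the sf_id table

    # zip pulls one item from each branch alternately, so tee never buffers
    # more than a single pending row per branch.
    consume(zip(f_values, f_ids))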