Code example #1
File: load.py  Project: tcdahlberg/CumulusCI
    def _store_inserted_ids(self, mapping, job_id, local_ids_for_batch):
        """Get the job results and store inserted SF Ids in a new table"""
        id_table_name = self._reset_id_table(mapping)
        conn = self.session.connection()
        for batch_id, local_ids in local_ids_for_batch.items():
            try:
                results_url = "{}/job/{}/batch/{}/result".format(
                    self.bulk.endpoint, job_id, batch_id
                )
                # Download entire result file to a temporary file first
                # to avoid the server dropping connections
                with download_file(results_url, self.bulk) as f:
                    self.logger.info(
                        "  Downloaded results for batch {}".format(batch_id)
                    )
                    self._store_inserted_ids_for_batch(
                        f, local_ids, id_table_name, conn
                    )
                self.logger.info(
                    "  Updated {} for batch {}".format(id_table_name, batch_id)
                )
            except BulkDataException:
                raise
            except Exception as e:
                raise BulkDataException(
                    "Failed to download results for batch {} ({})".format(
                        batch_id, str(e)
                    )
                )

        self.session.commit()
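The comment in the example relies on download_file streaming the whole batch result to a local temporary file before anything is parsed. The helper itself is not shown on this page; below is a minimal sketch of what such a context manager could look like, assuming a requests-based download authenticated with the bulk client's headers() method. Names and details are illustrative, not necessarily the actual CumulusCI utility.

import tempfile
from contextlib import contextmanager

import requests


@contextmanager
def download_file(uri, bulk_api):
    # Hypothetical sketch: stream the HTTP response into a temporary file
    # so the full result is on disk before parsing begins, then yield the
    # rewound file object to the caller.
    resp = requests.get(uri, headers=bulk_api.headers(), stream=True)
    resp.raise_for_status()
    with tempfile.TemporaryFile("w+b") as f:
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)
        f.seek(0)
        yield f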
Code example #2
    def _get_results(self, batch_id, job_id):
        """Download and yield the result file for each result id of a query batch."""
        result_ids = self.bulk.get_query_batch_result_ids(batch_id, job_id=job_id)
        for result_id in result_ids:
            self.logger.info("Result id: {}".format(result_id))
            uri = "{}/job/{}/batch/{}/result/{}".format(
                self.bulk.endpoint, job_id, batch_id, result_id
            )
            with download_file(uri, self.bulk) as f:
                self.logger.info("Result {} downloaded".format(result_id))
                yield f
Code example #3
    def _get_results(self, batch_id, job_id):
        """Download and yield the result file for each result id of a query batch."""
        result_ids = self.bulk.get_query_batch_result_ids(batch_id, job_id=job_id)
        for result_id in result_ids:
            self.logger.info(f"Result id: {result_id}")
            uri = (
                f"{self.bulk.endpoint}/job/{job_id}/batch/{batch_id}/result/{result_id}"
            )
            with download_file(uri, self.bulk) as f:
                self.logger.info(f"Result {result_id} downloaded")
                yield f
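Since _get_results is a generator that yields one downloaded file object per result id, a caller can treat each yielded file as a CSV stream. A minimal consumption sketch, assuming UTF-8 CSV results and a hypothetical per-row handler _process_row:

import csv
import io

def _import_query_results(self, batch_id, job_id):
    # Hypothetical caller: each yielded object is a binary file containing
    # one CSV result set for the query batch.
    for f in self._get_results(batch_id, job_id):
        reader = csv.DictReader(io.TextIOWrapper(f, encoding="utf-8"))
        for row in reader:
            self._process_row(row)  # hypothetical row handler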
Code example #4
File: load.py  Project: force2b/CumulusCI
    def _process_job_results(self, mapping, job_id, local_ids_for_batch):
        """Get the job results and process the results. If we're raising for
        row-level errors, do so; if we're inserting, store the new Ids."""
        if mapping["action"] == "insert":
            id_table_name = self._reset_id_table(mapping)
            conn = self.session.connection()

        for batch_id, local_ids in local_ids_for_batch.items():
            try:
                results_url = (
                    f"{self.bulk.endpoint}/job/{job_id}/batch/{batch_id}/result"
                )
                # Download entire result file to a temporary file first
                # to avoid the server dropping connections
                with download_file(results_url, self.bulk) as f:
                    self.logger.info(
                        f"  Downloaded results for batch {batch_id}")
                    results_generator = self._generate_results_id_map(
                        f, local_ids)
                    if mapping["action"] == "insert":
                        self._sql_bulk_insert_from_csv(
                            conn,
                            id_table_name,
                            ("id", "sf_id"),
                            IteratorBytesIO(results_generator),
                        )
                        self.logger.info(
                            f"  Updated {id_table_name} for batch {batch_id}")
                    else:
                        for r in results_generator:
                            pass  # Drain generator to validate results

            except BulkDataException:
                raise
            except Exception as e:
                raise BulkDataException(
                    f"Failed to download results for batch {batch_id} ({str(e)})"
                )

        if mapping["action"] == "insert":
            self.session.commit()
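In the insert branch, _sql_bulk_insert_from_csv is fed an IteratorBytesIO, so the id-mapping rows produced by _generate_results_id_map are exposed as a readable binary stream instead of being materialized in memory. A rough sketch of such a wrapper, assuming the generator yields bytes chunks; this is an illustration, not necessarily the CumulusCI implementation:

import io

class IteratorBytesIO(io.RawIOBase):
    """Read-only binary stream backed by an iterator of bytes chunks."""

    def __init__(self, iterable):
        self.iterator = iter(iterable)
        self.leftover = b""

    def readable(self):
        return True

    def readinto(self, buffer):
        # Serve the caller's buffer from the current leftover bytes, or
        # from the next chunk of the underlying iterator.
        try:
            chunk = self.leftover or next(self.iterator)
        except StopIteration:
            return 0  # EOF
        output, self.leftover = chunk[: len(buffer)], chunk[len(buffer):]
        buffer[: len(output)] = output
        return len(output)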