def _populate_schema_resource(self, resource): if not self._schema: resource['schema'] = None else: resource['schema'] = { 'fields': _build_schema_resource(self._schema), }
def _populate_schema_resource(self, resource): if not self._schema: resource['schema'] = None else: resource['schema'] = { 'fields': _build_schema_resource(self._schema), }
def schema(self, value):
    """Set the table schema.

    ``None`` clears the stored schema; otherwise every item of *value*
    must be a :class:`SchemaField`, and the schema is stored as a
    ``{'fields': ...}`` resource dict.

    Raises:
        ValueError: if any item of *value* is not a ``SchemaField``.
    """
    if value is None:
        self._properties["schema"] = None
        return
    # De Morgan of the usual `not all(...)` check: reject on the first
    # non-SchemaField item.
    if any(not isinstance(field, SchemaField) for field in value):
        raise ValueError("Schema items must be fields")
    self._properties["schema"] = {"fields": _build_schema_resource(value)}
def _call_fut(self, resource):
    """Invoke the function under test (``_build_schema_resource``) on *resource*."""
    from google.cloud.bigquery import schema as schema_module
    return schema_module._build_schema_resource(resource)
def __convert_to_ems_job(job):
    # Convert a BigQuery client job (QueryJob / LoadJob / ExtractJob) into the
    # corresponding Ems* wrapper object; returns None (after a warning) for any
    # other job type.
    # NOTE(review): the double-underscore name and the explicit
    # EmsBigqueryClient.__convert_* calls suggest this is a method inside the
    # EmsBigqueryClient class body (name mangling only resolves there) — confirm.
    if isinstance(job, QueryJob):
        destination = job.destination
        # A query job may have no destination table; fall back to Nones.
        table_id, dataset_id, project_id = \
            (destination.table_id, destination.dataset_id, destination.project) \
            if destination is not None else (None, None, None)
        config = EmsQueryJobConfig(
            priority=EmsJobPriority[job.priority],
            destination_project_id=project_id,
            destination_dataset=dataset_id,
            destination_table=table_id,
            create_disposition=EmsBigqueryClient.
            __convert_to_ems_create_disposition(job.create_disposition),
            write_disposition=EmsBigqueryClient.
            __convert_to_ems_write_disposition(job.write_disposition),
            time_partitioning=EmsBigqueryClient.
            __convert_to_ems_time_partitioning(job.time_partitioning),
            labels=job.labels)
        return EmsQueryJob(job.job_id,
                           job.query,
                           config,
                           EmsJobState(job.state),
                           job.error_result,
                           job.created)
    elif isinstance(job, LoadJob):
        # Unlike the query branch, a load job's destination is assumed present.
        destination = job.destination
        table_id, dataset_id, project_id = destination.table_id, destination.dataset_id, destination.project
        # NOTE(review): the falsy-schema fallback is a list ([]) while the
        # truthy case is a dict — inconsistent types; confirm consumers accept both.
        schema = {
            "fields": _build_schema_resource(job.schema)
        } if job.schema else []
        config = EmsLoadJobConfig(
            schema=schema,
            # Only the first source URI is kept as the template.
            source_uri_template=job.source_uris[0] if job.source_uris else None,
            destination_project_id=project_id,
            destination_dataset=dataset_id,
            destination_table=table_id,
            create_disposition=EmsBigqueryClient.
            __convert_to_ems_create_disposition(job.create_disposition),
            write_disposition=EmsBigqueryClient.
            __convert_to_ems_write_disposition(job.write_disposition),
            labels=job.labels)
        # NOTE(review): error_result is hard-coded to None here, while the
        # QueryJob and ExtractJob branches propagate job.error_result —
        # confirm this is intentional and not a dropped field.
        return EmsLoadJob(job_id=job.job_id,
                          load_config=config,
                          state=EmsJobState(job.state),
                          error_result=None,
                          created=job.created)
    elif isinstance(job, ExtractJob):
        # Fully-qualified source table id: project.dataset.table
        table = f'{job.source.project}.{job.source.dataset_id}.{job.source.table_id}'
        destination_uris = job.destination_uris
        job_config = EmsExtractJobConfig(
            # Fall back to the Ems defaults when the job leaves these unset.
            compression=Compression(job.compression) if job.compression else Compression.NONE,
            destination_format=DestinationFormat(job.destination_format) if job.destination_format else DestinationFormat.CSV,
            field_delimiter=job.field_delimiter,
            print_header=job.print_header,
            labels=job.labels)
        return EmsExtractJob(job_id=job.job_id,
                             table=table,
                             destination_uris=destination_uris,
                             job_config=job_config,
                             state=EmsJobState(job.state),
                             error_result=job.error_result,
                             created=job.created)
    else:
        # Unknown job type: log and signal "no conversion" with None.
        LOGGER.warning(
            f"Unexpected job type for : {job.job_id}, with type class: {job.__class__}"
        )
        return None