def test_list_jobs_w_parent_job_filter(client, PROJECT):
    """Both a plain job-ID string and an ``_AsyncJob`` instance passed as
    ``parent_job`` must be reduced to the same ``parentJobId`` query param."""
    from google.cloud.bigquery import job

    conn = client._connection = make_connection({}, {})

    for parent in ("parent-job-123", job._AsyncJob("parent-job-123", client)):
        list(client.list_jobs(parent_job=parent))
        conn.api_request.assert_called_once_with(
            method="GET",
            path="/projects/%s/jobs" % PROJECT,
            query_params={"projection": "full", "parentJobId": "parent-job-123"},
            timeout=None,
        )
        # Clear call history so the next iteration can assert a single call.
        conn.api_request.reset_mock()
# Example #2
# 0
def test_default_no_data_leakage(setup):
    """Span attributes must be derived only from the client/job handed in,
    plus the caller-supplied extras — nothing leaked from elsewhere."""
    import google.auth.credentials
    from google.cloud.bigquery import client
    from google.cloud.bigquery import job

    creds = mock.Mock(spec=google.auth.credentials.Credentials)

    def _make_client():
        # Fresh client per span, mirroring the original test's two clients.
        return client.Client(
            project="test_project", credentials=creds, location="test_location"
        )

    # Client-only span: attributes come from the client plus the extras.
    with opentelemetry_tracing.create_span(
        TEST_SPAN_NAME, attributes=TEST_SPAN_ATTRIBUTES, client=_make_client()
    ) as span:
        assert span.name == TEST_SPAN_NAME
        assert span.attributes == {
            "foo": "baz",
            "db.system": "BigQuery",
            "db.name": "test_project",
            "location": "test_location",
        }

    # Job-based span: attributes come from the job reference instead.
    job_ref = job._JobReference(
        job_id="test_job_id", project="test_project_id", location="test_location"
    )
    async_job = job._AsyncJob(job_id=job_ref, client=_make_client())

    with opentelemetry_tracing.create_span(
        TEST_SPAN_NAME, attributes=TEST_SPAN_ATTRIBUTES, job_ref=async_job
    ) as span:
        assert span.name == TEST_SPAN_NAME
        assert span.attributes == {
            "db.system": "BigQuery",
            "db.name": "test_project_id",
            "location": "test_location",
            "num_child_jobs": 0,
            "job_id": "test_job_id",
            "foo": "baz",
            "hasErrors": False,
        }
# Example #3
# 0
def check_job_finish(migset):
    """Poll the BigQuery load job recorded on *migset* and translate its
    terminal state into a ``MigrationSetJobResult``.

    Result convention (second argument of MigrationSetJobResult):
      * output row count  -> job finished successfully
      * 0                 -> job not DONE yet
      * -1                -> job failed or an exception occurred here

    Side effects: on failure the temp CSV file is deleted; on success it is
    moved to the configured "deleted" directory.
    NOTE(review): Python 2 code (print statements) — must run under py2.
    """
    #BQ_TEMP_CSVPATH = migset.csvfile
    #BQ_TEMP_DEL_PATH = migset.csvfile_del_path

    datalist, tablename, pkname, pk_range, col_type_map, log_idx = migset.values(
    )
    jobId = migset.jobId
    if jobId is None:
        # Only logged: the reload below will then fail and be reported via
        # the broad except handler instead of raising here.
        print >> sys.stderr, "jobid is null %s" % log_idx

    try:
        print "start check jobId : %s" % jobId
        bq = bigquery.Client(migset.bq_project)
        # Re-hydrate a job handle from the stored id and fetch its server state.
        job = _AsyncJob(jobId, client=bq)
        job.reload()
        state = job._properties.get("status", {}).get("state", "")
        if state == "DONE":
            errors = job._properties.get("status", {}).get("errors", [])
            if len(errors) > 0:
                # Error: job finished but reported errors.
                errmsg = ujson.dumps(errors)
                print "jodIb :%s has error : %s" % (jobId, errmsg)

                # Temp CSV file name is derived from table / pk range.
                filename = "migbq-%s-%s-%s-%s" % (tablename, pkname,
                                                  pk_range[0], pk_range[1])
                csvfile = os.path.join(migset.csvfile, filename)
                if os.path.isfile(csvfile) and os.path.exists(csvfile):
                    # Failed load: discard the temp file so it is not retried
                    # with the same bad data.
                    print "!! check job found error. delete error file : %s" % csvfile
                    os.remove(csvfile)

                ret = MigrationSetJobResult(migset.log_idx, -1, msg=errmsg)
            else:
                # Success: report the number of rows loaded.
                rowcnt = job._properties.get("statistics",
                                             {}).get("load",
                                                     {}).get("outputRows", -1)
                ret = MigrationSetJobResult(migset.log_idx, rowcnt)

                print "jodIb : %s success" % jobId

                filename = "migbq-%s-%s-%s-%s" % (tablename, pkname,
                                                  pk_range[0], pk_range[1])
                csvfile = os.path.join(migset.csvfile, filename)

                print "jodIb : %s ... delete temp file : %s " % (jobId,
                                                                 csvfile)

                if os.path.isfile(csvfile):
                    # Successful load: archive the temp file into the
                    # "deleted" directory rather than removing it outright.
                    del_path_file = os.path.join(migset.csvfile_del_path,
                                                 os.path.basename(csvfile))
                    print "remove file ... %s to %s" % (csvfile, del_path_file)
                    shutil.move(csvfile, del_path_file)

        else:
            # Job still pending/running — caller should poll again.
            ret = MigrationSetJobResult(migset.log_idx, 0)
            print "jodIb :%s not DONE : %s " % (jobId, state)
    except:
        # NOTE(review): bare except deliberately converts any failure
        # (network, auth, missing job) into a -1 result instead of raising.
        errormsg = traceback.format_exc()
        print >> sys.stderr, "jobId [%s] error : %s" % (jobId, errormsg)
        ret = MigrationSetJobResult(migset.log_idx, -1, errormsg)

    return ret
# Example #4
# 0
 def get_job_state(self, jobId):
     """Reload the job identified by *jobId* via ``self.bq`` and return its
     current state string (empty string when no status is present)."""
     bq_job = _AsyncJob(jobId, client=self.bq)
     bq_job.reload()
     return bq_job._properties.get("status", {}).get("state", "")
# Example #5
# 0
 def get_job(self, jobId, bq):
     """Fetch and reload the BigQuery job *jobId* using client *bq*.

     The reload goes through ``execute_with_retry`` so transient API errors
     are retried before the populated job object is returned.
     """
     # Fix: drop the unused QueryJob import; only _AsyncJob is needed here.
     from google.cloud.bigquery.job import _AsyncJob

     job = _AsyncJob(jobId, client=bq)
     # Pass the bound method directly — the lambda wrapper added nothing.
     self.execute_with_retry(job.reload)
     return job