def decorated(self, *args, **kwargs):
    # Inner wrapper of a timing/recording decorator; `f` is the wrapped method.
    start_time = datetime.now()
    result = f(self, *args, **kwargs)
    end_time = datetime.now()

    # Check the status of the function:
    if isinstance(result, list):
        if all(res.returncode == 0 for res in result):
            status = "SUCCESSFUL"
        else:
            # Print stderr of every failed command (the original printed a
            # generator object instead of its contents).
            for res in result:
                if res.returncode != 0:
                    print(res.stderr)
            status = "FAILED"
    else:
        if result.returncode == 0:
            status = "SUCCESSFUL"
        else:
            print(result.stderr)
            status = "FAILED"

    # Get network transfer rate:
    transfer_rate = get_network_transfer_rate()

    # Record the result to the database:
    save_to_db(self.db, "Experiment", self.file.name, self.version, self.bucket,
               self.cluster, self.node, self.tool, self.file_split_size,
               self.segment_size, self.thread, self.cores, transfer_rate,
               start_time.isoformat(), end_time.isoformat(), status)
    return result
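# The wrapper above is only a fragment: `f` is the function being wrapped, so it
# has to live inside a decorator. A minimal sketch of that enclosing factory,
# assuming the name `record_experiment` (hypothetical, not from the source):
import functools


def record_experiment(f):
    """Hypothetical decorator factory that returns the `decorated` wrapper above."""
    @functools.wraps(f)
    def decorated(self, *args, **kwargs):
        # ... timing, status check and save_to_db call as in the body above ...
        return f(self, *args, **kwargs)
    return decorated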
def process_images(request):
    urls_array = request.POST.get('urls_list')
    # The URLs arrive as a single '<br/>'-separated string; splitting leaves a
    # trailing empty entry, which is dropped before saving.
    urls_list = urls_array.split('<br/>')
    del urls_list[-1]
    print("########")
    print(urls_list)
    print("########")
    save_to_db(urls_list)
    return render(request, 'home.html')
def predict(main_project_id, input_bucket, input_file, png_path, model_id,
            compute_region, config):
    """Runs AutoML NER on a single OCR text file and writes the result to BigQuery.

    Args:
        main_project_id: Project ID used for the AutoML NER call.
        input_bucket: GCS bucket holding the OCR text output (under /txt).
        input_file: Name of the file to process.
        png_path: Path of the rendered PNG, stored alongside the result.
        model_id: AutoML model ID (NER).
        compute_region: Compute region for the NER model.
        config: Pipeline configuration; config["model_ner"]["fields_to_extract"]
            lists the fields to extract (dicts with a "field_name" key).
    """
    print('Starting entity extraction.')
    input_txt_folder = f"gs://{input_bucket}/txt"
    print(input_txt_folder)

    list_fields = [
        x['field_name'] for x in config["model_ner"]["fields_to_extract"]
    ]
    list_fields.remove('gcs_path')

    # The bucket handle is fetched but not otherwise used here.
    storage_client = storage.Client()
    bucket_name, path = utils.get_bucket_blob(input_txt_folder)
    bucket = storage_client.get_bucket(bucket_name)

    full_filename = os.path.join(input_txt_folder, os.path.basename(input_file))
    print(full_filename)

    result = run_automl_single(ocr_path=full_filename,
                               list_fields=list_fields,
                               model_id=model_id,
                               main_project_id=main_project_id,
                               compute_region=compute_region)
    print('result')
    print(result)

    print('Saving to db\n')
    utils.save_to_db(row_to_insert=result, png_path=png_path, config=config)
    print('Entity extraction finished.\n')
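# A minimal usage sketch for predict() above. Only the "fields_to_extract" shape
# (a list of dicts with a "field_name" key, including "gcs_path") is implied by
# the code; every concrete value below is a hypothetical placeholder.
config = {
    "model_ner": {
        "fields_to_extract": [
            {"field_name": "gcs_path"},
            {"field_name": "total_amount"},  # hypothetical field
        ]
    }
}

predict(main_project_id="my-gcp-project",                      # hypothetical project ID
        input_bucket="my-ocr-bucket",                           # bucket with the /txt OCR output
        input_file="invoice_001.txt",                           # hypothetical file name
        png_path="gs://my-ocr-bucket/png/invoice_001.png",      # hypothetical PNG path
        model_id="TEN1234567890123456",                         # hypothetical AutoML model ID
        compute_region="us-central1",
        config=config)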
def test7_save_to_db(self):
    """Write the collected results to the database."""
    self.assertIn("rlist", self.parent_suite.params)
    record_id = utils.save_to_db({
        "date": datetime.now(),
        "results": self.parent_suite.params["rlist"]
    })
    self.assertIsNotNone(record_id)
    self.log.info(str(record_id.inserted_id))
def record_file(self) -> None:
    save_to_db(self.db, "File", self.file.name, self.file.suffix,
               str(self.file.stat().st_size))
def result():
    save_to_db(request=request)
    return calculate_roots(a=request.args['a'],
                           b=request.args['b'],
                           c=request.args['c'])