def sendToBQUsingTaskQueue(project_id, dataset_id, table_id, csv_name, schema):
    """Enqueue one deferred task per numbered CSV chunk to load it into BigQuery.

    Args:
        project_id: Google Cloud project id passed through to functions.sendToBQ.
        dataset_id: BigQuery dataset id.
        table_id: BigQuery table id.
        csv_name: base CSV file name (e.g. "logs.csv"); chunk i is "logs<i>.csv".
        schema: table schema forwarded unchanged to functions.sendToBQ.
    """
    # BUG FIX: the original pattern ".csv" treated "." as a regex wildcard and
    # was unanchored, so it could mangle names containing "csv" elsewhere.
    # Escape and anchor so only a trailing ".csv" extension is stripped.
    base_name = re.sub(r"\.csv$", "", csv_name)
    loop_count = functions.calculateLoopCount()
    for i in range(loop_count):
        # Use a local for the chunk name instead of clobbering the parameter.
        chunk_name = base_name + str(i) + ".csv"
        print("<br />Sending to Google BigQuery from " + chunk_name)
        # _countdown=60 asks the task queue to delay execution by 60 seconds
        # (deferred library option), presumably so the GCS writes finish first
        # -- TODO confirm against the writer's timing.
        deferred.defer(functions.sendToBQ, project_id, dataset_id, table_id,
                       chunk_name, schema, _countdown=60)
        print("<br />Logs Successfully sent from " + chunk_name + " to Google BigQuery<br />")
def sendToGSUsingTaskQueue(csv_name):
    """Enqueue one deferred task per numbered CSV chunk to write logs to GCS.

    Each chunk i covers 30 log entries starting at offset i * 30 and is
    written to a file named "<base><i>.csv" by functions.writeToGS.

    Args:
        csv_name: base CSV file name (e.g. "logs.csv"); chunk i is "logs<i>.csv".
    """
    # BUG FIX: the original pattern ".csv" treated "." as a regex wildcard and
    # was unanchored, so it could mangle names containing "csv" elsewhere.
    # Escape and anchor so only a trailing ".csv" extension is stripped.
    base_name = re.sub(r"\.csv$", "", csv_name)
    log_count = 30  # logs handled per deferred task
    loop_count = functions.calculateLoopCount()
    for i in range(loop_count):
        offset = i * log_count
        # Use a local for the chunk name instead of clobbering the parameter.
        chunk_name = base_name + str(i) + ".csv"
        deferred.defer(functions.writeToGS, chunk_name, offset, log_count)
        # Typo fix in the user-facing message: "Succesfully" -> "Successfully".
        print("<br />Logs Successfully written to gs://" + chunk_name + "<br />")
def sendToGSUsingTaskQueue(csv_name):
    """Enqueue one deferred task per numbered CSV chunk to write logs to GCS.

    Each chunk i covers 30 log entries starting at offset i * 30 and is
    written to a file named "<base><i>.csv" by functions.writeToGS.

    NOTE(review): this is a duplicate of an identical sendToGSUsingTaskQueue
    defined earlier in this file; the later definition wins at import time.
    Consider deleting one copy.

    Args:
        csv_name: base CSV file name (e.g. "logs.csv"); chunk i is "logs<i>.csv".
    """
    # BUG FIX: the original pattern '.csv' treated '.' as a regex wildcard and
    # was unanchored, so it could mangle names containing 'csv' elsewhere.
    # Escape and anchor so only a trailing '.csv' extension is stripped.
    base_name = re.sub(r'\.csv$', '', csv_name)
    log_count = 30  # logs handled per deferred task
    loop_count = functions.calculateLoopCount()
    for i in range(loop_count):
        offset = i * log_count
        # Use a local for the chunk name instead of clobbering the parameter.
        chunk_name = base_name + str(i) + '.csv'
        deferred.defer(functions.writeToGS, chunk_name, offset, log_count)
        # Typo fix in the user-facing message: 'Succesfully' -> 'Successfully'.
        print('<br />Logs Successfully written to gs://' + chunk_name + '<br />')
def sendToBQUsingTaskQueue(project_id, dataset_id, table_id, csv_name, schema):
    """Enqueue one deferred task per numbered CSV chunk to load it into BigQuery.

    NOTE(review): this is a duplicate of an identical sendToBQUsingTaskQueue
    defined earlier in this file; the later definition wins at import time.
    Consider deleting one copy.

    Args:
        project_id: Google Cloud project id passed through to functions.sendToBQ.
        dataset_id: BigQuery dataset id.
        table_id: BigQuery table id.
        csv_name: base CSV file name (e.g. "logs.csv"); chunk i is "logs<i>.csv".
        schema: table schema forwarded unchanged to functions.sendToBQ.
    """
    # BUG FIX: the original pattern '.csv' treated '.' as a regex wildcard and
    # was unanchored, so it could mangle names containing 'csv' elsewhere.
    # Escape and anchor so only a trailing '.csv' extension is stripped.
    base_name = re.sub(r'\.csv$', '', csv_name)
    loop_count = functions.calculateLoopCount()
    for i in range(loop_count):
        # Use a local for the chunk name instead of clobbering the parameter.
        chunk_name = base_name + str(i) + '.csv'
        print('<br />Sending to Google BigQuery from ' + chunk_name)
        # _countdown=60 asks the task queue to delay execution by 60 seconds
        # (deferred library option), presumably so the GCS writes finish first
        # -- TODO confirm against the writer's timing.
        deferred.defer(functions.sendToBQ, project_id, dataset_id, table_id,
                       chunk_name, schema, _countdown=60)
        print('<br />Logs Successfully sent from ' + chunk_name + ' to Google BigQuery<br />')