def insert_answer(self, alexaAnswer):
    """Persist one Alexa answer record into the AlexaAnswerDB table."""
    prepared = self.session.prepare(
        "INSERT INTO AlexaAnswerDB (id, id_question, answer,alias_employee, date,profile_question, penalization_profile, punctuation) VALUES (?,?,?,?,?,?,?,?)")
    row = (
        alexaAnswer.get_id_answer(),
        alexaAnswer.get_id_question(),
        alexaAnswer.get_answer(),
        alexaAnswer.get_alias_employee(),
        alexaAnswer.get_date(),
        alexaAnswer.get_profile_question_int(),
        alexaAnswer.get_penalization_int(),
        alexaAnswer.get_punctuation(),
    )
    batch = BatchStatement()
    batch.add(prepared, row)
    self.session.execute(batch)
    self.log.info('Data insertion completed in AlexaAnswerDB')
def insert_stops(stops, current_time, cassandra_session):
    """Batch-insert stop wait times, flushing to Cassandra every 30 rows.

    Table, column list and placeholder tuple come from the environment so the
    same loader works against different schemas.
    """
    table = os.environ['CASSANDRA_TABLE']
    row_spec = os.environ['CASSANDRA_ROW']
    prep_values = os.environ['CASSANDRA_PREP_VALUES']
    insert_stop = cassandra_session.prepare(
        "INSERT INTO " + table + " " + row_spec + " VALUES " + prep_values)

    def fresh_batch():
        # Unlogged batch: rows are independent, no atomicity needed.
        return BatchStatement(batch_type=BatchType.UNLOGGED,
                              consistency_level=ConsistencyLevel.LOCAL_QUORUM)

    batch = fresh_batch()
    pending = 0
    timestamp_ms = current_time * 1000  # seconds -> milliseconds
    for stop in stops.values():
        batch.add(insert_stop, (stop[0], stop[1], stop[2], timestamp_ms))
        pending += 1
        # App-imposed cap: at most 30 statements per batch.
        if pending == 30:
            cassandra_session.execute(batch)
            batch = fresh_batch()
            pending = 0
    # Flush whatever is left over after the loop.
    if pending > 0:
        cassandra_session.execute(batch)
def saveTwitterDf(dfrecords):
    """Write classified tweets from a dataframe into Cassandra, 100 per batch."""
    hosts = CASSANDRA_HOST if isinstance(CASSANDRA_HOST, list) else [CASSANDRA_HOST]
    cluster = Cluster(hosts)
    session = cluster.connect(CASSANDRA_KEYSPACE)
    cqlsentence = "INSERT INTO " + twittertable + " (tweet_date, location, tweet, classification) \
        VALUES (?, ?, ?, ?)"
    insert = session.prepare(cqlsentence)
    batches = []
    batch = BatchStatement(consistency_level=ConsistencyLevel.QUORUM)
    pending = 0
    total = 0
    for _, row in dfrecords.iterrows():
        batch.add(insert, (row['datetime'], row['location'],
                           row['tweet'], row['classification']))
        pending += 1
        if pending >= 100:
            print('inserting ' + str(pending) + ' records')
            total += pending
            pending = 0
            batches.append(batch)
            batch = BatchStatement(consistency_level=ConsistencyLevel.QUORUM)
    # Keep the final, partially filled batch as well.
    if pending != 0:
        batches.append(batch)
        total += pending
    rs = [session.execute(b, trace=True) for b in batches]
    print('Inserted ' + str(total) + ' rows in total')
def main(input_dir, keyspace, output_table):
    """Load gzipped NASA access logs from input_dir into keyspace.output_table.

    Each matching log line becomes one row (uuid, host, datetime, path, bytes);
    rows are flushed in batches of ~300.
    """
    cluster = Cluster(['199.60.17.188', '199.60.17.216'])
    session = cluster.connect(keyspace)
    linesep = re.compile(r'^(\S+) - - \[(\S+) [+-]\d+\] \"[A-Z]+ (\S+) HTTP/\d\.\d\" \d+ (\d+)$')
    # BUG FIX: the INSERT previously targeted the hard-coded table "nasalogs"
    # while the TRUNCATE used output_table; both now use output_table
    # (identical behavior when output_table == 'nasalogs').
    insert_log = session.prepare(
        "INSERT INTO " + output_table + " (id, host, datetime, path, bytes) VALUES (?, ?, ?, ?, ?)")
    batch = BatchStatement(consistency_level=ConsistencyLevel.ONE)
    session.execute("TRUNCATE " + keyspace + '.' + output_table + ';')
    for f in os.listdir(input_dir):
        with gzip.open(os.path.join(input_dir, f), 'rt') as logfile:
            count = 0
            for line in logfile:
                parts = linesep.split(line)
                # split() on a fully matching line yields ['', g1, g2, g3, g4, '']
                if len(parts) > 4:
                    batch.add(insert_log,
                              (uuid.uuid1(), parts[1],
                               dt.datetime.strptime(parts[2], '%d/%b/%Y:%H:%M:%S'),
                               parts[3], int(parts[4])))
                    count += 1
                    if count > 300:
                        session.execute(batch)
                        batch.clear()
                        count = 0
    # Flush any rows still buffered after the last file.
    session.execute(batch)
def insert_parameter(self, data2insert, date, employee_alias, parameter="HR"):
    """Update a single column (`parameter`) of EmployeeStateDB for one employee/date.

    NOTE(review): `parameter` is interpolated into the CQL text because column
    names cannot be bound; callers must pass only trusted column names.
    """
    command = "UPDATE EmployeeStateDB SET " + parameter + "=? WHERE alias=? AND date=?;"
    prepared = self.session.prepare(command)
    batch = BatchStatement()
    batch.add(prepared, (data2insert, employee_alias, date))
    self.session.execute(batch)
    self.log.info(parameter + ' insertion completed in EmployeeStateDB')
def insert_metric(self, room_id, rack_id, server_id, datetime,
                  temperature_anomaly=False, energy_anomaly=False,
                  utilization_anomaly=False):
    """Record one anomaly-flag row for a server in AnomaliesMachinesDB."""
    self.log.info("Inserting AAdata into ProteusDB")
    prepared = self.session.prepare(
        "INSERT INTO AnomaliesMachinesDB (room_id, rack_id, server_id, date, temperature_anomaly, energy_anomaly, utilization_anomaly) VALUES (?,?,?,?,?,?,?)"
    )
    row = (room_id, rack_id, server_id, datetime,
           temperature_anomaly, energy_anomaly, utilization_anomaly)
    batch = BatchStatement()
    batch.add(prepared, row)
    self.session.execute(batch)
    self.log.info(
        'Data insertion completed in AnomaliesMachinesDB successfully')
def put_entity(self, table_name, row_key, column_names, cell_values):
    """Insert the given columns of one entity row as a Cassandra batch.

    Returns a list whose first element is the error string (ERROR_DEFAULT on
    success, with a failure note appended on error) followed by "0" on success.
    """
    # FIX: the accumulator was named `list`, shadowing the builtin.
    results = [ERROR_DEFAULT]
    # NOTE(review): bytearray(str) without an encoding is Python-2-only —
    # confirm the target runtime before porting.
    row_key = bytearray('/'.join([table_name, row_key]))
    values = dict(zip(column_names, cell_values))
    statement = """
      INSERT INTO "{table}" ({key}, {column}, {value})
      VALUES (%(key)s, %(column)s, %(value)s)
    """.format(table=table_name,
               key=ThriftColumn.KEY,
               column=ThriftColumn.COLUMN_NAME,
               value=ThriftColumn.VALUE)
    batch = BatchStatement(retry_policy=self.retry_policy)
    for column in column_names:
        batch.add(statement, {'key': row_key,
                              'column': column,
                              'value': bytearray(values[column])})
    try:
        self.session.execute(batch)
    except dbconstants.TRANSIENT_CASSANDRA_ERRORS:
        results[0] += 'Unable to insert entity'
        return results
    results.append("0")
    return results
def insert_data_block(self, messages):
    """Batch-insert a collection of pre-bound block rows into the block table."""
    prepared = self.session.prepare("""
        INSERT INTO block(block_number, block_hash, miner_address, parent_hash,
        receipt_tx_root, state_root, tx_trie_root, extra_data, nonce, bloom,
        solution, difficulty, total_difficulty, nrg_consumed, nrg_limit, size,
        block_timestamp, num_transactions, block_time, nrg_reward,
        transaction_id, transaction_list)
        VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
    """)
    batch = BatchStatement()
    for message in messages:
        batch.add(prepared, message)
    self.session.execute(batch)
    print(
        "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
    )
    print(
        '############################### DATA INSERTED ########################################'
    )
    print(
        "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
    )
    self.log.info('Block Batch Insert Completed')
def insert_data(self, single_news):
    """Insert a news item into NoticiasDB, then register its id against every
    one of its metrics in MetricsDB."""
    insert_sql = self.session.prepare(
        "INSERT INTO NoticiasDB (id, date, news, typology, punctuation, title, summary, graph, side, metrics) VALUES (?,?,?,?,?,?,?,?,?,?)"
    )
    batch = BatchStatement()
    batch.add(insert_sql,
              (single_news.get_id(), single_news.get_date(),
               single_news.get_news_number(), single_news.get_typology(),
               single_news.get_punctuation(), single_news.get_title(),
               single_news.get_summary(), single_news.get_graph(),
               single_news.get_side(), single_news.get_metrics()))
    self.session.execute(batch)
    self.log.info('Data insertion completed in NoticiasDB')

    # FIX: the UPDATE statement was re-prepared on every loop iteration;
    # prepare it once and reuse it for each metric.
    update_sql = self.session.prepare(
        "UPDATE MetricsDB SET news_id=news_id+? WHERE metric=?"
    )
    identificatorNew = single_news.get_id()
    for metric in single_news.get_metrics():
        toAdd = {identificatorNew}  # set-append semantics of news_id+?
        batch = BatchStatement()
        batch.add(update_sql, (toAdd, metric))
        self.session.execute(batch)
        self.log.info('Data insertion completed in MetricsDB')
def insert_noise_level(self, noise_level, date, room_id):
    """Update the noise_level column of OperatorsRoomStatusDB for a room/date."""
    update_sql = self.session.prepare(
        "UPDATE OperatorsRoomStatusDB SET noise_level=? WHERE room_id=? AND date=?;"
    )
    batch = BatchStatement()
    batch.add(update_sql, (noise_level, room_id, date))
    self.session.execute(batch)
    # FIX: log message previously read "emoiton insertion" (misspelled and
    # referring to the wrong field) for a noise_level write.
    self.log.info('noise_level insertion completed in OperatorsRoomStatusDB')
def insert_data(self):
    """Seed the employee table with two sample rows in a single batch."""
    prepared = self.session.prepare(
        "INSERT INTO employee (emp_id, ename , sal,city) VALUES (?,?,?,?)"
    )
    sample_rows = [
        (5, 'Rajani', 2500, 'AMS'),
        (6, 'RK', 3000, 'nld'),
    ]
    batch = BatchStatement()
    for row in sample_rows:
        batch.add(prepared, row)
    self.session.execute(batch)
    self.log.info('Batch Insert Completed')
def insert_data(self, *fields):
    """Insert one row whose column values are supplied positionally in *fields.

    The target table and column tuple come from the instance
    (self.tableName / self.fieldTupleStr).
    """
    placeholders = ",".join(["?"] * len(fields))
    command = """INSERT INTO {} {} values ({})""".format(
        self.tableName, self.fieldTupleStr, placeholders)
    prepared = self.session.prepare(command)
    batch = BatchStatement()
    batch.add(prepared, fields)
    self.session.execute(batch)
def insert_temperature(self, temperature, date, room_id):
    """Update the temperature column of OperatorsRoomStatusDB for a room/date."""
    prepared = self.session.prepare(
        "UPDATE OperatorsRoomStatusDB SET temperature=? WHERE room_id=? AND date=?;"
    )
    batch = BatchStatement()
    batch.add(prepared, (temperature, room_id, date))
    self.session.execute(batch)
    self.log.info(
        'temperature insertion completed in OperatorsRoomStatusDB')
def insert_metric(self, room_id, rack_id, metric_log):
    """Insert one server metric sample into machinesdb.

    `metric_log` is indexed positionally: [1]=server_id, [2]=component,
    [3]=metric_type, [4]=value (coerced to int), [5]=date.
    """
    prepared = self.session.prepare(
        "INSERT INTO machinesdb (room_id, rack_id, server_id, server_component, metric_type, metric_value, date) VALUES (?,?,?,?,?,?,?)"
    )
    row = (room_id, rack_id, metric_log[1], metric_log[2],
           metric_log[3], int(metric_log[4]), metric_log[5])
    batch = BatchStatement()
    batch.add(prepared, row)
    self.session.execute(batch)
def insert_new_row(self, temperature, noise_level, humidity, heat_index,
                   date, room_id):
    """Insert a complete environment reading row into OperatorsRoomStatusDB."""
    prepared = self.session.prepare(
        "INSERT INTO OperatorsRoomStatusDB (room_id, date, temperature,humidity, heat_index, noise_level) VALUES (?,?,?,?,?,?)"
    )
    batch = BatchStatement()
    batch.add(prepared,
              (room_id, date, temperature, humidity, heat_index, noise_level))
    self.session.execute(batch)
    self.log.info('Data insertion completed in OperatorsRoomStatusDB')
def insert_data(self, table, messages):
    """Batch-insert pre-built rows into `table` using its prepared INSERT
    (looked up in the module-level insert_sql map)."""
    prepared = self.session.prepare(insert_sql[table])
    batch = BatchStatement()
    for message in messages:
        try:
            batch.add(prepared, message)
        except Exception:
            # Best-effort: a row that fails to bind is logged and skipped so
            # the remaining rows are still written.
            logger.error(table.upper() + ' INSERT FAILED:', exc_info=True)
    self.session.execute(batch)
def updateData(cls, query):
    """Execute `query` against Cassandra inside a batch.

    Returns "Successful" on success, or "Exception<details>" on failure.
    """
    updateMsg = ""
    try:
        prepared = cls.session.prepare(query)
        batch = BatchStatement()
        # BUG FIX: the prepared statement was built and then discarded — the
        # raw query string was added to the batch instead. Add the prepared
        # statement so preparation is actually used.
        batch.add(prepared)
        cls.session.execute(batch)
        updateMsg = "Successful"
    except Exception as e:
        updateMsg = "Exception" + str(e)
    return updateMsg
def insert_data_transaction(self, message):
    """Insert a single pre-bound transaction row into the transaction table."""
    prepared = self.session.prepare("""
        INSERT INTO transaction(
        id,transaction_hash, block_hash, block_number, transaction_index,
        from_addr, to_addr, nrg_consumed, nrg_price, transaction_timestamp,
        block_timestamp, tx_value, transaction_log, tx_data, nonce, tx_error,
        contract_addr)
        VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
    """)
    batch = BatchStatement()
    batch.add(prepared, message)
    self.session.execute(batch)
    self.log.info('Batch Insert Completed')
def insert_batch(rec, session, table_name):
    """Insert parsed log records into `table_name`, one execute per record.

    FIX: the INSERT was re-prepared inside the innermost loop (one server
    round-trip per record); it is now prepared once up front. The per-record
    execute/clear cycle is preserved.
    """
    insert_statement = session.prepare(
        "INSERT INTO %s(host,uid,datetime,path,bytes) VALUES (?,UUID(),?,?,?)"
        % table_name)
    batch = BatchStatement(consistency_level=ConsistencyLevel.QUORUM)
    for each_list in rec:
        for list_content in each_list:
            host = list_content[0]
            datetime = d.datetime.strptime(list_content[1],
                                           "%d/%b/%Y:%H:%M:%S")
            path = list_content[2]
            byte = int(list_content[3])
            batch.add(insert_statement, (host, datetime, path, byte))
            session.execute(batch)
            batch.clear()
def insert_metric(self, server_status_log):
    """Insert one server status sample into ServerStatusDB."""
    self.log.info("Inserting AAdata into ProteusDB")
    prepared = self.session.prepare(
        "INSERT INTO ServerStatusDB (rack_id, server_id, date, server_component, profile, value, punctuation) VALUES (?,?,?,?,?,?,?)"
    )
    row = (
        server_status_log.get_rack_id(),
        server_status_log.get_server_id(),
        server_status_log.get_date(),
        server_status_log.get_server_component(),
        server_status_log.get_profile_int(),
        server_status_log.get_value(),
        server_status_log.get_punctuation(),
    )
    batch = BatchStatement()
    batch.add(prepared, row)
    self.session.execute(batch)
    self.log.info(
        'Data insertion completed in ServerStatusDB successfully')
def insert_data_batch(self, keyspace):
    """Insert 19 sample employee rows (with a long text payload) in one batch."""
    self.session.set_keyspace(keyspace)
    prepared = self.session.prepare(
        "INSERT INTO employee (emp_id, ename , sal,city) VALUES (?,?,?,?)")
    batch = BatchStatement(consistency_level=ConsistencyLevel.QUORUM)
    for k in range(1, 20):
        batch.add(prepared, [k, 'Reading can open your mind to brilliant new worlds and take you to a new level of English \
            language learning.It may feel like a slow process, but it is effective.Adopting English \
            Reading can open your mind to brilliant new worlds and take you to a new level of English \
            Reading can open your mind to brilliant new worlds and take you to a new level of English \
            Reading can open your mind to brilliant new worlds and take you to a new level of English \
            Reading can open your mind to brilliant new worlds and take you to a new level of English \
            books as learning tools can help you reach English fluency faster than ever before.', k, 'Dubai'])
    self.session.execute(batch)
    # print('Batch Insert Completed')
    print('Batch Insert Totally Completed')
def create_inventory():
    """Handler: insert every article from the posted JSON into inventory."""
    record = json.loads(request.data)
    print(record)
    example1 = PythonCassandraExample()
    example1.createsession()
    prepared = example1.session.prepare(
        "INSERT INTO inventory(art_id, name, stock ) VALUES (?,?,?)")
    batch = BatchStatement()
    for item in record['inventory']:
        art_id = item['art_id']
        name = item['name']
        stock = item['stock']
        print(art_id)
        print(name)
        print(stock)
        batch.add(prepared, (int(art_id), name, int(stock)))
    example1.session.execute(batch)
    return jsonify(record)
def delte_inventory():
    """Handler: delete every posted article from inventory.

    (Name typo 'delte' kept — external callers/routes reference it.)
    """
    record = json.loads(request.data)
    print(record)
    example1 = PythonCassandraExample()
    example1.createsession()
    prepared = example1.session.prepare(
        "DELETE FROM INVENTORY WHERE NAME = ? AND ART_ID = ?")
    batch = BatchStatement()
    for item in record['inventory']:
        art_id = item['art_id']
        name = item['name']
        stock = item['stock']
        print(art_id)
        print(name)
        print(stock)
        batch.add(prepared, (name, int(art_id)))
    example1.session.execute(batch)
    return jsonify(record)
def add_to_db(session, data, _):
    """Recreate the yago table and bulk-load (s, v, o) triples, 10 per batch."""
    # ROBUSTNESS FIX: a plain `drop table yago` raised an error on a fresh
    # keyspace; `if exists` makes the reload idempotent, matching the
    # `if not exists` style already used by the DDL below.
    session.execute('drop table if exists yago')
    session.execute('create table if not exists yago (i int primary key, s varchar, v varchar, o varchar)')
    session.execute("create index if not exists sindex on yago (s)")
    session.execute("create index if not exists vindex on yago (v)")
    session.execute("create index if not exists oindex on yago (o)")
    prepared = session.prepare('insert into yago (i, s, v, o) values (?, ?, ?, ?)')
    batch = BatchStatement(consistency_level=ConsistencyLevel.ANY)
    count = 0
    i = 0
    for s, v, o in data:
        batch.add(prepared, (i, s.decode('latin1'), v.decode('latin1'),
                             o.decode('latin1')))
        count += 1
        i += 1
        if count >= 10:
            # NOTE(review): fire-and-forget — failures in these async batches
            # are never observed by the caller.
            session.execute_async(batch)
            batch = BatchStatement(consistency_level=ConsistencyLevel.ANY)
            count = 0
    # Final (possibly empty) batch is flushed synchronously.
    session.execute(batch)
def delte_products():
    """Handler: delete every (product, article) pair from PRODUCTS.

    (Name typo 'delte' kept — external callers/routes reference it.)
    """
    record = json.loads(request.data)
    print(record)
    example1 = PythonCassandraExample()
    example1.createsession()
    prepared = example1.session.prepare(
        "DELETE FROM PRODUCTS WHERE NAME = ? AND ART_ID = ?")
    batch = BatchStatement()
    for product in record['products']:
        name = product['name']
        print(name)
        for article in product['contain_articles']:
            art_id = article['art_id']
            amount_of = article['amount_of']
            print(art_id)
            print(amount_of)
            batch.add(prepared, (name, int(art_id)))
    example1.session.execute(batch)
    return jsonify(record)
def create_products():
    """Handler: insert one products row per (product, contained article)."""
    record = json.loads(request.data)
    print(record)
    example1 = PythonCassandraExample()
    example1.createsession()
    prepared = example1.session.prepare(
        "INSERT INTO products(name, art_id, amount_of ) VALUES (?,?,?)")
    batch = BatchStatement()
    for product in record['products']:
        name = product['name']
        print(name)
        for article in product['contain_articles']:
            art_id = article['art_id']
            amount_of = article['amount_of']
            print(art_id)
            print(amount_of)
            batch.add(prepared, (name, int(art_id), int(amount_of)))
    example1.session.execute(batch)
    return jsonify(record)
def insertData(data):
    """Batch-insert movie rows into movietable.

    Row 0 of `data` is skipped — presumably a header row; TODO confirm with
    the caller that produces `data`.
    """
    print("Inserting Data")
    if len(data) == 0:
        print("No data, exiting data insertion")
        return
    prepared = session.prepare("""
        INSERT INTO movietable (title, release_year, genre, director)
        VALUES (?, ?, ?, ?)
        """)
    batch = BatchStatement()
    for row in data[1:]:
        batch.add(prepared, (row[0], row[1], row[2], row[3]))
    session.execute(batch)
def insert_data(self):
    """Seed the employee table with four sample rows in one batch."""
    prepared = self.session.prepare(
        "INSERT INTO employee (emp_id, ename , sal,city) VALUES (?,?,?,?)")
    sample_rows = [
        (1, 'LyubovK', 2555, 'Dubai'),
        (2, 'JiriK', 5660, 'Toronto'),
        (3, 'IvanH', 2547, 'Mumbai'),
        (4, 'YuliaT', 2547, 'Seattle'),
    ]
    batch = BatchStatement()
    for row in sample_rows:
        batch.add(prepared, row)
    self.session.execute(batch)
    self.log.info('Batch Insert Completed')
def saveWeatherreport(dfrecords):
    """Write weather report rows from a dataframe into Cassandra, 100 per batch."""
    hosts = CASSANDRA_HOST if isinstance(CASSANDRA_HOST, list) else [CASSANDRA_HOST]
    cluster = Cluster(hosts)
    session = cluster.connect(CASSANDRA_KEYSPACE)
    cqlsentence = "INSERT INTO " + tablename + " (forecastdate, location, description, temp, feels_like, temp_min, temp_max, pressure, humidity, wind, sunrise, sunset) \
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
    insert = session.prepare(cqlsentence)
    batches = []
    batch = BatchStatement(consistency_level=ConsistencyLevel.QUORUM)
    pending = 0
    total = 0
    for _, row in dfrecords.iterrows():
        batch.add(insert, (row['report_time'], row['location'], row['description'],
                           row['temp'], row['feels_like'], row['temp_min'],
                           row['temp_max'], row['pressure'], row['humidity'],
                           row['wind'], row['sunrise'], row['sunset']))
        pending += 1
        if pending >= 100:
            print('inserting ' + str(pending) + ' records')
            total += pending
            pending = 0
            batches.append(batch)
            batch = BatchStatement(consistency_level=ConsistencyLevel.QUORUM)
    # Keep the final, partially filled batch as well.
    if pending != 0:
        batches.append(batch)
        total += pending
    rs = [session.execute(b, trace=True) for b in batches]
    print('Inserted ' + str(total) + ' rows in total')
def insert_data(self):
    """Seed the employee table with three sample rows (one with a NULL name)."""
    prepared = self.session.prepare(
        "INSERT INTO employee (emp_id, ename, sal, city) VALUES (?, ?, ?, ?)"
    )
    sample_rows = [
        (1, 'xidong', 28000, 'Shanghai'),
        (2, 'mengfei', 20000, 'Shanghai'),
        (3, None, 200, "Shanghai"),
    ]
    batch = BatchStatement()
    for row in sample_rows:
        batch.add(prepared, row)
    self.session.execute(batch)
    print("insert finished")
def parse_indigo_log_file(self, log_file):
    """Parse a gzipped indigo cloud-service log and batch-insert the
    CloudService.Report.API lines into device_logs.log_entry.

    Rows are flushed every 6000 matching lines; a final partial batch is
    flushed after the file ends.

    FIXES: `except BaseException, e` / `print e` were Python-2-only syntax
    (SyntaxError under Python 3); the gzip handle was never closed — it is
    now managed by a `with` block.
    """
    self.cassandra_client.connect(["s000.blurdev.com"])
    cassandra_session = self.cassandra_client.session
    line_counter = 0
    try:
        pstmt = cassandra_session.prepare(
            "INSERT INTO device_logs.log_entry(id,deviceId,eventDatetime,api,appid,userid,resultCode,latency,service,method)VALUES(?,?,?,?,?,?,?,?,?,?)"
        )
        batch_statement = BatchStatement()
        with gzip.open(log_file) as gzip_file_handler:
            linestr = gzip_file_handler.readline()
            while linestr:
                # Matching lines look like:
                # ... [CloudService.Report.API]: api=/v1/... appid=... userid=...
                #     deviceid=... status=... time=... method=... service=...
                if linestr.count("CloudService.Report.API") > 0:
                    log_fileds = linestr.split(" ")
                    # Only the 20-field layout is parsed; other report lines
                    # (e.g. Report.RemoteZone) are skipped.
                    if len(log_fileds) == 20:
                        line_counter += 1
                        field_event_date = log_fileds[0]
                        field_event_time = self.extract_time(log_fileds[1])
                        field_api = self.grep_value(log_fileds[10])
                        field_appid = self.grep_value(log_fileds[11])
                        field_userid = self.grep_value(log_fileds[12])
                        field_deviceid = self.grep_value(log_fileds[13])
                        field_result_code = self.get_int(self.grep_value(log_fileds[14]))
                        field_latency = self.get_int(self.grep_value(log_fileds[15]))
                        field_method = self.convert_method(self.grep_value(log_fileds[16]))
                        field_service = self.grep_value(log_fileds[17])
                        edt = self.to_datetime(field_event_date, field_event_time)
                        batch_statement.add(
                            pstmt.bind((
                                util.uuid_from_time(edt),
                                field_deviceid,
                                edt,
                                field_api,
                                field_appid,
                                field_userid,
                                field_result_code,
                                field_latency,
                                field_service,
                                field_method,
                            ))
                        )
                        # Flush every 6000 parsed lines.
                        if line_counter % 6000 == 0:
                            cassandra_session.execute(batch_statement)
                            batch_statement = BatchStatement()
                linestr = gzip_file_handler.readline()
        # Flush the trailing partial batch (skipped when the last flush
        # landed exactly on a 6000 boundary).
        if line_counter % 6000 != 0:
            cassandra_session.execute(batch_statement)
    except BaseException as e:
        print(e)