def main():
    """Connect to a local Ignite node, execute an (empty) SQL query and print each row."""
    ignite = Client()
    ignite.connect('127.0.0.1', 10800)
    sql_text = ''
    cursor = ignite.sql(sql_text)
    for record in cursor:
        print(record)
def export_users_csv(request):
    """Stream yesterday's EVENTSDATA rows from Ignite as a CSV attachment.

    Builds a date-filtered SQL query, iterates the result cursor and writes
    one CSV line per event, flattening the JSON ``EVENTS`` payload into
    columns. Returns the HttpResponse that carries the CSV file.
    """
    print("Hello")
    # Two local Ignite nodes; the thin client can fail over between them.
    nodes = [
        ('127.0.0.1', 10800),
        ('127.0.0.1', 10801),
    ]
    client = Client()
    client.connect(nodes)
    # Export window: yesterday, formatted the way CREATED_DATE is stored (MM/DD/YYYY).
    yesterday = date.today() - timedelta(days=1)
    datecsv = yesterday.strftime('%m/%d/%Y')
    print(datecsv)
    # NOTE(review): the date is spliced into the SQL text via str.format rather
    # than query_args — acceptable for a server-generated date, but not an
    # injection-safe pattern in general.
    QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA WHERE APPID != '5def994ce96b09565e1f1ddd' AND CREATED_DATE = {} AND EVENTNAME!='_app_crash' AND ID!=855263 ORDER BY ID; '''.format("'"+datecsv+"'")
    print(QUERY)
    result = client.sql(
        QUERY,
        include_field_names=True,
    )
    # With include_field_names=True the first yielded row is the column-name list.
    print(next(result))
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="{}.csv"'.format(datecsv+"_IgniteEvents")
    writer = csv.writer(response)
    writer.writerow(['appid','deviceid','devicemodel','platform','apppackage','keyname','mobileoperator','app','song','album','pstate','source','ipaddress','city','station','duration','timestamp','to_char','created_time'])
    i = 0
    for row in result:
        if i == 0:
            # First data row: columns in SELECT order (EVENTS at index 2,
            # EVENTNAME at index 10, ...).
            print(row)
            x = row[2]
            y = json.loads(x)
            if y.get("segmentation") is None:
                writer.writerow([row[1],row[3],y.get("d"),y.get("p"),y.get("ap"),row[10],y.get("network").get("ope"),"","","","","",row[7],row[8],"","",row[4],row[11],row[12]])
            else:
                writer.writerow([row[1],row[3],y.get("d"),y.get("p"),y.get("ap"),row[10],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),y.get("segmentation").get("Source"),row[7],row[8],y.get("segmentation").get("Station"),y.get("segmentation").get("Duration"),row[4],row[11],row[12]])
            i = 1
        else:
            # Subsequent rows are indexed differently (EVENTS at 5, EVENTNAME at 2).
            # NOTE(review): presumably the cursor delivers a different column order
            # after the first page — confirm against pyignite cursor behaviour.
            print(row)
            x = row[5]
            try:
                y = json.loads(x)
            except:
                # NOTE(review): bare except leaves the previous row's `y` in place
                # on a parse failure, so a bad payload is written with stale data.
                pass
            if y.get("segmentation") is None:
                writer.writerow([row[1],row[6],y.get("d"),y.get("p"),y.get("ap"),row[2],y.get("network").get("ope"),"","","","","",row[10],row[11],"","",row[7],row[3],row[4]])
            else:
                writer.writerow([row[1],row[6],y.get("d"),y.get("p"),y.get("ap"),row[2],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),y.get("segmentation").get("Source"),row[10],row[11],y.get("segmentation").get("Station"),y.get("segmentation").get("Duration"),row[7],row[3],row[4]])
    client.close()
    return response
def artistMaster(request):
    """Render the artist-master page with track/album/artist/genre rows from Ignite."""
    current_user = request.user
    cluster_nodes = [
        ('127.0.0.1', 10800),
        ('127.0.0.1', 10801),
    ]
    ignite = Client()
    ignite.connect(cluster_nodes)
    # Columns come back in SELECT order: TRACKNAME, ALBUMNAME, ARTISTNAME, GENRE.
    QUERY = ''' SELECT TRACKNAME, ALBUMNAME, ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA ORDER BY ID DESC; '''
    cursor = ignite.sql(
        QUERY,
    )
    # Reshape each result row into the dict the template expects.
    datatohtml = [
        {
            "track_name": record[0],
            "artist_name": record[2],
            "album_name": record[1],
            "genre": record[3],
        }
        for record in cursor
    ]
    return render(request,"ArtistMaster.html",{"loginUser":current_user,"datatohtml":datatohtml})
# Tail of the LANGUAGE_DATA literal (the list is opened earlier in the file):
# each row is [country_code, language, is_official, percentage].
['CHN', 'Yi', False, Decimal('0.6')],
['CHN', 'Zhuang', False, Decimal('1.4')],
]

# establish connection
client = Client()
client.connect('127.0.0.1', 10800)

# create tables
for query in [
    COUNTRY_CREATE_TABLE_QUERY,
    CITY_CREATE_TABLE_QUERY,
    LANGUAGE_CREATE_TABLE_QUERY,
]:
    client.sql(query)

# create indices
for query in [CITY_CREATE_INDEX, LANGUAGE_CREATE_INDEX]:
    client.sql(query)

# load data — each INSERT uses positional query_args, one row per call
for row in COUNTRY_DATA:
    client.sql(COUNTRY_INSERT_QUERY, query_args=row)
for row in CITY_DATA:
    client.sql(CITY_INSERT_QUERY, query_args=row)
for row in LANGUAGE_DATA:
    client.sql(LANGUAGE_INSERT_QUERY, query_args=row)
def pythonignite(request):
    """List the 2000 most recent EVENTSDATA rows, paginated 50 per page."""
    # print("Connection Pending!")
    loginUser = request.user
    nodes = [
        ('127.0.0.1', 10800),
        ('127.0.0.1', 10801),
    ]
    # nodes = [
    # ('35.154.247.92', 10800),
    # ('35.154.247.92', 10801),
    # ]
    client = Client()
    client.connect(nodes)
    #QUERY = ''' SELECT * FROM PUBLIC.EVENTSDATA ORDER BY TIMESTAMP DESC LIMIT 2000; '''
    #QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA ORDER BY TIMESTAMP DESC LIMIT 2000; '''
    #QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM EVENTSDATA WHERE ID > (SELECT MAX(ID)-2000 FROM EVENTSDATA) ORDER BY "TIMESTAMP" DESC LIMIT 0,2000; '''
    # Last 2000 rows by ID, newest first.
    QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA WHERE ID > (SELECT MAX(ID) FROM EVENTSDATA)-2000 ORDER BY ID DESC LIMIT 2000; '''
    result = client.sql(
        QUERY,
        include_field_names=True,
    )
    # First yielded row is the column-name header (include_field_names=True).
    print(next(result))
    eventsdata = []
    i = 0
    for row in result:
        # print(row)
        if i == 0:
            # First data row: columns in SELECT order.
            eventsdata.append({
                "ID":row[0],
                "APPID":row[1],
                "EVENTS":row[2],
                "DEVICE_ID":row[3],
                "TIMESTAMP":row[4],
                "MTIMESTAMP":row[5],
                "UPTIMESTAMP":row[6],
                "IPADDRESS":row[7],
                "CITY":row[8],
                "COUNTRY":row[9],
                "EVENTNAME":row[10],
                "CREATED_DATE":row[11],
                "CREATED_TIME":row[12],
            })
            i = 1
        else:
            # Subsequent rows use a different index mapping.
            # NOTE(review): presumably the cursor changes column order after the
            # first page — confirm against pyignite cursor behaviour.
            eventsdata.append({
                "ID": row[0],
                "APPID": row[1],
                "EVENTNAME": row[2],
                "CREATED_DATE": row[3],
                "CREATED_TIME": row[4],
                "EVENTS": row[5],
                "DEVICE_ID": row[6],
                "TIMESTAMP": row[7],
                "MTIMESTAMP": row[8],
                "UPTIMESTAMP": row[9],
                "IPADDRESS": row[10],
                "CITY": row[11],
                "COUNTRY": row[12],
            })
    # print("Connection Establish!")
    client.close()
    # Pagenation: 50 events per page; bad/absent page numbers fall back sanely.
    page = request.GET.get('page', 1)
    paginator = Paginator(eventsdata, 50)
    try:
        contacts = paginator.page(page)
    except PageNotAnInteger:
        contacts = paginator.page(1)
    except EmptyPage:
        contacts = paginator.page(paginator.num_pages)
    return render(request,"Pythonignite.html",{"contacts":contacts,"loginUser":loginUser})
def queryresultalter(request):
    """Export EVENTSDATA between two timestamps as an enriched CSV attachment.

    Joins each event against Ignite song tables (artist/genre) and Postgres
    demographic tables (NCCS code, appsflyer install status), then streams
    one CSV row per event. Returns the HttpResponse carrying the CSV.

    NOTE(review): formatting was reconstructed from a whitespace-collapsed
    source; the nesting of a few trailing statements (e.g. client.close /
    return relative to the GET branch) follows the parallel export view and
    should be confirmed against the original file.
    """
    conn = psycopg2.connect(dbname="VTIONData",host= "vtionproddb.chgz4nqwpdta.ap-south-1.rds.amazonaws.com", user="******", password="******")
    cur = conn.cursor()
    nodes = [
        ('35.154.247.92', 10800),
        ('35.154.247.92', 10801),
    ]
    client = Client()
    client.connect(nodes)
    artist_name = ''
    genre = ''
    # Used only as the filename prefix of the attachment.
    yesterday = "QueryRan"
    if request.method=='GET':
        '''squery = request.GET.get('squery')'''
        starttimestamp = request.GET.get('starttimestamp')
        endtimestamp = request.GET.get('endtimestamp')
        print("lol")
        # NOTE(review): user-supplied timestamps are interpolated into the SQL
        # text — not an injection-safe pattern.
        QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA WHERE "TIMESTAMP" > '{}' AND "TIMESTAMP" < '{}' AND EVENTNAME!='_app_crash';'''.format(starttimestamp,endtimestamp)
        #QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA WHERE "TIMESTAMP" > '{}' AND "TIMESTAMP" < '{}' AND EVENTNAME='Register';'''.format(starttimestamp,endtimestamp)
        print(QUERY)
        result = client.sql(
            QUERY,
            include_field_names=True,
        )
        # Discard the column-name header row.
        next(result)
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="{}.csv"'.format(yesterday+"_IgniteEvents")
        writer = csv.writer(response)
        writer.writerow(['appid','deviceid' ,'vtionid','devicemodel','platform','apppackage','keyname','mobileoperator','app','song','album','pstate','program','episode','source','ipaddress','city','station','duration','timestamp','to_char','created_time','artists', 'genre','education','ownership','nccs_code','age','gender','number', 'uninstall'])
        i = 0
        for row in result:
            # Per-row enrichment fields, reset each iteration.
            nccs = age = gender = number = status = ''
            if i == 0:
                # First data row: columns in SELECT order (EVENTS at 2, EVENTNAME at 10).
                print(row)
                x = row[2]
                # vtionid is the URL-safe base64 encoding of the device id.
                urlSafeEncodedBytes = base64.urlsafe_b64encode(row[3].encode("utf-8"))
                vtionid = str(urlSafeEncodedBytes, "utf-8")
                y = json.loads(x)
                if y.get("segmentation") is None:
                    pass
                    #writer.writerow([row[1],row[3],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[10],y.get("network").get("ope"),"","","","","","","",row[7],row[8],"","",row[4],row[11],row[12],"","",""])
                else:
                    if row[10] == 'Video_Tuned' or row[10] == 'Video_off':
                        # Video events: resolve artist/genre from SONG_DATA, falling
                        # back to SONG_DATA_ADD when the first lookup is empty.
                        song = y.get("segmentation").get("Song")
                        try:
                            if song:
                                QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA WHERE TRACKNAME = '{}';'''.format(song)
                                details = client.sql(
                                    QUERY,
                                    include_field_names=True,
                                )
                                print(next(details))
                                for det in details:
                                    artist_name = det[0]
                                    genre = det[1]
                                print("Testing with second table : ------------------------------------------------------//")
                                if artist_name:
                                    pass
                                else:
                                    QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA_ADD WHERE TRACKNAME = '{}';'''.format(song)
                                    details = client.sql(
                                        QUERY,
                                        include_field_names=True,
                                    )
                                    print(next(details))
                                    for det in details:
                                        artist_name = det[0]
                                        genre = det[1]
                                writer.writerow([row[1],row[3],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[10],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),y.get("segmentation").get("Program"),y.get("segmentation").get("Episode"),y.get("segmentation").get("Source"),row[7],row[8],"",y.get("segmentation").get("Duration"),row[4],row[11],row[12], artist_name, genre,"","","","","","",""])
                            else:
                                pass
                        except Exception as e:
                            raise e
                    elif row[10] == 'Audio_Off':
                        pass
                    else:
                        # All other event types: look up NCCS code, demographics and
                        # install status before writing the row.
                        song = y.get("segmentation").get("Song")
                        education = y.get("segmentation").get("Highest Education")
                        ownership = y.get("segmentation").get("Ownership")
                        if ownership:
                            try:
                                print(ownership)
                                print(education)
                                # Ownership is a comma-separated list; the NCCS table is
                                # keyed on education + item count (capped at 9).
                                own = ownership.split(',')
                                print(len(own))
                                if len(own) >= 9 :
                                    num_own = 9
                                else :
                                    num_own = len(own)
                                cur.execute('''SELECT nccs_code FROM public.nccs_flat where education = '{}' and ownership = '{}';'''.format(education,num_own))
                                nccs = cur.fetchone()
                                print("Found reply from postgres :", nccs[0])
                            except Exception as e:
                                print("query error")
                        if row[10] == 'Register' or row[10] =='Profile':
                            try:
                                # cur.execute('''SELECT age, gender, number FROM public.installdata where deviceid = '{}';'''.format(row[3]))
                                # data = cur.fetchone()
                                # age = data[0]
                                # gender = data[1]
                                # number = data[2]
                                # Payload key casing varies; try both spellings.
                                age = y.get("segmentation").get("age")
                                if age:
                                    pass
                                else:
                                    age = y.get("segmentation").get("Age")
                                gender = y.get("segmentation").get("Gender")
                                if gender:
                                    pass
                                else:
                                    gender = y.get("segmentation").get("gender")
                                number = y.get("segmentation").get("Mobile Number")
                                try:
                                    cur.execute('''SELECT i_status FROM public.appsflyer where number = '{}';'''.format(number))
                                    status_now = cur.fetchone()
                                    status = status_now[0]
                                    if status == 'True':
                                        status = 'True'
                                    else:
                                        status = ''
                                    print("Status : -------------> ", status)
                                except:
                                    pass
                            except Exception as e:
                                print("First case error :",e)
                        try:
                            if song:
                                QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA WHERE TRACKNAME = '{}';'''.format(song)
                                details = client.sql(
                                    QUERY,
                                    include_field_names=True,
                                )
                                next(details)
                                # print(next(details))
                                for det in details:
                                    artist_name = det[0]
                                    genre = det[1]
                            print("lol its here")
                            try:
                                # Primary write includes the NCCS lookup result; if
                                # nccs is still '' (no ownership data), nccs[0] raises
                                # and the fallback row below is written instead.
                                writer.writerow([row[1],row[3],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[10],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),"","",y.get("segmentation").get("Source"),row[7],row[8],y.get("segmentation").get("Station"),y.get("segmentation").get("Duration"),row[4],row[11],row[12], artist_name, genre, education, ownership, nccs[0], age, gender, number, status])
                            except Exception as e:
                                print("first case : ",e)
                                writer.writerow([row[1],row[3],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[10],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),"","",y.get("segmentation").get("Source"),row[7],row[8],y.get("segmentation").get("Station"),y.get("segmentation").get("Duration"),row[4],row[11],row[12], artist_name, genre, education, ownership,'',age, gender, number,status])
                        except Exception as e:
                            print(e)
                            pass
                i = 1
            else:
                # Subsequent rows: different column order (EVENTS at 5, EVENTNAME at 2).
                print(row)
                x = row[5]
                #Encoder
                urlSafeEncodedBytes = base64.urlsafe_b64encode(row[6].encode("utf-8"))
                vtionid = str(urlSafeEncodedBytes, "utf-8")
                #Decoder
                # decodedBytes = base64.b64decode(urlSafeEncodedStr)
                # decodedStr = str(decodedBytes, "utf-8")
                # print("TYPEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEee : ", type(row[6]))
                try:
                    y = json.loads(x)
                    if y.get("segmentation") is None:
                        writer.writerow([row[1],row[6],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[2],y.get("network").get("ope"),"","","","","","","",row[10],row[11],"","",row[7],row[3],row[4],"",""])
                    else:
                        if row[2] == 'Video_Tuned' or row[2] == 'Video_off':
                            song = y.get("segmentation").get("Song")
                            try:
                                if song:
                                    QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA WHERE TRACKNAME = '{}';'''.format(song)
                                    details = client.sql(
                                        QUERY,
                                        include_field_names=True,
                                    )
                                    next(details)
                                    # print(next(details))
                                    for det in details:
                                        artist_name = det[0]
                                        genre = det[1]
                                    writer.writerow([row[1],row[6],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[2],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),y.get("segmentation").get("Program"),y.get("segmentation").get("Episode"),y.get("segmentation").get("Source"),row[10],row[11],"",y.get("segmentation").get("Duration"),row[7],row[3],row[4], artist_name, genre,"","",""])
                                else:
                                    pass
                            except Exception as e:
                                pass
                        elif row[2] == 'Audio_Off':
                            pass
                        else:
                            song = y.get("segmentation").get("Song")
                            education = y.get("segmentation").get("Highest Education")
                            ownership = y.get("segmentation").get("Ownership")
                            if ownership:
                                try:
                                    own = ownership.split(',')
                                    if len(own) >= 9 :
                                        num_own = 9
                                    else :
                                        num_own = len(own)
                                    cur.execute('''SELECT nccs_code FROM public.nccs_flat where education = '{}' and ownership = '{}';'''.format(education,num_own))
                                    nccs = cur.fetchone()
                                    print("Found reply from postgres :", nccs[0])
                                except Exception as e:
                                    print("query error")
                            if row[2] == 'Register' or row[2] =='Profile':
                                try:
                                    # cur.execute('''SELECT age, gender, number FROM public.installdata where deviceid = '{}';'''.format(row[6]))
                                    # data = cur.fetchone()
                                    # age = data[0]
                                    # gender = data[1]
                                    # number = data[2]
                                    age = y.get("segmentation").get("Age")
                                    if age :
                                        pass
                                    else:
                                        age = y.get("segmentation").get("age")
                                    gender = y.get("segmentation").get("Gender")
                                    if gender:
                                        pass
                                    else:
                                        gender = y.get("segmentation").get("gender")
                                    number = y.get("segmentation").get("Mobile Number")
                                    try:
                                        cur.execute('''SELECT i_status FROM public.appsflyer where number = '{}';'''.format(number))
                                        status_now = cur.fetchone()
                                        status = status_now[0]
                                    except:
                                        pass
                                except Exception as e:
                                    print(e)
                            try:
                                if song:
                                    QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA WHERE TRACKNAME = '{}';'''.format(song)
                                    details = client.sql(
                                        QUERY,
                                        include_field_names=True,
                                    )
                                    next(details)
                                    # print(next(details))
                                    for det in details:
                                        artist_name = det[0]
                                        genre = det[1]
                                    if artist_name:
                                        pass
                                    else:
                                        QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA_ADD WHERE TRACKNAME = '{}';'''.format(song)
                                        details = client.sql(
                                            QUERY,
                                            include_field_names=True,
                                        )
                                        print(next(details))
                                        for det in details:
                                            artist_name = det[0]
                                            genre = det[1]
                                try:
                                    writer.writerow([row[1],row[6],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[2],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),"","",y.get("segmentation").get("Source"),row[10],row[11],y.get("segmentation").get("Station"),y.get("segmentation").get("Duration"),row[7],row[3],row[4], artist_name, genre, education, ownership, nccs[0], age, gender, number, status])
                                except Exception as e:
                                    print("Second case :",e)
                                    writer.writerow([row[1],row[6],vtionid,y.get("d"),y.get("p"),y.get("ap"),row[2],y.get("network").get("ope"),y.get("segmentation").get("App"),y.get("segmentation").get("Song"),y.get("segmentation").get("Album"),y.get("segmentation").get("PState"),"","",y.get("segmentation").get("Source"),row[10],row[11],y.get("segmentation").get("Station"),y.get("segmentation").get("Duration"),row[7],row[3],row[4], artist_name, genre, education, ownership, '', age, gender, number, status])
                            except Exception as e:
                                print("passed without writing main : ",e)
                                pass
                except Exception as e:
                    pass
    client.close()
    return response
# Tail of a GenericObjectMeta class definition whose header (class Student(...)
# starts earlier in the file; this chunk supplies the remaining schema kwarg.
schema=OrderedDict([
    ('NAME', String),
    ('LOGIN', String),
    ('AGE', IntObject),
    ('GPA', DoubleObject),
])
):
    pass

# Insert one Student binary object keyed by int, then read it back via SQL.
student_cache.put(
    1,
    Student(LOGIN='******', NAME='John Doe', AGE=17, GPA=4.25),
    key_hint=IntObject
)
# client.sql(...) used as a context manager closes the cursor on exit.
with client.sql(r'SELECT * FROM Student', include_field_names=True) as cursor:
    print(next(cursor))  # ['SID', 'NAME', 'LOGIN', 'AGE', 'GPA']
    print(*cursor)  # [1, 'John Doe', 'jdoe', 17, 4.25]
# DROP_QUERY = 'DROP TABLE Student'
# client.sql(DROP_QUERY)
#
# pyignite.exceptions.SQLError: class org.apache.ignite.IgniteCheckedException:
# Only cache created with CREATE TABLE may be removed with DROP TABLE
# [cacheName=SQL_PUBLIC_STUDENT]
student_cache.destroy()
# Binary-object schema mapped onto the SQL_PUBLIC_STUDENT_TYPE Ignite type.
class Student(
        metaclass=GenericObjectMeta,
        type_name='SQL_PUBLIC_STUDENT_TYPE',
        schema=OrderedDict([
            ('NAME', String),
            ('LOGIN', String),
            ('AGE', IntObject),
            ('GPA', DoubleObject),
        ])):
    pass

# Insert one Student keyed by int, then read it back through SQL.
student_cache.put(1, Student(LOGIN='******', NAME='John Doe', AGE=17, GPA=4.25), key_hint=IntObject)
result = client.sql(r'SELECT * FROM Student', include_field_names=True)
print(next(result))  # ['SID', 'NAME', 'LOGIN', 'AGE', 'GPA']
print(*result)  # [1, 'John Doe', 'jdoe', 17, 4.25]
# DROP_QUERY = 'DROP TABLE Student'
# client.sql(DROP_QUERY)
#
# pyignite.exceptions.SQLError: class org.apache.ignite.IgniteCheckedException:
# Only cache created with CREATE TABLE may be removed with DROP TABLE
# [cacheName=SQL_PUBLIC_STUDENT]
student_cache.destroy()
client.close()
def queryresultalter():
    """Migrate the last week of EVENTSDATA rows from Ignite into Elasticsearch.

    Selects events in a 7-day window and indexes each one into the `tendays`
    index, using device_id + timestamp as the document id.
    """
    # es = Elasticsearch([{'host': '127.0.0.1', 'port': 9200}])
    es = Elasticsearch([{'host': '13.235.1.36', 'port': 9200}])
    yesterday = "QueryRan"
    # print(yesterday)
    # Window start: 7 days ago; the -19800s shift is the IST→UTC offset (5h30m).
    startdate = date.today() - timedelta(days=7) - timedelta(hours=5, minutes=30)
    startdate = int((time.mktime(startdate.timetuple()))-19800)
    # print("startdate :{}".format(startdate))
    enddate = date.today()
    enddate = int((time.mktime(enddate.timetuple()))-19800)
    # print("EndDate :{}".format(enddate))
    # Migration job
    nodes = [
        ('35.154.247.92', 10800),
        ('35.154.247.92', 10801),
    ]
    client = Client()
    client.connect(nodes)
    QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA WHERE "TIMESTAMP" >= '{}' AND "TIMESTAMP" <= '{}' AND EVENTNAME!='_app_crash'; '''.format(startdate,enddate)
    print(QUERY)
    result = client.sql(
        QUERY,
        include_field_names=True,
    )
    # First yielded row is the column-name header.
    print(next(result))
    i = 0
    for row in result:
        if i == 0:
            # First data row: columns in SELECT order.
            str_id = str(row[3]) + str(row[4])
            print(str_id)
            eventdetails = row[2]
            y = json.loads(eventdetails)
            doc = {
                "id" : row[0],
                "uptimestamp" : row[6],
                "city" : row[8],
                "device_id" : row[3],
                "created_time" : row[12],
                "timestamp" : row[4],
                "created_date" : row[11],
                "country" : row[9],
                "mtimestamp" : row[5],
                "ipAddress" : row[7],
                "app_id" : row[1],
                "events" : y,
                "key" : row[10]
            }
        else:
            # Subsequent rows use a different index mapping.
            # NOTE(review): presumably the cursor changes column order after the
            # first row — confirm against pyignite cursor behaviour.
            str_id = str(row[6]) + str(row[7])
            print(str_id)
            eventdetails = row[5]
            y = json.loads(eventdetails)
            doc = {
                "id" : row[0],
                "uptimestamp" : row[9],
                "city" : row[11],
                "device_id" : row[6],
                "created_time" : row[4],
                "timestamp" : row[7],
                "created_date" : row[3],
                "country" : row[12],
                "mtimestamp" : row[8],
                "ipAddress" : row[10],
                "app_id" : row[1],
                "events" : y,
                "key" : row[2]
            }
        # save the data to es
        res = es.index(index="tendays", doc_type='eventdetails',id=str_id, body=doc)
        print(res['result'])
        i = i + 1
        print(i)
# Tail of the LANGUAGE_DATA literal (the list is opened earlier in the file):
# each row is [country_code, language, is_official, percentage].
['CHN', 'Uighur', False, Decimal('0.6')],
['CHN', 'Yi', False, Decimal('0.6')],
['CHN', 'Zhuang', False, Decimal('1.4')],
]

# establish connection
client = Client()
client.connect('127.0.0.1', 10800)

# create tables
for query in [
    COUNTRY_CREATE_TABLE_QUERY,
    CITY_CREATE_TABLE_QUERY,
    LANGUAGE_CREATE_TABLE_QUERY,
]:
    client.sql(query)

# create indices
for query in [CITY_CREATE_INDEX, LANGUAGE_CREATE_INDEX]:
    client.sql(query)

# load data — each INSERT uses positional query_args, one row per call
for row in COUNTRY_DATA:
    client.sql(COUNTRY_INSERT_QUERY, query_args=row)
for row in CITY_DATA:
    client.sql(CITY_INSERT_QUERY, query_args=row)
for row in LANGUAGE_DATA:
    client.sql(LANGUAGE_INSERT_QUERY, query_args=row)
# Tail of a GenericObjectMeta schema OrderedDict opened earlier in the file
# (the enclosing class Student(...) header is not in this chunk).
('LOGIN', String),
('AGE', IntObject),
('GPA', DoubleObject),
])
):
    pass

# Insert one Student binary object keyed by int, then read it back via SQL.
student_cache.put(
    1,
    Student(LOGIN='******', NAME='John Doe', AGE=17, GPA=4.25),
    key_hint=IntObject
)
result = client.sql(
    r'SELECT * FROM Student',
    include_field_names=True
)
print(next(result))  # ['SID', 'NAME', 'LOGIN', 'AGE', 'GPA']
print(*result)  # [1, 'John Doe', 'jdoe', 17, 4.25]
# DROP_QUERY = 'DROP TABLE Student'
# client.sql(DROP_QUERY)
#
# pyignite.exceptions.SQLError: class org.apache.ignite.IgniteCheckedException:
# Only cache created with CREATE TABLE may be removed with DROP TABLE
# [cacheName=SQL_PUBLIC_STUDENT]
student_cache.destroy()
# Bootstrap script: create the `product` and `similarproducts` tables on a
# local Ignite node and define the parameterised insert statements.
from pyignite import Client

client = Client()
client.connect('127.0.0.1', 10800)

PRODUCT_CREATE_TABLE = ''' create table product( product_id VARCHAR PRIMARY KEY, product_url VARCHAR ) '''
client.sql(PRODUCT_CREATE_TABLE)

# Composite primary key; rows are colocated by similar_product_id.
SIMILARPRODUCTS_CREATE_TABLE = ''' create table similarproducts( product_id VARCHAR , similar_product_id VARCHAR , similariy_score DOUBLE , PRIMARY KEY (product_id, similar_product_id) ) WITH "affinityKey=similar_product_id" '''
client.sql(SIMILARPRODUCTS_CREATE_TABLE)

PRODUCT_INSERT_QUERY = ''' insert into product values (? , ?) '''
SIMILARPRODUCTS_INSERT_QUERY = ''' insert into similarproducts values (? , ?, ?) '''

# Code for inserting dummy data
# INSERT_PRODUCT_DATA= []
# for i in range(10) :
# Bootstrap script: create the City table (colocated by CountryCode), add an
# index and prepare sample data. The CITY_DATA literal is truncated at the
# end of this chunk — it continues later in the file.
from pyignite import Client

client = Client()
client.connect('127.0.0.1', 10800)

CITY_CREATE_TABLE_QUERY = '''CREATE TABLE City ( ID INT(11), Name CHAR(35), CountryCode CHAR(3), District CHAR(20), Population INT(11), PRIMARY KEY (ID, CountryCode) ) WITH "affinityKey=CountryCode"'''
client.sql(CITY_CREATE_TABLE_QUERY)

CITY_CREATE_INDEX = '''CREATE INDEX idx_country_code ON city (CountryCode)'''
client.sql(CITY_CREATE_INDEX)

CITY_INSERT_QUERY = '''INSERT INTO City( ID, Name, CountryCode, District, Population ) VALUES (?, ?, ?, ?, ?)'''

# Rows: [ID, Name, CountryCode, District, Population]
CITY_DATA = [
    [3793, 'New York', 'USA', 'New York', 8008278],
    [3794, 'Los Angeles', 'USA', 'California', 3694820],
    [3795, 'Chicago', 'USA', 'Illinois', 2896016],
    [3796, 'Houston', 'USA', 'Texas', 1953631],
    [3797, 'Philadelphia', 'USA', 'Pennsylvania', 1517550],
    [3798, 'Phoenix', 'USA', 'Arizona', 1321045],
# Script: create the `product` table, insert ten dummy rows and read them
# back. The final loop is truncated at the end of this chunk (the `else:`
# branch continues later in the file).
from pyignite import Client

client = Client()
client.connect('127.0.0.1', 10800)

PRODUCT_CREATE_TABLE = ''' create table product( product_id VARCHAR PRIMARY KEY, product_url VARCHAR ) '''
client.sql(PRODUCT_CREATE_TABLE)

PRODUCT_INSERT_QUERY = ''' insert into product values (? , ?) '''

# Dummy rows: product_id and product_url are both the stringified index.
INSERT_DATA = []
for i in range(10):
    INSERT_DATA.append([str(i), str(i)])
for row in INSERT_DATA:
    client.sql(PRODUCT_INSERT_QUERY, query_args=row)

PRODECT_SELECT_QUERY = ''' select * from product '''
result = client.sql(PRODECT_SELECT_QUERY, include_field_names=True)
returnList = []
# Row 0 is the header (include_field_names=True); the rest are data rows.
for i, row in enumerate(result):
    if i == 0:
        field_names = row
    else:
# Recreate the `presence` table on the Ignite cluster and insert one sample row.
client = Client()
client.connect(IGNITE_IP, IGNITE_PORT)

# DDL / DML statements (text kept verbatim).
PRESENCE_TABLE = ''' CREATE TABLE IF NOT EXISTS presence ( subkey VARCHAR, channel VARCHAR, uuid VARCHAR, metadata BINARY, PRIMARY KEY (subkey, channel, uuid) )'''
DROP_PRESENCE_TABLE = ''' DROP TABLE IF EXISTS presence '''
INSERT_PRESENCE_TABLE = ''' INSERT INTO presence( subkey, channel, uuid, metadata ) VALUES (?, ?, ?, ?) '''

# One sample presence record; metadata is deliberately NULL.
args = ["somekey", "somechannel", str(uuid.uuid4()), None]

# Drop-then-create guarantees a clean table before inserting.
for statement in (DROP_PRESENCE_TABLE, PRESENCE_TABLE):
    client.sql(statement)
client.sql(INSERT_PRESENCE_TABLE, query_args=args)

# Read everything back and display it.
results = client.sql('select * from presence')
print([entry for entry in results])
def artistgenre():
    """Backfill song metadata for yesterday's Audio_Tuned events.

    Pulls yesterday's Audio_Tuned events from Elasticsearch, and for each song
    title not already present in the Ignite SONG_DATA / SONG_DATA_ADD tables,
    queries the Musixmatch API and stores the resulting track/album/artist/
    genre data in both Ignite and the `songs` ES index.
    """
    api_token = '905748e88e234954c9849597855d2d57'
    api_url_base = 'http://api.musixmatch.com/ws/1.1/track.search'
    headers = {'Content-Type': 'application/json','Authorization': 'Bearer {0}'.format(api_token)}
    # es = Elasticsearch([{'host': '13.235.1.36', 'port': 9200}])
    # nodes = [
    # ('127.0.0.1', 10800),
    # ('127.0.0.1', 10801),
    # ]
    nodes = [('35.154.247.92', 10800), ('35.154.247.92', 10801)]
    # Yesterday as MM/DD/YYYY, matching the created_date.keyword field format.
    val = str(date.today()- timedelta(days=1)).split("-")
    datelast = val[1]+'/'+val[2]+'/'+val[0]
    # print(datelast)
    # datelast = '07/20/2019'
    # res = es.search(size=2000,index="tendays", body= {"query": {
    # "bool": {
    # "must": [
    # {"match_phrase": {
    # "created_date.keyword": datelast
    # }},
    # {"match_phrase": {
    # "events.key.keyword": "Audio_Tuned"
    # }}
    # ]
    # }
    # }})
    # es = Elasticsearch([{'host': '13.235.1.36', 'port': 9200}])
    #datelast = '09/18/2019'
    es = Elasticsearch([{'host': '13.235.1.36', 'port': 9200}])
    body =\
    {
        "query": {
            "bool": {
                "must": [
                    {"match_phrase": {
                        "created_date.keyword": datelast
                    }},
                    {"match_phrase": {
                        "events.key.keyword": "Audio_Tuned"
                    }}
                ]
            }
        }
    }
    res = es.search(index='tendays', body=body, size=10000)
    print(res)
    client = Client()
    client.connect(nodes)
    i = 0
    for item in res["hits"]["hits"]:
        i = i + 1
        try:
            # Normalise the song title (removesmall/removebig are project helpers).
            d = removesmall(item["_source"]["events"]["segmentation"]["Song"])
            d = removebig(d)
            print(d)
            # First check: is the song already in the primary SONG_DATA table?
            QUERY = ''' SELECT ID FROM PUBLIC.SONG_DATA Where TRACKNAME = '{}'; '''.format(d)
            result = client.sql(QUERY)
            ignite = []
            for trackid in result:
                ignite.append(trackid)
            if ignite:
                print("Song already present in 1st table :", d)
                print("Track ID ===========================:", ignite)
                pass
            else:
                # d = 'Touch the Floor (feat. Masego)'
                # Second check: the SONG_DATA_ADD overflow table, keyed by VSONG.
                try:
                    QUERY = '''SELECT ID FROM PUBLIC.SONG_DATA_ADD Where VSONG = '{}'; '''.format(d)
                    print("CHECKING of second table query : ", QUERY)
                    result = client.sql(QUERY)
                    print("reached herer..............................")
                    # print(next(result))
                except Exception as e:
                    print(e)
                    print("outside the vla")
                ignite_add = []
                for trackid in result:
                    print("############################################")
                    print(trackid[0])
                    ignite_add.append(trackid[0])
                if ignite_add:
                    print("Song 2nd table:", d)
                    print("Track ID ======================= C:", ignite_add)
                    pass
                else:
                    # Unknown song: resolve it through the Musixmatch track.search API.
                    print("api call")
                    api_url = '{0}?q_track={1}&page_size=1&page=1&s_track_rating=desc&apikey=905748e88e234954c9849597855d2d57'.format(api_url_base,d)
                    response = requests.get(api_url, headers=headers)
                    print(response)
                    if response.status_code == 200:
                        # Re-quote the raw bytes repr so it can be embedded in SQL.
                        track_data = str(response.content)
                        print(track_data)
                        track_data = track_data[:1] + '"' + track_data[2:]
                        track_data = track_data[:-2] + '"'
                        # print(track_data)
                        track_id = json.loads(response.content.decode('utf-8'))["message"]["body"]["track_list"][0]["track"]["track_id"]
                        track_name = json.loads(response.content.decode('utf-8'))["message"]["body"]["track_list"][0]["track"]["track_name"]
                        track_name = track_name.replace("'","\\'")
                        album_name = json.loads(response.content.decode('utf-8'))["message"]["body"]["track_list"][0]["track"]["album_name"]
                        album_name = album_name.replace("'","\\'")
                        artist_name = json.loads(response.content.decode('utf-8'))["message"]["body"]["track_list"][0]["track"]["artist_name"]
                        artist_name = artist_name.replace("'","\\'")
                        try:
                            genre = json.loads(response.content.decode('utf-8'))["message"]["body"]["track_list"][0]["track"]["primary_genres"]["music_genre_list"][0]["music_genre"]["music_genre_name"]
                        except Exception as e:
                            genre = "-"
                        # Allocate the next primary-table id (MAX(ID)+1).
                        QUERY = ''' SELECT MAX(ID) FROM PUBLIC.SONG_DATA; '''
                        re = client.sql(
                            QUERY,
                            include_field_names=True,
                        )
                        print(next(re))
                        sqlId = next(re)[0] + 1
                        print("SQL ID ASSIGNED : ", sqlId)
                        INS_QUERY = """INSERT INTO PUBLIC.SONG_DATA
(ID, TRACKID, TRACKDATA, TRACKNAME, ALBUMNAME, ARTISTNAME, GENRE, STATUS) VALUES({sqlId}, {trackid}, '{track_data}', '{track_name}', '{album_name}', '{artist_name}', '{genre}', 0);""".format(sqlId=sqlId,track_name=d,artist_name=artist_name,album_name=album_name,genre=genre,trackid=track_id,track_data=track_data)
                        print(INS_QUERY)
                        try:
                            client.sql(INS_QUERY)
                        except Exception as e:
                            print("Passed due to same data : ", e)
                        print("===========================================================================")
                        # Same insert into the overflow table, with VSONG = raw title.
                        QUERY = ''' SELECT MAX(ID) FROM PUBLIC.SONG_DATA_ADD; '''
                        rre = client.sql(
                            QUERY,
                            include_field_names=True,
                        )
                        print(next(rre))
                        try:
                            sql = next(rre)[0] + 1
                            print("Assigned SQL ID : ",sql)
                        except:
                            # Empty table: MAX(ID) is NULL, so start at 1.
                            sql = 1
                        print(" =========================> ", sql , type(sql))
                        INS = """INSERT INTO SONG_DATA_ADD (ID, TRACKID, TRACKDATA, VSONG, TRACKNAME, ALBUMNAME, ARTISTNAME, GENRE, STATUS) VALUES({sqlId}, {trackid}, '{track_data}','{vsong}', '{track_name}', '{album_name}', '{artist_name}', '{genre}', 0);""".format(sqlId=sql,vsong = d,track_name=track_name,artist_name=artist_name,album_name=album_name,genre=genre,trackid=track_id,track_data=track_data)
                        print(INS)
                        try:
                            client.sql(INS)
                        except Exception as e:
                            print("EROOOOOOOR : ",e)
                            pass
                        # Mirror the metadata into the `songs` ES index.
                        # NOTE(review): `meta` is not defined in this function —
                        # presumably a module-level prefix; confirm it exists.
                        str_id = meta + str(track_id)
                        print("Yes error is there")
                        print(str_id)
                        doc = {
                            "track_id":track_id,
                            "track_name":track_name,
                            "album_name":album_name,
                            "artist_name":artist_name,
                            "genre":genre
                        }
                        res = es.index(index="songs", doc_type='mixmatch', id=str_id ,body=doc)
                        print(res['result'])
                    else:
                        print("no data")
        except Exception as e:
            # raise e
            pass
    print(i)
# Bootstrap script: create `product` and `similarproducts` tables and prepare
# insert statements. The dummy-data loop is truncated at the end of this
# chunk (its body continues later in the file).
from pyignite import Client

client = Client()
client.connect('127.0.0.1', 10800)

PRODUCT_CREATE_TABLE = ''' create table product( product_id VARCHAR PRIMARY KEY, product_url VARCHAR ) '''
client.sql(PRODUCT_CREATE_TABLE)

# Composite primary key; rows are colocated by similar_product_id.
SIMILARPRODUCTS_CREATE_TABLE = ''' create table similarproducts( product_id VARCHAR , similar_product_id VARCHAR , similariy_score DOUBLE , PRIMARY KEY (product_id, similar_product_id) ) WITH "affinityKey=similar_product_id" '''
client.sql(SIMILARPRODUCTS_CREATE_TABLE)

PRODUCT_INSERT_QUERY = ''' insert into product values (? , ?) '''
SIMILARPRODUCTS_INSERT_QUERY = ''' insert into similarproducts values (? , ?, ?) '''

# Code for inserting dummy data
INSERT_PRODUCT_DATA= []
for i in range(10000) :
def search_deviceid_ignite(request):
    """Search EVENTSDATA by device id and render up to 500 matching events.

    NOTE(review): reconstructed from a collapsed source line — the final
    `return` is placed at function level here, which matches the sibling
    views; confirm against the original (a non-GET request would then hit
    unbound locals).
    """
    # Flag telling the template this is a device-id search result view.
    value = 1
    loginUser = request.user
    if request.method=='GET':
        sdeviceid = request.GET.get('sdeviceid')
        # print(sdeviceid)
        nodes = [
            ('127.0.0.1', 10800),
            ('127.0.0.1', 10801),
        ]
        # nodes = [
        # ('35.154.247.92', 10800),
        # ('35.154.247.92', 10801),
        # ]
        client = Client()
        client.connect(nodes)
        # NOTE(review): the device id is interpolated into the SQL text via
        # str.format — not an injection-safe pattern.
        QUERY = '''SELECT * FROM (SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA WHERE DEVICE_ID='{}' AND "TIMESTAMP"> '1575138600' LIMIT 0,500) AS t ORDER BY t.ID desc; '''.format(sdeviceid)
        result = client.sql(
            QUERY,
            include_field_names=True,
        )
        # First yielded row is the column-name header.
        print(next(result))
        eventsdata = []
        i = 0
        for row in result:
            # print(row)
            if i == 0:
                # First data row: columns in SELECT order.
                eventsdata.append({
                    "ID":row[0],
                    "APPID":row[1],
                    "EVENTS":row[2],
                    "DEVICE_ID":row[3],
                    "TIMESTAMP":row[4],
                    "MTIMESTAMP":row[5],
                    "UPTIMESTAMP":row[6],
                    "IPADDRESS":row[7],
                    "CITY":row[8],
                    "COUNTRY":row[9],
                    "EVENTNAME":row[10],
                    "CREATED_DATE":row[11],
                    "CREATED_TIME":row[12],
                })
                i = 1
            else:
                # Subsequent rows use a different index mapping.
                # NOTE(review): presumably the cursor changes column order after
                # the first page — confirm against pyignite cursor behaviour.
                eventsdata.append({
                    "ID": row[0],
                    "APPID": row[1],
                    "EVENTNAME": row[2],
                    "CREATED_DATE": row[3],
                    "CREATED_TIME": row[4],
                    "EVENTS": row[5],
                    "DEVICE_ID": row[6],
                    "TIMESTAMP": row[7],
                    "MTIMESTAMP": row[8],
                    "UPTIMESTAMP": row[9],
                    "IPADDRESS": row[10],
                    "CITY": row[11],
                    "COUNTRY": row[12],
                })
        # print("Connection Establish!")
        client.close()
    return render(request,"Pythonignite.html",{"contacts":eventsdata,"loginUser":loginUser,"value":value,"sdeviceid":sdeviceid})
def export_users_csv():
    """Dump one day of PUBLIC.EVENTSDATA from Ignite into a CSV file.

    For every event row the function: looks up demographics in Postgres
    (``installdata``, ``nccs_flat``, ``payment_option``, ``appsflyer``),
    resolves FM stations and song artist/genre in Ignite master tables,
    normalises song titles per source app, and appends a line to
    ``<BASE_DIR>/target/Data_<MM_DD_YYYY>.csv``.

    NOTE(review): all SQL below is built with str.format() on per-row
    values (device id, song title, city) — should be parameterised.
    NOTE(review): ``conn``/``cur`` (Postgres) are never closed.
    NOTE(review): helpers ``deEmojify``, ``remove_bdata``, ``filter``
    (shadows the builtin), ``select_before_list`` and ``domain`` are
    presumably project-level title cleaners defined elsewhere — confirm.
    """
    print("Hello")
    nodes = [
        ('35.154.247.92', 10800),
        ('35.154.247.92', 10801),
    ]
    # Postgres connection for the demographic lookup tables.
    conn = psycopg2.connect(
        dbname="VTIONData",
        host="vtionproddb.chgz4nqwpdta.ap-south-1.rds.amazonaws.com",
        user="******",
        password="******")
    cur = conn.cursor()
    client = Client()
    client.connect(nodes)
    # Compute yesterday's midnight as an epoch shifted by -19800s
    # (5h30m — presumably an IST offset; confirm) ...
    yesterday = date.today() - timedelta(days=1)
    yesterdayMidnight = int(time.mktime(yesterday.timetuple())) - 19800
    print("FROM : ", yesterdayMidnight)
    todayMidnight = yesterdayMidnight + 86400
    print(todayMidnight)
    # ... then immediately override both bounds with a hard-coded window.
    # NOTE(review): looks like leftover debugging — the computed values
    # above are dead.
    yesterdayMidnight = '1579199400'
    todayMidnight = '1579285800'
    QUERY = ''' SELECT ID, APPID, EVENTS, DEVICE_ID, "TIMESTAMP", MTIMESTAMP, UPTIMESTAMP, IPADDRESS, CITY, COUNTRY, EVENTNAME, CREATED_DATE, CREATED_TIME FROM PUBLIC.EVENTSDATA WHERE "TIMESTAMP" > '{}' AND "TIMESTAMP" < '{}' ;'''.format(
        yesterdayMidnight, todayMidnight)
    result = client.sql(
        QUERY,
        include_field_names=True,
    )
    # include_field_names=True: first yielded row is the header — skip it.
    next(result)
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    print(BASE_DIR)
    namespac = BASE_DIR + str(
        "/target/Data_" + yesterday.strftime('%m_%d_%Y') + '.csv')
    with open(namespac, 'w+') as writeFile:
        print("START : ", time.localtime(time.time()))
        writer = csv.writer(writeFile)
        # 33-column header.  NOTE(review): several writerow() calls below
        # emit fewer columns than this header declares.
        writer.writerow([
            'appid', 'DeviceId', 'vtionid', 'deviceModel', 'platform',
            'apppackage', 'keyname', 'mobileOperator', 'app', 'song',
            'album', 'Pstate', 'Program', 'Episode', 'Source', 'ipaddress',
            'city', 'station', 'duration', 'timestamp', 'created_date',
            'created_time', 'stationName', 'artist_name', 'genre',
            'education', 'ownership', 'nccs_code', 'age', 'gender',
            'number', 'Uninstall', 'Reward_option'
        ])
        i = 0
        for row in result:
            # Per-row defaults for the optional lookup columns.
            nccs = age = gender = number = status = ''
            artist_name = ''
            genre = ''
            # NCCS socio-economic code keyed on device id (row[6]).
            try:
                cur.execute(
                    '''SELECT nccs_code FROM public.installdata WHERE deviceid = '{}';'''
                    .format(row[6]))
                nccs_result = cur.fetchone()
                nccs_res = nccs_result[0]
            except Exception as e:
                # No match (or fetchone() returned None) → blank code.
                nccs_res = ''
                pass
            # The first data row's columns arrive in a different order
            # than the rest, so the EVENTS json payload is at a different
            # index.  NOTE(review): the writerow() index usage below does
            # NOT switch per-row the same way — the first row's other
            # columns are likely mis-mapped; verify against the cursor.
            if i == 0:
                x = row[2]
                i = 1
            else:
                x = row[5]
            # vtionid = urlsafe-base64 of the device id string.
            urlSafeEncodedBytes = base64.urlsafe_b64encode(
                row[6].encode("utf-8"))
            vtionid = str(urlSafeEncodedBytes, "utf-8")
            try:
                y = json.loads(x)
                if y.get("segmentation") is None:
                    # Event without segmentation: device/meta columns only.
                    writer.writerow([
                        row[1], row[6], vtionid, y.get("d"), y.get("p"),
                        y.get("ap"), row[2], y.get("network").get("ope"),
                        "", "", "", "", "", "", "", row[10], row[11], "",
                        "", row[7], row[3], row[4]
                    ])
                else:
                    # Only the video.meter app's events are exported.
                    if y.get("ap") == "com.video.meter":
                        song = y.get("segmentation").get("Song")
                        app = y.get("segmentation").get("App")
                        education = y.get("segmentation").get(
                            "Highest Education")
                        ownership = y.get("segmentation").get("Ownership")
                        if row[2] == 'FM_Tuned':
                            # Resolve city + tuned frequency to a station
                            # name via the Ignite master table.
                            try:
                                QUERYa = ''' SELECT STATION FROM MASTERTABLEDEMO WHERE CITY = '{}' AND FREQUENCY = '{}'; '''.format(
                                    row[11],
                                    y.get("segmentation").get("Station"))
                                resulta = client.sql(
                                    QUERYa,
                                    include_field_names=True,
                                )
                                next(resulta)
                                stationName = [rowa for rowa in resulta]
                                writer.writerow([
                                    row[1], row[6], vtionid, y.get("d"),
                                    y.get("p"), y.get("ap"), row[2],
                                    y.get("network").get("ope"),
                                    y.get("segmentation").get("App"),
                                    y.get("segmentation").get("Song"),
                                    y.get("segmentation").get("Album"),
                                    y.get("segmentation").get("PState"),
                                    "", "",
                                    y.get("segmentation").get("Source"),
                                    row[10], row[11],
                                    y.get("segmentation").get("Station"),
                                    y.get("segmentation").get("Duration"),
                                    row[7], row[3], row[4],
                                    stationName[0][0], "", "", "", "",
                                    nccs_res
                                ])
                            except Exception as e:
                                # No master-table match: same row with a
                                # blank station name.
                                writer.writerow([
                                    row[1], row[6], vtionid, y.get("d"),
                                    y.get("p"), y.get("ap"), row[2],
                                    y.get("network").get("ope"),
                                    y.get("segmentation").get("App"),
                                    y.get("segmentation").get("Song"),
                                    y.get("segmentation").get("Album"),
                                    y.get("segmentation").get("PState"),
                                    "", "",
                                    y.get("segmentation").get("Source"),
                                    row[10], row[11],
                                    y.get("segmentation").get("Station"),
                                    y.get("segmentation").get("Duration"),
                                    row[7], row[3], row[4],
                                    "", "", "", "", "", nccs_res
                                ])
                        elif row[2] == 'Video_Tuned' or row[2] == 'Video_off':
                            # Video events carry Program/Episode instead
                            # of a station.
                            try:
                                writer.writerow([
                                    row[1], row[6], vtionid, y.get("d"),
                                    y.get("p"), y.get("ap"), row[2],
                                    y.get("network").get("ope"),
                                    y.get("segmentation").get("App"),
                                    y.get("segmentation").get("Song"),
                                    y.get("segmentation").get("Album"),
                                    y.get("segmentation").get("PState"),
                                    y.get("segmentation").get("Program"),
                                    y.get("segmentation").get("Episode"),
                                    y.get("segmentation").get("Source"),
                                    row[10], row[11], "",
                                    y.get("segmentation").get("Duration"),
                                    row[7], row[3], row[4],
                                    "", "", "", "", "", nccs_res
                                ])
                            except Exception as e:
                                pass
                        elif row[2] == 'Audio_Tuned':
                            # Audio events: filter out ads/downloads, then
                            # clean the title per source app and enrich
                            # with artist/genre from the song master data.
                            try:
                                if song:
                                    if song.isdigit():
                                        continue
                                    song = song.strip()
                                    filter_new = [
                                        'Download', 'download', 'AUD',
                                        'Advertise', '%'
                                    ]
                                    status_filter = (
                                        filter_new[0] in song
                                        or filter_new[1] in song
                                        or filter_new[2] in song
                                        or filter_new[3] in song
                                        or filter_new[4] in song)
                                    # NOTE(review): str(...) == 'False' is
                                    # an odd spelling of "not status_filter".
                                    if str(status_filter) == 'False':
                                        album = y.get("segmentation").get(
                                            "Album")
                                        if album is None:
                                            album_filter = False
                                        else:
                                            a_filter = [
                                                'unknown', 'Advertise',
                                                'Sponsored'
                                            ]
                                            album_filter = (
                                                a_filter[0] in album
                                                or a_filter[1] in album
                                                or a_filter[2] in album)
                                        if str(album_filter) == 'False':
                                            # Artist/genre lookup, with a
                                            # fallback to SONG_DATA_ADD.
                                            QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA WHERE TRACKNAME = '{}';'''.format(
                                                song)
                                            details = client.sql(
                                                QUERY,
                                                include_field_names=True,
                                            )
                                            next(details)
                                            for det in details:
                                                artist_name = det[0]
                                                genre = det[1]
                                            if artist_name:
                                                pass
                                            else:
                                                QUERY = '''SELECT ARTISTNAME, GENRE FROM PUBLIC.SONG_DATA_ADD WHERE TRACKNAME = '{}';'''.format(
                                                    song)
                                                details = client.sql(
                                                    QUERY,
                                                    include_field_names=True,
                                                )
                                                next(details)
                                                for det in details:
                                                    artist_name = det[0]
                                                    genre = det[1]
                                            # Per-app title clean-up.
                                            if app == 'Amazon Music':
                                                pass
                                                if '%' in song or song.isdigit():
                                                    continue
                                                else:
                                                    if 'ñ' in song:
                                                        song = song.replace(
                                                            'ñ', 'n')
                                                    else:
                                                        pass
                                                    song = deEmojify(
                                                        filter(
                                                            remove_bdata(song).replace(
                                                                ')', '').replace(
                                                                    '(', '')))
                                            elif app == 'Spotify' or 'Spot' in app:
                                                if '%' in song:
                                                    continue
                                                else:
                                                    song = filter(
                                                        deEmojify(
                                                            remove_bdata(song)))
                                            elif app == 'Gaana':
                                                if 'Gaana' in song or song.isdigit() or song == ',':
                                                    continue
                                                else:
                                                    # '├▒' is a mojibake
                                                    # form of 'ñ'.
                                                    if 'ñ' in song or '├▒' in song:
                                                        song = song.replace(
                                                            'ñ', 'n').replace(
                                                                '├▒', 'n')
                                                    else:
                                                        pass
                                                    song = select_before_list(
                                                        remove_bdata(
                                                            deEmojify(song))
                                                    ).lstrip('Ep 0123456789-.')
                                            elif app == 'Google Play Music':
                                                aa = song  # NOTE(review): unused
                                                if song.isdigit() or 'high quality' in song or song == ',':
                                                    continue
                                                else:
                                                    song = domain(
                                                        remove_bdata(song)
                                                    ).lstrip(
                                                        ' 0123456789.-').strip()
                                                    # Strip bitrate/extension
                                                    # noise from the title.
                                                    if '128k' in song or '256k' in song or 'p3' in song:
                                                        song = song.replace(
                                                            '(128k)', '').replace(
                                                                '(256k)', '').replace(
                                                                    '128k', '').replace(
                                                                        '256k', '').replace(
                                                                            '.mp3', '')
                                                    song = select_before_list(
                                                        filter(deEmojify(song)))
                                            elif app == 'Hungama':
                                                song = filter(
                                                    remove_bdata(
                                                        deEmojify(song)))
                                            elif app == 'i Music':
                                                song = song.replace(
                                                    '-', ' ').replace(
                                                        ':', ' ').replace(
                                                            ' ', ' ').replace(
                                                                ' ', ' ').replace(
                                                                    '.mp3', '')
                                                if 'Unknown artist' in song or 'i Music' in song or '//' in song:
                                                    continue
                                                song = deEmojify(
                                                    select_before_list(
                                                        filter(
                                                            domain(
                                                                remove_bdata(song))
                                                        ).lstrip(
                                                            ' 0123456789.-').strip())).strip()
                                            elif app == 'JioSaavn':
                                                song = filter(
                                                    remove_bdata(song))
                                            elif app == 'Saavn':
                                                # combine with jio savan
                                                song = filter(
                                                    domain(
                                                        remove_bdata(song)))
                                                app = 'JioSaavn'
                                            elif app == 'MX Player':
                                                if '--:--:--' in song or 'Insufficient balance' in song or 'Expiry Date' in song:
                                                    continue
                                                else:
                                                    song = deEmojify(
                                                        filter(
                                                            remove_bdata(song)
                                                        ).replace(' ', ' ')).strip()
                                            elif app == 'Music Player':
                                                song = domain(
                                                    filter(
                                                        remove_bdata(song))
                                                ).replace(' ', ' ').lstrip(
                                                    ' 0123456789.-').strip()
                                            elif app == 'Wynk Music':
                                                song = remove_bdata(
                                                    deEmojify(song)).lstrip(
                                                        ' 0123456789.-').strip()
                                                if song.isdigit() or '%' in song:
                                                    continue
                                                elif '(From' in song:
                                                    song = song.replace(
                                                        '(From', '')
                                                else:
                                                    if '")' in song:
                                                        song = remove_bdata(
                                                            song.replace(
                                                                '\")', ''))
                                            elif app == 'YouTube Music':
                                                song = filter(
                                                    deEmojify(
                                                        remove_bdata(song)))
                                            else:
                                                # Unknown player: keep as-is.
                                                song = song
                                                app = app
                                            # Write the cleaned audio row.
                                            try:
                                                if song:
                                                    if song.isdigit():
                                                        continue
                                                    song = song.strip()
                                                    writer.writerow([
                                                        row[1], row[6],
                                                        vtionid, y.get("d"),
                                                        y.get("p"),
                                                        y.get("ap"), row[2],
                                                        y.get("network").get("ope"),
                                                        app, song,
                                                        y.get("segmentation").get("Album"),
                                                        y.get("segmentation").get("PState"),
                                                        "", "",
                                                        y.get("segmentation").get("Source"),
                                                        row[10], row[11],
                                                        y.get("segmentation").get("Station"),
                                                        y.get("segmentation").get("Duration"),
                                                        row[7], row[3],
                                                        row[4], "",
                                                        artist_name, genre,
                                                        '', '', nccs_res,
                                                        '', '', '', ''
                                                    ])
                                                else:
                                                    pass
                                            except Exception as e:
                                                pass
                                        else:
                                            # Album matched the ad filter.
                                            pass
                                    else:
                                        # Title matched the download/ad filter.
                                        pass
                            except Exception as e:
                                pass
                        elif row[2] == 'Register' or row[2] == 'Profile':
                            # Registration/profile events carry demographics.
                            if ownership:
                                try:
                                    # NCCS code from education x ownership
                                    # count (capped at 9 items).
                                    own = ownership.split(',')
                                    if len(own) >= 9:
                                        num_own = 9
                                    else:
                                        num_own = len(own)
                                    cur.execute(
                                        '''SELECT nccs_code FROM public.nccs_flat where education = '{}' and ownership = '{}';'''.
                                        format(education, num_own))
                                    nccs = cur.fetchone()
                                except Exception as e:
                                    pass
                                try:
                                    try:
                                        cur.execute(
                                            '''SELECT payment_option FROM public.payment_option where deviceid = '{}'; '''.format(row[6]))
                                        payment_option = cur.fetchone()
                                        payment = payment_option[0]
                                    except:
                                        # Default reward option.
                                        payment = 'Amazon voucher'
                                        pass
                                    # Both key spellings occur in the data.
                                    age = y.get("segmentation").get("age")
                                    if age:
                                        pass
                                    else:
                                        age = y.get("segmentation").get("Age")
                                    gender = y.get("segmentation").get(
                                        "Gender")
                                    if gender:
                                        pass
                                    else:
                                        gender = y.get("segmentation").get(
                                            "gender")
                                    number = y.get("segmentation").get(
                                        "Mobile Number")
                                    # Drop numbers with a '+' country prefix.
                                    num_status = "+" in number
                                    if str(num_status) == 'True':
                                        number = ''
                                    else:
                                        try:
                                            cur.execute(
                                                '''SELECT i_status FROM public.appsflyer where number = '{}';'''.format(
                                                    number))
                                            status_now = cur.fetchone()
                                            status = status_now[0]
                                            if status != 'True':
                                                status = ''
                                        except:
                                            pass
                                    try:
                                        # Full 33-column row with nccs[0].
                                        writer.writerow([
                                            row[1], row[6], vtionid,
                                            y.get("d"), y.get("p"),
                                            y.get("ap"), row[2],
                                            y.get("network").get("ope"),
                                            y.get("segmentation").get("App"),
                                            y.get("segmentation").get("Song"),
                                            y.get("segmentation").get("Album"),
                                            y.get("segmentation").get("PState"),
                                            "", "",
                                            y.get("segmentation").get("Source"),
                                            row[10], row[11],
                                            y.get("segmentation").get("Station"),
                                            y.get("segmentation").get("Duration"),
                                            row[7], row[3], row[4], "",
                                            artist_name, genre, education,
                                            ownership, nccs[0], age,
                                            gender, number, status, payment
                                        ])
                                    except Exception as e:
                                        pass
                                        # Fallback: nccs lookup failed
                                        # (nccs is still '') — write the
                                        # row with a blank nccs column.
                                        writer.writerow([
                                            row[1], row[6], vtionid,
                                            y.get("d"), y.get("p"),
                                            y.get("ap"), row[2],
                                            y.get("network").get("ope"),
                                            y.get("segmentation").get("App"),
                                            y.get("segmentation").get("Song"),
                                            y.get("segmentation").get("Album"),
                                            y.get("segmentation").get("PState"),
                                            "", "",
                                            y.get("segmentation").get("Source"),
                                            row[10], row[11],
                                            y.get("segmentation").get("Station"),
                                            y.get("segmentation").get("Duration"),
                                            row[7], row[3], row[4], "",
                                            artist_name, genre, education,
                                            ownership, "", age, gender,
                                            number, status, payment
                                        ])
                                except Exception as e:
                                    pass
                            else:
                                # No ownership info: write without the
                                # demographic tail columns.
                                writer.writerow([
                                    row[1], row[6], vtionid, y.get("d"),
                                    y.get("p"), y.get("ap"), row[2],
                                    y.get("network").get("ope"),
                                    y.get("segmentation").get("App"),
                                    y.get("segmentation").get("Song"),
                                    y.get("segmentation").get("Album"),
                                    y.get("segmentation").get("PState"),
                                    "", "",
                                    y.get("segmentation").get("Source"),
                                    row[10], row[11],
                                    y.get("segmentation").get("Station"),
                                    y.get("segmentation").get("Duration"),
                                    row[7], row[3], row[4], "",
                                    artist_name, genre, education,
                                    ownership, ""
                                ])
            except Exception as e:
                # Unparseable EVENTS payload (or missing keys): skip row.
                pass
        client.close()
        writeFile.close()  # redundant inside the with-block, kept as-is
['CHN', 'Uighur', False, Decimal('0.6')], ['CHN', 'Yi', False, Decimal('0.6')], ['CHN', 'Zhuang', False, Decimal('1.4')], ] # establish connection client = Client() with client.connect('127.0.0.1', 10800): # create tables for query in [ COUNTRY_CREATE_TABLE_QUERY, CITY_CREATE_TABLE_QUERY, LANGUAGE_CREATE_TABLE_QUERY, ]: client.sql(query) # create indices for query in [CITY_CREATE_INDEX, LANGUAGE_CREATE_INDEX]: client.sql(query) # load data for row in COUNTRY_DATA: client.sql(COUNTRY_INSERT_QUERY, query_args=row) for row in CITY_DATA: client.sql(CITY_INSERT_QUERY, query_args=row) for row in LANGUAGE_DATA: client.sql(LANGUAGE_INSERT_QUERY, query_args=row)