Example #1
def delete():
    print("""
    删除用户相关信息,谨慎操作!!!!
    按q & Q取消操作:
    """)
    while True:
        name = input("""Enter the name of the user to delete:""").strip()
        if not name:
            print("你输入的为空,请重新输入")
        else:
            sql = """select name from user"""
            try:
                result = connect(sql, 'find')
                userlist = [p[0] for p in result]
            except:
                userlist = []
            if name in userlist:
                sql = """delete from user where name = '{}'""".format(name)
                result = connect(sql, 'delete')
                # po = userlist.index(name)
                # data.pop(po)
                # data = json.dumps(data)
                # with open("mysql.txt", "w") as f:
                #     f.write(data)
                print("删除用户{}成功!!!".format(name))
                logging.warning("\033[31m{}用户已经被删除\033[0m".format(name))
                break
            else:
                print("你所输入的用户不存在,请重新输入")
Example #2
def product():
    date = time.strftime("%Y-%m-%d",time.localtime())
    conn = mysql.connect()
    cursor = conn.cursor()
    results = get_chid(date)   
    
    url = 'http://hz.tvsou.com/jm/bjxest/data_ajJ9orFapp.asp'#?id=1&date=2014-06-30
    try:    
        for row in results:           
            urls = url+'?id=%s&date=%s'%(row[0],date)
            print 'get url',urls            
            logging.info('get url:%s' % urls)
            datas = parse_xml(urls,row[1])            
            if datas==None:
                logging.error('get data error,get None')
                continue
            logging.info('get data from tvsou OK')
            data_1 = tuple()
            data_2 = tuple()
            adata = []
            ndata = []
            for each_data in datas:                
                #adata.append((row[0],each_data[0],row[1],each_data[1],each_data[2],each_data[3]))
                #ndata.append((each_data[1],row[1])) 
                milisec = get_miliseconds(each_data[2], date)
                message = """insert into live_movie (chid,program_name,catalogid,date,start_time,timelength,gwtime) values(%s,'%s',%s,'%s','%s',%s,%s)""" % (row[0],each_data[0],row[1],each_data[1],each_data[2],each_data[3],milisec)
                conn = mysql.connect()
                cursor = conn.cursor()
                try:
                    cursor.execute(message)
                    conn.commit()                    
                except:
                    logging.error('update db failure')
                    print 'update db failure'
                ms = "update  live_catalog set status=1 where date='%s' and catalogid=%s" % (each_data[1],row[1])
                try:
                    cursor.execute(ms)
                    conn.commit()
                    conn.close()
                except:
                    logging.error('update live_catalog failure')
                    print 'db failure too'
                    pass
            logging.info('update db success')
    except:
        print 'no action update movie'
        logging.info('no action update movie,chid=%s' % row[0])
        pass
Example #3
 def drop_sql_table(self, table_name):
     """This drops given sql table in the specific db."""
     import mysql.connector as mysql
     with mysql.connect(**self.params) as conn:
         with conn.cursor() as cursor:
             cursor.execute("DROP TABLE {}".format(table_name))
         conn.commit()
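
Table names cannot be bound as query parameters, so the format() call above is unavoidable; the identifier can at least be validated and backtick-quoted before it is dropped. A minimal sketch under that assumption, reusing the same connection parameters:

import re
import mysql.connector

def drop_sql_table_checked(params, table_name):
    """Drop a table after a sanity check on the identifier (assumed policy: letters, digits, underscores)."""
    if not re.fullmatch(r"[A-Za-z0-9_]+", table_name):
        raise ValueError("unexpected table name: {!r}".format(table_name))
    conn = mysql.connector.connect(**params)
    try:
        cursor = conn.cursor()
        cursor.execute("DROP TABLE `{}`".format(table_name))
        conn.commit()
    finally:
        conn.close()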
Example #4
def find():
    print("""尊敬的用户,您好!!!欢迎使用查询功能
    请输入您的选择!!!
    q & Q)退出本页面   """)
    # with open("mysql.txt", "r+") as f:
    #     file = f.read()
    #     data = json.loads(file)
    while True:
            name = input(""" Enter the user name to look up, must not be empty: """).strip()
            if name == "q" or name == "Q":
                break
            else:
                #userlist = [p['name'] for p in data]
                sql = """select server,qq from user where name = '{}'""".format(name)
                try:
                    result = connect(sql, 'find')
                    server = result[0][0]
                    qq     = result[0][1]
                except:
                    logging.critical("\033[31mA serious error occurred while querying the database\033[0m")
                    result = None
                if not name:
                    print("用户名为空,请重新输入")
                elif not result:
                    print("该用户不存在,请重新输入")
                else:
                    print("""
                    查询用户成功!!!!
                    用户名 :  {}
                    qq号:    {}
                    server:   {}                    
                    """.format(name,qq,server))
Example #5
def get_calogid(date):
    check_date = format_time(date)
    print 'check date',check_date
    conn = mysql.connect()
    cursor = conn.cursor()
    cid_message = """select tvsou.chid,tvsou.md5 from tvs_updatelog as tvsou,live_channel as orl where orl.chid=tvsou.chid and date='%s' order by tvsou.chid""" % check_date
    cata_message = """select catalogid,chid from live_catalog where date='%s'""" % check_date

    cursor.execute(cid_message)
    tvsou_info=cursor.fetchall()
    cursor.execute(cata_message)
    cata_info = cursor.fetchall()    
    stup=[]        
    for tup in flatten(tvsou_info):
        stup.append(tup)   
    sstup=stup[:]    
    del_cid = []
    for each in flatten(stup):
    #if len(stup)>1:
        sstup.remove(each)   
        for n in sstup:      
            if each[1]==n[1]:            
                del_cid.append(each[0])
                break    
    del_cid = list(set(del_cid))
    del_catalogid = []
    for n in del_cid:
        for each in flatten(cata_info):
            if n==each[1]:
                del_catalogid.append(each[0])
                break           
    conn.close()
    return (del_catalogid)
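
The nested remove-and-compare loops above flag each chid whose md5 shows up again later in the list. Roughly the same scan can be written with a dictionary keyed on md5, avoiding the list copy and the removals; a sketch over plain (chid, md5) tuples, independent of the database calls:

def chids_with_repeated_md5(rows):
    """rows: iterable of (chid, md5) tuples; flags every occurrence except the last in each md5 group."""
    by_md5 = {}
    for chid, md5 in rows:
        by_md5.setdefault(md5, []).append(chid)
    flagged = []
    for chids in by_md5.values():
        flagged.extend(chids[:-1])  # like the loop above, the final occurrence is never flagged
    return list(set(flagged))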
Example #6
 def abre_conexao(self):
     try:
         con = mdb.connect('localhost', 'root', 'root', 'bdPython')
         print("Conexão aberta")
         return con
     except Exception as e:
         sys.exit("Não foi possível abrir comunicação com o banco")
Example #7
def find():
    print("""尊敬的用户,您好!!!欢迎使用查询功能
    请输入您的选择!!!
    q & Q)退出本页面   """)
    while True:
        name = input(""" Enter the user name to look up, must not be empty: """).strip()
        if name == "q" or name == "Q":
            break
        else:
            sql = """select server,qq from user where name = '{}'""".format(
                name)
            try:
                result = connect(sql, 'find')
                server = result[0][0]
                qq = result[0][1]
            except:
                logging.critical("\033[31mA serious error occurred while querying the database\033[0m")
                result = None
            if not name:
                print("The user name is empty, please try again")
            elif not result:
                print("That user does not exist, please try again")
            else:
                print("""
                    Lookup succeeded!!!!
                    """)
                table = clean([[name, server, qq]])
                print(table)
Example #8
    def find(self, text):
        print(text)
        self.list.clear()
        length = len(text)
        it = root
        print(length)
        for index in range(length):
            if it == 'end':
                break
            it = self.find_it(it, text[index])
        print(text)
        if len(self.list) == 0:
            return self.answer

        sql = "SELECT answer FROM root"
        for u in range(0, len(self.list) - 1):
            sql = sql + "_" + str(self.list[u])
        sql = sql + " WHERE q_id = " + str(self.list[len(self.list) - 1]) + ";"
        print(sql)

        conn = mysql.connect()
        if conn is None:
            return self.answer

        cursor = conn.cursor()
        cursor.execute(sql)
        result = cursor.fetchone()

        if result is None:
            return self.answer

        mysql.close(conn)
        return result[0]
Example #9
def update_by_date():
    date_l = []
    conn = mysql.connect()
    cur = conn.cursor()
    sql = """SELECT count(DISTINCT date),min(date) FROM `live_catalog`; """
    sqls = """select distinct(date) from live_catalog;"""
    cur.execute(sql)    
    results = cur.fetchall()
    cur.execute(sqls)
    resu = cur.fetchall()    
    conn.close()
    print resu
    num = len(resu)    
    count = int(results[0][0])    
    min_d = results[0][1].encode('utf-8')      
    ft = int(min_d.split('-',2).pop(2))
    for i in range(num):        
        date_l.append(resu[i][0].encode('utf-8'))   
    dt = min_d
    while True:
        next_d = find_n_time(dt)
        dt = next_d       
        if next_d > time.strftime("%Y-%m-%d",time.localtime()):
            print 'out of time',next_d
            break
        if next_d in date_l:               
                continue
        else:            
            print 'do something',next_d            
            print '------------------update-------------------'
            print '------------------%s-------------------' % next_d
            print '-------------------------------------------'
            update_catalog(next_d)
Example #10
def step_impl(context, url, username, password, database):
    """
    :type context behave.runner.Context
    """
    print("\n **** Scenario 1 **** \n\n")

    context.conn = mysql.connect(url, username, password, database)
Example #11
def connect_db():
    """Connects to the specific database."""
    """conn = mysql.connector"""
    rv = mysql.connect(user='******', passwd='123456', db='mt_cust', charset='utf-8')
    # rv = mysql.connect(app.config['DATABASE'])
    rv.row_factory = rv.Row
    return rv
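
If the goal of the row_factory line is to get rows back as dictionaries, the MySQL drivers expose that on the cursor rather than the connection. A minimal sketch assuming mysql.connector is the driver behind the mysql name above (placeholder credentials kept as-is):

import mysql.connector

def connect_db():
    """Connects to the mt_cust database."""
    return mysql.connector.connect(user='******', password='123456',
                                   database='mt_cust', charset='utf8')

conn = connect_db()
cursor = conn.cursor(dictionary=True)  # rows come back as dicts keyed by column name
cursor.execute("SELECT 1 AS one")
print(cursor.fetchone())  # {'one': 1}
conn.close()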
Example #12
    def login(self, *args):
        from kivy.storage.jsonstore import JsonStore
        import mysql
        connection = mysql.connect('127.0.0.1',
                                   '8080',
                                   password='******',
                                   db='accounts',
                                   username='******')
        con = connection.execute()
        store = JsonStore('account.json')
        if str(self.n_username.text) and str(self.n_email.text) and str(
                self.n_phone.text) and str(self.n_password.text):
            store.put('credentilas',
                      user=self.n_username.text,
                      email=self.n_email.text,
                      phone=self.n_phone.text,
                      password=self.n_password.text)
            if True:
                co = VideoPlayer(source='video/video2.avi',
                                 state='play',
                                 volume=1)
                q = Popup(title='details',
                          title_align='center',
                          content=co,
                          size_hint=(1, 1))
                q.open()
                Clock.schedule_once(q.dismiss, 40)

        else:
            co = Label(text='retry')
            e = Popup(title='error will uploading check your details',
                      title_color=[1, 0, 1, 1],
                      content=co,
                      size_hint=(.2, .2)).open()
            Clock.schedule_once(e.dismiss, .90)
Example #13
def update_list():
    
    conn = mysql.connect()
    cursor = conn.cursor()   
    cursor.execute("SET NAMES utf8")
    conn.commit()   
    cursor.execute("insert into live_server values(0,'http://zhibo.tv-cloud.cn')")
    sorts = [(1,u"央视".encode('utf-8')),(2,u"卫视".encode('utf-8')),(3,u"地方".encode('utf-8'))]
    status = [(0,u"发布".encode('utf-8')),(1,u"未发布".encode('utf-8')),(2,u"删除".encode('utf-8'))]
    
    for i in range(3):
        sqli = """insert into live_sort values(%s,'%s')""" % sorts[i] 
        try:
            cursor.execute(sqli)
            conn.commit()    
        except:
            pass
    
    for n in range(3):
        sqln = """insert into live_status values(%s,'%s')""" % status[n]
        try:
            print sqln
            cursor.execute(sqln)
            conn.commit()
        except:
            pass        
    conn.close()
    
#if __name__=='__main__':
    #create_tables()
Example #14
def get_programInfo_from_store(catalogid):
    conn = mysql.connect()
    cursor = conn.cursor()
    message = """ select chid,catalogid,programid,program_name,start_time,timelength,gwtime from live_movie where catalogid=%s order by programid""" % catalogid
    cursor.execute(message)    
    response = cursor.fetchall()
    conn.close()
    return(response)
Example #15
 def fecha_conexao(self):
     try:
         con = mdb.connect('localhost', 'root', 'root', 'bdPython')
         con.close()
         print("Conexão encerrada")
     except Exception as e:
         sys.exit(
             "Could not close the connection to the database")
Example #16
def load_data_from_mysqlDB(NOW, step, delay):

    # Connecting to mySQL DB and reading the data
    db = MySQLdb.connect(host="your mysql DB host",
                         user="******",
                         passwd="your mysql DB pw",
                         db="cyour mysql DB name")
    cur = db.cursor()
    Now, ED_flag = Check_End_of_Day(NOW, cur)
    Start = Now.strftime('%Y-%m-%d %H:%M:%S')
    Stop = (Now + step).strftime('%Y-%m-%d %H:%M:%S')
    # Force the machine to sleep until the temporal data is stored in mysql DB
    time.sleep(delay)
    # For Testing: Dummy time_stamps for a testing
    # Start,Stop='2015-10-08 07:30:50','2015-10-09 21:46:33'
    print(Start, Stop)
    # Select temporal data from DB
    query1 = 'SELECT * FROM citypulse1.AarhusTweet WHERE time BETWEEN ' + "'" + Start + "'" + ' AND ' + "'" + Stop + "'"

    cur.execute(query1)
    result_set1 = cur.fetchall()
    if not result_set1:
        noData_flag = True
        return ([], [], noData_flag, Now, ED_flag)
    else:
        noData_flag = False
        twitterId,temporal_tweet,Ttemporal_tweet,time_stamp,lat,long=[],[],[],[],[],[]
        for row in result_set1:
            # print "%s, %s, %s, %s, %s" % (row[0], row[2].replace('\n', ' ').replace('\r', ''), row[3], row[4], row[5])
            twitterId.append(row[0])
            temporal_tweet.append(row[2].replace('\n', ' ').replace('\r', ''))
            time_stamp.append(row[3])
            long.append(row[4])
            lat.append(row[5])
        Ttemporal_tweet = list(gs.translate(temporal_tweet, 'en'))
        # For Testing:
        # Ttemporal_tweet=temporal_tweet
        #Translated Tweet
        trans_df = pd.DataFrame({
            'twitterId': twitterId,
            'time': time_stamp,
            'text': Ttemporal_tweet,  #.str.strip('"')
            'lat': lat,  #.str.strip('"')
            'long': long
        })  #.str.strip('"')
        # Danish Tweet
        raw_df = pd.DataFrame({
            'twitterId': twitterId,
            'time': time_stamp,
            'text': temporal_tweet,  #.str.strip('"')
            'lat': lat,  #.str.strip('"')
            'long': long
        })  #.str.strip('"')
    # Close the mysql cursor object
    cur.close()
    # Close the mysql DB connection
    db.close()
    return (trans_df, raw_df, noData_flag, Now, ED_flag)
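
The SELECT above splices the Start and Stop timestamps into the SQL text by string concatenation. With MySQLdb the same query can be parameterized instead, which sidesteps the manual quoting; a short sketch against the same cursor:

# Hedged variant: bind the time window instead of concatenating it into the SQL string
query1 = "SELECT * FROM citypulse1.AarhusTweet WHERE time BETWEEN %s AND %s"
cur.execute(query1, (Start, Stop))
result_set1 = cur.fetchall()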
Example #17
def button_sql():
    name = name_box.get()
    mail = mail_box.get()
    pw = pass_box.get()

    sql = connect()
    cnct, cur, table, status = sql.setting()
    req = sql.insert(name, mail, pw, cnct)
    output.insert(tk.END, req)
Example #18
def alldata(ret):
    name = ret.group(1)
    con = mysql.connect(host='localhost', port=3306, user='******', password='******', database='stock_db',charset='utf8')
    cs = con.cursor()
    cs.execute("select i.code,i.short from info as i inner join focus as f\
on i.id=f.info_id;")
    stock_infos = cs.fetchall()
    cs.close()
    con.close()
    return "add (%s) ok" %name
Example #19
def del_info(ret):
    stock_code = ret.group(1)
    con = mysql.connect(host='localhost', port=3306, user='******', password='******', database='stock_db',charset='utf8')
    cs = con.cursor()
    sql = """select * from info where code=%s;"""
    cs.execute(sql, (stock_code,))
    if not cs.fetchone():        
        cs.close()
        con.close()
        return "wrong info"
Example #20
 def submit():
     conn = mysql.connect(user='******', password='******')
     cursor = conn.cursor()
     cursor.execute("use ganesh")
     cursor.execute(
         'CREATE TABLE IF NOT EXISTS UserInfo(name varchar(200), age int(20))'
     )
     cursor.execute('INSERT INTO UserInfo VALUES(%s,%s)',
                    (entry1.get(), entry2.get()))
     conn.commit()
Example #22
def get_chid(date):   
    message = """select chid,catalogid from live_catalog where date='%s' and (status=0 or status=2) order by chid """ % date    
   
    #select channelid for order_list
    conn = mysql.connect()
    cursor = conn.cursor()
    cursor.execute(message)
    results = cursor.fetchall()
    print 'cids,',results
    conn.close()
    return (results)
Example #23
 def submit2():
     conn = mysql.connect(user='******', password='******')
     cursor = conn.cursor()
     cursor.execute("use ganesh")
     cursor.execute(
         'CREATE TABLE IF NOT EXISTS UserMarks(10th int(20), 12th int(20), Graduation int(20), PG int(20), OtherExam varchar(200), OtherMarks int(20) )'
     )
     cursor.execute('INSERT INTO UserMarks VALUES(%s,%s,%s,%s,%s,%s)',
                    (entry3.get(), entry4.get(), entry5.get(), entry6.get(),
                     entry7.get(), entry8.get()))
     conn.commit()
Example #24
 def try_connection(self):
     """
     Test if db specific connection module is installed and if you can
     connect to db.
     """
     try:
         import mysql.connector as mysql
         self.is_module = True
         with mysql.connect(**self.params):
             self.is_active = True
     except:
         pass
Example #25
def main():
    ip = sys.argv[1]
    try:
        db = mariadb.connect(host=ip, user=USER, password=PASS)
    except mariadb.Error as e:
        done(STATUS_NOT_OK, str(e), e)

    try:
        check(db)
    finally:
        db.close()

    done(STATUS_OK)
Example #26
def processing_set():
    con = None
    try:
        con = mysql.connect('localhost', 'root', '######')
        cur = con.cursor()

        cur.execute("Select query from query_box")
        data = cur.fetchall()
        cur.execute("Delete from query_box")
        con.commit()
        return data
    except:
        print("RIP connection failed")
    finally:
        if con:
            con.close()
Example #27
def get_channelInfo_from_store():
    m1 = """SELECT live_channel.chid,live_channel.sort_id,tvs_channel.chname,"""
    #m2 = """live_logo.ip,live_logo.path,live_url.live_ip,live_url.live_path from live_channel,tvs_channel,"""
    #m3 = """live_url,live_logo where live_channel.chid=tvs_channel.chid and live_channel.chid=live_logo.chid and live_channel.chid = live_url.chid order by live_channel.chid;"""  
    m2 = """live_server.live_ip,live_url.live_path from live_channel,tvs_channel,"""
    m3 = """live_url,live_server where live_channel.chid=tvs_channel.chid and """
    m4 = """live_channel.chid = live_url.chid and live_url.serverid=live_server.serverid and live_channel.live_urlid=live_url.live_urlid order by live_channel.chid;"""
    message = m1+m2+m3+m4
    print message
    conn = mysql.connect()
    cursor = conn.cursor()
    cursor.execute(message)
    response = cursor.fetchall()
    conn.close()
    return(response)
Example #28
    def create_sql_table(self, data, sql_column_types=None,
                         sql_column_names=None, table_name=None):
        """This creates a new sql table in the specific db."""
        data = list(data)

        if table_name is None:
            table_name = ''.join(random.choices(string.ascii_lowercase, k=16))

        if sql_column_types is None:
            column_size = self._get_column_types(data)
            sql_column_types = [
                'float' if size == 0 else 'varchar({})'.format(size)
                for size in column_size
            ]

        if sql_column_names is None:
            sql_column_names = ["col{}".format(i)
                                for i in range(len(sql_column_types))]
        else:
            sql_column_names = map(lambda x: '"{}"'.format(x), sql_column_names)

        drop_table_sql = "DROP TABLE IF EXISTS {}".format(table_name)

        create_table_sql = "CREATE TABLE {} ({})".format(
            table_name,
            ", ".join('{} {}'.format(n, t)
                      for n, t in zip(sql_column_names, sql_column_types)))

        insert_values = ", ".join(
            "({})".format(
                ", ".join("NULL" if v is None else "'{}'".format(v)
                          for v, t in zip(row, sql_column_types))
            ) for row in data
        )

        insert_sql = "INSERT INTO {} VALUES {}".format(table_name,
                                                       insert_values)

        import mysql.connector as mysql
        with mysql.connect(**self.params) as conn:
            with conn.cursor() as cursor:
                cursor.execute(drop_table_sql)
                cursor.execute(create_table_sql)
                if insert_values:
                    cursor.execute(insert_sql)
            conn.commit()

        return self.params, table_name
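
The INSERT above writes every value into the SQL text itself, quoting with '{}', which breaks as soon as a value contains a quote character. A sketch of the same bulk insert that hands the rows to the driver with executemany instead, assuming mysql.connector and a pre-validated table name:

import mysql.connector

def insert_rows(params, table_name, data):
    """Bulk-insert rows with driver-side escaping; params are mysql.connector settings (assumed)."""
    if not data:
        return 0
    placeholders = ", ".join(["%s"] * len(data[0]))
    insert_sql = "INSERT INTO {} VALUES ({})".format(table_name, placeholders)
    conn = mysql.connector.connect(**params)
    try:
        cursor = conn.cursor()
        cursor.executemany(insert_sql, [tuple(row) for row in data])
        conn.commit()
        return cursor.rowcount
    finally:
        conn.close()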
Example #29
def update():
    now = time.strftime("%Y-%m-%d",time.localtime())
    deleting_catalogid = get_calogid(now)
    conn = mysql.connect()
    cursor = conn.cursor()
    if deleting_catalogid:
        print 'update catalogid set status=0'
        for each_id in deleting_catalogid:
            message = """update live_catalog set status=0 where catalogid=%s """ % (each_id)        
            cursor.execute(message)
            conn.commit()
            ms = """delete from live_movie where  catalogid=%s""" % each_id
            cursor.execute(ms)
            conn.commit()        
            
    conn.close()
Example #30
def run():
    """
        Method to run the entire pipeline.
    """
    
    CONFIG_PATH = "config/config.cfg"

    config=ConfigParser()
    config.read(CONFIG_PATH)
    host=config.get(DATABASE, HOST)
    user=config.get(DATABASE, USER)
    password=config.get(DATABASE, PASSWORD)
    db=config.get(DATABASE, DB)
    charset=config.get(DATABASE,CHARSET)
    use_unicode=config.get(DATABASE, USE_UNICODE)
    consumer_key=config.get(TWITTER, CONSUMER_KEY)
    consumer_secret=config.get(TWITTER, CONSUMER_SECRET)
    access_token=config.get(TWITTER, ACCESS_TOKEN)
    access_token_secret=config.get(TWITTER, ACCESS_TOKEN_SECRET)
    log_error_file=config.get(LOG, ERROR_FILE)

    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)

    conn = mysql.connect(host, user, password, db, charset, use_unicode)
    cursor=conn.cursor()

    f = open(log_error_file,'a')

    flag = True
    while flag:
        try:
            sql = "SELECT max(id) FROM tweet"
            result = mysql.read(sql, cursor)
            if result[0][0]:
                since_id = int(result[0][0])
            else:
                since_id = -1
            print since_id
            for payload in process_pipeline(get_tweets(auth = auth, screen_name = "Calvinn_Hobbes", since_id = since_id), [parse_tweet]):
                insert_tweet(payload, cursor, conn, f)
            flag = False
        except tweepy.error.TweepError as e:
            error_message = "ERROR \n" + str(e) + "\nsleeping for 15 minutes\n\n\n"
            f.write(error_message)
            sleep(15*60)
    f.close()
Example #31
def update_catalog(date=''):
    cid = ''
    if date=='':
        date = time.strftime("%Y-%m-%d",time.localtime())
    ms = """select live_channel.chid from live_channel,live_status where live_channel.statusid=live_status.statusid and live_status.sname='%s'""" % u'发布'.encode('utf-8')
    print ms
    conn = mysql.connect()
    cursor = conn.cursor()
    cursor.execute(ms)
    dic_cid = cursor.fetchall()
    cids = []
    #print(dic_cid)
    for each in dic_cid:
        message = """insert into live_catalog (chid,date,status) values (%s,'%s',%s) """ % (each[0],date,0)
        ma = """select date from live_catalog where status = 0 and chid=%s""" % each[0]
        ms = """select date from live_catalog where chid=%s and date='%s' """ % (each[0],date)
        cursor.execute(ma)
        gets = cursor.fetchall()
        print ms
        cursor.execute(ms)
        result = cursor.fetchall()
        print 'select date',result        
        sl = []
        if gets:            
            for d in gets:  # do not shadow the outer loop variable 'each'
                sl.append(d[0])
                    
            if date in sl:
                break
            else:
                logging.info('execute insert,chid=%s' % each[0])
                cursor.execute(message)
                conn.commit()
        elif result:
            print 'continue'
            continue
        else:
            print 'execute insert'
            logging.info('execute insert,chid=%s' % each[0])
            cursor.execute(message)
            conn.commit()
    conn.close()
    return 
Example #32
def download_report():
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor(pymysql.cursors.DictCursor)

        cursor.execute("SELECT * FROM id")
        result = cursor.fetchall()

        #output in bytes
        output = io.BytesIO()
        #create WorkBook object
        workbook = xlwt.Workbook()
        #add a sheet
        sh = workbook.add_sheet('Data Report')

        #add headers
        sh.write(0, 0, 'ID')
        sh.write(0, 1, 'Name')
        sh.write(0, 2, 'Mobile_Number')

        idx = 0
        for row in result:
            sh.write(idx + 1, 0, str(row['ID']))
            sh.write(idx + 1, 1, row['Name'])
            sh.write(idx + 1, 2, row['Mobile_Number'])
            idx += 1

        workbook.save(output)
        output.seek(0)
        cursor.execute("TRUNCATE id;")  # Truncate id table
        return Response(output,
                        mimetype="application/ms-excel",
                        headers={
                            "Content-Disposition":
                            "attachment;filename=ID_report.xls"
                        })

    except Exception as e:
        print(e)
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
Example #33
def get_catalogid_from_store(cid=0,date=0):
    conn = mysql.connect()
    cur = conn.cursor()
    #print cid,date
    if cid == 0:
        if date:  # a date string was supplied
            message = """select catalogid from live_catalog where date = '%s'""" % (date,)            
        else:
            message = """select catalogid from live_catalog """
    elif cid != 0:
        if date:
            message = """select catalogid from live_catalog where date = '%s' and chid = %s """ % (date,cid)

        else:
            message = """select catalogid from live_catalog where chid = %s """ % (cid,)
    try:
        cur.execute(message)
        result = cur.fetchall()
        conn.close()
    except:        
        return None        
    return(result)
Example #34
    def __init__(self):
        try:
            self.db = mysql.connect(host="localhost",
                                    user="******",
                                    passwd="root")
        except:
            print "ERROR"

        cursor = self.db.cursor()
        try:
            cursor.execute("create database IF NOT EXISTS spider")
        except:
            print "db already exist"

        cursor.execute('use spider')
        try:
            cursor.execute(
                'create table IF NOT EXISTS victime (id INT(6) PRIMARY KEY AUTO_INCREMENT, email varchar(255) NOT NULL, UNIQUE (email))'
            )
        except:
            print "table already exist"

        self.db.commit()
Example #35
def _test():
    import nose.tools as nt
    import mysql
    import insert

    db = mysql.connect(database="wikicat",
                       user="******", host="localhost")

    nt.ok_(db)

    # point all the models at this database
    database_proxy.initialize(db)
    use_confirmations(False)

    # create the tables
    create_tables(drop_if_exists=True)

    # some example data
    dataset = [
        {'category': u'Category:Futurama', 'label': u'Futurama'},
        {'category': u'Category:World_War_II', 'label': u'World War II'},
        {'category': u'Category:Programming_languages', 'label': u'Programming languages'},
        {'category': u'Category:Professional_wrestling', 'label': u'Professional wrestling'},
        {'category': u'Category:Algebra', 'label': u'Algebra'},
        {'category': u'Category:Anime', 'label': u'Anime'},
        {'category': u'Category:Abstract_algebra', 'label': u'Abstract algebra'},
        {'category': u'Category:Mathematics', 'label': u'Mathematics'},
        {'category': u'Category:Linear_algebra', 'label': u'Linear algebra'},
        {'category': u'Category:Calculus', 'label': u'Calculus'},
        {'category': u'Category:Monarchs', 'label': u'Monarchs'},
        {'category': u'Category:British_monarchs', 'label': u'British monarchs'},
    ]

    datasetVersion = dataset_version(version='3.9', language='en', date='2013-04-03')
    imported = insert.insert_dataset(data=dataset, dataset='category_labels', version_instance=datasetVersion)

    nt.assert_equal(len(dataset), imported)
Example #37
# -*- coding: utf-8 -*-
"""
Spyder Editor

This is a temporary script file.
"""

#!/usr/bin/python

import mysql
import random

# Open database connection
db = mysql.connect("localhost", "root", "r34lw4r3", "infradb")

# prepare a cursor object using cursor() method
cursor = db.cursor()

# Prepare SQL query to INSERT a record into the database.
sql = "SELECT * FROM utenti"
try:
    # Execute the SQL command
    cursor.execute(sql)
    # Fetch all the rows in a list of lists.
    results = cursor.fetchall()
    for row in results:
        id = row[0]
        nome = row[1]
        cognome = row[2]
        ip = row[3]
        ip_vpn = row[4]
Example #38
def sqlQuery(sql=""):
    db = mysql.connect()
    mysql.query(db, sql)
    mysql.close(db)
Example #39
__author__ = 'deddo_000'
import mysql



connection = mysql.connect("127.0.0.1","ThisPassword")
connection.selectdb("Groceries")


FruitsArray = mysql.Query("FROM GreenGrocers Select Fruits")
FruitsArray.Execute()
Example #40
def get_file(file_url):
	file_path_local = file_url.split(".com")[1]
	file_path_local = file_path_local[1:]
	file_path = config.TALK_ENGLISH_PATH + "/" + file_path_local
	arr = file_path.split("/")
	file_name = arr[len(arr) - 1]
	directory = file_path.replace("/" + file_name, "")

	if not os.path.exists(directory):
		os.makedirs(directory)
	
	if not os.path.exists(file_path):
		urllib.urlretrieve (file_url, file_path)
		
	return file_path_local
		
def run():

	lessons = common.getLesson()
	
	for lesson in lessons:
		print(lesson[2])
		get_words(lesson[0], lesson[2])
	
#############
mysql.connect()

run()

mysql.disconnect()
Example #41
import xlsxwriter

excel_grafico = xlsxwriter.Workbook('excel_graficas.xlsx')
worksheet = excel_grafico.add_worksheet()

data = num_artistas.values
worksheet.write_column('A1', data)

chart = excel_grafico.add_chart({'type': 'line'})

chart.add_series({'values': '=Sheet1!$A$1:$A$6'})

worksheet.insert_chart('C1', chart)

excel_grafico.close()

######## Export to a database

with sqlite3.connect('bbd_artist.bd') as conexion:
    df4.to_sql('py_artistas', conexion)

## Export to MySQL (pandas.to_sql needs a SQLAlchemy engine rather than a raw driver connection)

from sqlalchemy import create_engine
engine = create_engine('mysql://*****:*****@localhost:32771/test')
df4.to_sql('tabla_mysql', engine)

### JSON #######

df2.to_json('artista.json', orient='table')
Example #42
import _mysql
import mysql
import sys
import argparse

parser = argparse.ArgumentParser(description='')
mysql.add_args(parser)
args = parser.parse_args()
con = None

try:
    con = mysql.connect(args)

    print 'Summarising data for simulations in {}'.format(args.db)

    print "Creating views..."
    # Create mysql views
    con.query("""
		CREATE OR REPLACE VIEW `allocationRatios` AS 
		select `playerScore`.`simID` AS `ID`,
		`playerScore`.`player` AS `player`,
		`playerScore`.`round` AS `round`,
		`playerScore`.`cluster` AS `cluster`,
		least((`playerScore`.`r` / `playerScore`.`d`),1) AS `ratio` 
		from `playerScore` where (`playerScore`.`d` > 0);
	""")
    con.query("""
		CREATE OR REPLACE VIEW `aggregatedSimulations` AS 
		select `simulationSummary`.`Name` AS `strategy`,
		`simulationSummary`.`cluster` AS `cluster`,
		avg(`simulationSummary`.`ut. C`) AS `ut. C`,
Example #43
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json

# Importing files
import lex
import ocr
import frequency
import mysql
import archive

documents = lex.tokenize(archive.get_text("exemplo1.docx", "docs/docx/"))
connection = mysql.connect()
mysql.saveTokens(connection, 'documents.txt',
                 archive.get_text("exemplo1.docx", "docs/docx/"),
                 json.dumps({"exe01.txt": documents}))
print(documents)

# archive = open("documents.txt", "w", encoding='utf-8')
# archive.write(json.dumps({"exe01.txt":documents}, ensure_ascii=False))
# archive.close()

# archive = open("documents.txt", "r", encoding='utf-8')
# text = json.loads(archive.read())
# archive.close()

# print('\n\ntext',text['exe01.txt'])

# connection = mysql.connect()
# mysql.saveTokens(connection, 'documents.txt', json.dumps({"exe01.txt":documents}))
# tokens = mysql.getTokens(connection, 'documents.txt')
Example #44
import mysql
import time
date = time.strftime("%Y-%m-%d",time.localtime())

cid_message = """select tvsou.channelid,tvsou.md5 from tvsou_update_list tvsou inner join order_list orl on orl.channelid=tvsou.channelid and date='%s' order by tvsou.channelid""" % date
cata_message = """select catalogid,channelid from catalog_list where date='%s'""" % date


conn = mysql.connect()
cursor = conn.cursor()
cursor.execute(cid_message)
tvsou_info = cursor.fetchall()
cursor.execute(cata_message)
cata_info = cursor.fetchall()
#print tvsou_info
#print cata_info
def flatten(nested):
    for each in nested:
        yield each
        
stup=[]        
for tup in flatten(tvsou_info):
    stup.append(tup)
#print stup   
sstup = stup[:]  # take a copy, so removing items from stup does not disturb the iteration below
del_cid = []
for each in flatten(sstup):
    #if len(stup)>1:
    stup.remove(each)   
    for n in stup:      
        if each[1]==n[1]:            
Example #45
    sql = "SELECT sm_ch_name from xml_submodel where sm_name = '%s'"% (englishname)
    cursor.execute(sql)
    return cursor.fetchone()

def editRepeat(parentNode,name,i):
    if parentNode.getAttribute("name") == name:
        if i not in editlist:
            editlist.append(i)
        isEdit = True
        parentNode.setAttribute("name",name+" ")
    if parentNode.tagName != "mainpart":
        editRepeat(parentNode.parentNode,name,i)

print("start")
# Open the database connection
db = mysql.connect("localhost", "root", "mysql", "vesal", charset='utf8')

# Create a cursor object using the cursor() method
cursor = db.cursor()
# Execute the SQL query using the execute() method
cursor.execute("SELECT VERSION()")
# Fetch a single row using the fetchone() method.
data = cursor.fetchone()
print("Database version : %s " % data)

def EditRepeatXml(i):
    doc = minidom.parse(i)
    root = doc.documentElement
    models = root.getElementsByTagName("model")
    for model in models:
        #print(model.getAttribute("name"))
Example #46
def connect_db():
    return mysql.connect()
Example #47
def example_using_mysql():
	import mysql
	connection = mysql.connect('my.databaseserver.com', username='', password='')
	res = connection.execute('select * from blah')
	for row in res:
		do_something(row) # define do_something elsewhere
Example #48
def main():
    args = docopt("""
    Usage:
      {f} [options] FILES...

    Options:
      --database DB  mysql database[default: ratedb]
      --limit LIMIT  [default: 1000]
    """.format(f=sys.argv[0]))
    files = args['FILES']
    db = args['--database']
    limit = int(args['--limit'])

    conn = mysql.connect(
        user='******', passwd='root')  # , cursorclass=mysql.cursors.SSCursor)

    for f_i, f_name in enumerate(files):
        sqlite_conn = sqlite3.connect(f_name)
        r = sqlite_conn.execute(
            'SELECT name FROM sqlite_master WHERE type="table"')
        tables = list(sorted(set([x[0].lower() for x in r.fetchall()])))

        for t_i, table in enumerate(sorted(tables)):

            with conn as c:
                c.execute('CREATE DATABASE IF NOT EXISTS {}'.format(db))
                c.execute('USE {}'.format(db))
                c.execute("""CREATE TABLE IF NOT EXISTS {}(
                    time DATETIME(6) PRIMARY KEY,
                    bid FLOAT,
                    ask FLOAT
                    )""".format(table))

            def gen_data(table):
                dt_from = datetime.min
                while True:
                    r = sqlite_conn.execute(
                        'SELECT time, bid, ask FROM {} WHERE ? < time ORDER BY time ASC LIMIT {}'
                        .format(table, limit), (dt_from, ))
                    results = r.fetchall()
                    if not results:
                        break
                    yield results
                    dt_from = results[-1][0]

            with conn as c:
                c.execute('SET sql_log_bin=OFF')
                i = 0
                for results in gen_data(table):
                    N = len(results)

                    def dt_convert(x):
                        dt = timeutil.to_datetime(x[0])
                        if not dt.tzinfo:
                            dt = dt.replace(tzinfo=timeutil.TOKYO)
                        return (dt, ) + x[1:]

                    flatten_results = [
                        flatten for inner in results
                        for flatten in dt_convert(inner)
                    ]
                    c.execute(
                        'INSERT IGNORE INTO {} VALUES {}'.format(
                            table, ','.join(['(%s,%s,%s)'] * N)),
                        flatten_results)
                    i += N
                    print(
                        '# {}/{} {}/{} #{}'.format(f_i + 1,
                                                   len(files), t_i + 1,
                                                   len(tables), i), f_name,
                        table, flatten_results[-3:])
                    conn.commit()
Example #49
def create_tables():
    
    #-------------------------#
    """create table for tvsou"""
    #-------------------------#
    tvs_channel = """create table tvs_channel (chid int(11) not null auto_increment,tvid int(11),chname varchar(25),status int(11),primary key(chid))"""
    tvs_updatelog = """create table tvs_updatelog ( interface_id int(30) not null auto_increment primary key,chid int(11) not null ,p_date varchar(20),md5 varchar(255),create_datetime varchar(20))"""
    
    #-------------------------#
    """create tables for live"""
    #-------------------------#
    #live_sort
    live_sort = """ create table live_sort (sort_id int(11) not null auto_increment primary key,sort_name varchar(20))"""
    
    #live_channel
    live_channel = ("""create table live_channel (id int(11) not null auto_increment,chid int(11) not null unique,statusid int(11),
    live_urlid int(11),createtime varchar(11),date varchar(11),logoid int(11),sort_id int(11), primary key(id,chid))""")
    livechannel_fk_channelid = """alter table live_channel add  constraint tvs_channel_chid_fk foreign key(chid) references tvs_channel(chid)"""
    livechannel_fk_statusid = """alter table live_channel add constraint live_status_statusid_fk foreign key(statusid) references live_status (statusid)"""   
    livechannel_fk_logoid = """alter table live_channel add constraint live_logo_logoid_fk foreign key (logoid) references live_logo(logoid)"""
    livechannel_fk_sortid = """alter table live_channel add constraint live_catalog_classid_fk foreign key(sort_id) references live_sort (sort_id)"""
    livechannel_fk_live_urlid = """alter table live_channel add constraint live_url_urlid_fk foreign key (live_urlid) references live_url (live_urlid)"""
              
    
    live_url = """ create table live_url(live_urlid int(11) not null auto_increment primary key,chid int(11),serverid int(10),port int(10),live_path varchar(150),status int(11))"""
    
    live_logo = """ create table live_logo(logoid int(11) not null auto_increment primary key,chid int(11),serverid int(10),port varchar(10),path varchar(30),status int(10))"""
                 
    
    url_fk_sid = """alter table live_url add constraint live_logo_server_id_fk foreign key(serverid) references live_server(serverid)"""
    logo_fk_sid = """alter table live_logo add constraint live_logo_path_id_fk foreign key(serverid) references live_server(serverid)"""
    
    
    live_server = """create table live_server (serverid int(11) not null auto_increment primary key,live_ip varchar(50))"""
    live_status = """create table live_status (statusid int(11) not null auto_increment primary key,sname varchar(255))"""
    #live_catalog
    live_catalog = """create table live_catalog (catalogid int(11) not null auto_increment primary key,chid int(11),date varchar(20),status int(11))"""
    live_catalog_fk = """alter table live_catalog add constraint live_channel_chid_fk foreign key(chid) references live_channel(chid)"""
    #live_movie
    live_movie = """create table live_movie (programid int(11) not null auto_increment primary key,catalogid int(11),chid int(11),program_name varchar(255),date varchar(20),gmtime varchar(25),gwtime varchar(30),start_time varchar(20),timelength varchar(20),uri varchar(255),publish_status int(2))"""
    live_movie_fk = """alter table live_movie add constraint live_catalog_catalogid_fk foreign key(catalogid) references live_catalog (catalogid)"""


    
    conn = mysql.connect()
    cursor = conn.cursor()
    try:
        cursor.execute('drop table live_movie')
        conn.commit()
    except MySQLdb.OperationalError:
        pass
    
    try:
        cursor.execute('drop table live_catalog')
        conn.commit()
    except MySQLdb.OperationalError:
        pass

    try:
        cursor.execute('drop table live_channel')
        conn.commit()
    except MySQLdb.OperationalError:
        pass

    try:
        cursor.execute('drop table live_sort')
        conn.commit()
    except MySQLdb.OperationalError:
        pass

    try:
        cursor.execute('drop table live_catalog')
        conn.commit()
    except MySQLdb.OperationalError:
        pass

    try:
        cursor.execute('drop table tvs_channel')
        conn.commit()
    except MySQLdb.OperationalError:
        pass

    try:
        cursor.execute('drop table live_m3u8')
        conn.commit()
    except MySQLdb.OperationalError:
        pass
    
   
    try:
        cursor.execute('drop table live_url')
        conn.commit()
    except MySQLdb.OperationalError:
        pass  
    
    try:
        cursor.execute('drop table live_logo')
        conn.commit()
    except MySQLdb.OperationalError:
        pass 
            
    try:
        cursor.execute('drop table live_server')
        conn.commit()
    except MySQLdb.OperationalError:
        pass 


    try:
        cursor.execute('drop table live_status')
        conn.commit()
    except MySQLdb.OperationalError:
        pass

    try:
        cursor.execute('drop table tvs_updatelog')
        conn.commit()
    except MySQLdb.OperationalError:
        pass

    

    
    
    
    cursor.execute(tvs_channel)
    conn.commit()

    cursor.execute(tvs_updatelog)
    conn.commit()
   
    
 

    cursor.execute(live_sort)
    conn.commit()
    
    #sqli = """insert into live_sort values(%s,'%s')"""
    #cursor.execute("SET NAMES utf8")
    #conn.commit()
    #cursor.executemany(sqli,[(1,"央视"),(2,"卫视"),(3,"地方"),])
    #cursor.commit()
    
    cursor.execute(live_channel)
    conn.commit()

    cursor.execute(live_catalog)
    conn.commit()

    cursor.execute(live_movie)
    conn.commit()

    cursor.execute(live_url)
    conn.commit()
       

    cursor.execute(live_logo)
    conn.commit()
    
    cursor.execute(live_server)
    conn.commit()    
    
    cursor.execute(url_fk_sid)
    conn.commit()
    
    cursor.execute(logo_fk_sid)
    conn.commit()
    

    
    cursor.execute(live_status)
    conn.commit()
   

    
    cursor.execute(live_catalog_fk)
    conn.commit()

    cursor.execute(live_movie_fk)
    conn.commit()
    
    cursor.execute(livechannel_fk_channelid)
    conn.commit()
    
    cursor.execute(livechannel_fk_statusid)
    conn.commit()
    

    
    cursor.execute(livechannel_fk_logoid)
    conn.commit()

    cursor.execute(livechannel_fk_live_urlid)
    conn.commit()

    conn.close()
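
The long run of try/except blocks above drops the tables one at a time (and drops live_catalog twice). The same teardown can be expressed as a single loop over the table names; a minimal sketch, assuming the same cursor, connection and MySQLdb import used above:

    # Drop any existing tables in reverse dependency order before recreating them
    for table in ('live_movie', 'live_catalog', 'live_channel', 'live_sort',
                  'tvs_channel', 'live_m3u8', 'live_url', 'live_logo',
                  'live_server', 'live_status', 'tvs_updatelog'):
        try:
            cursor.execute('drop table {}'.format(table))
            conn.commit()
        except MySQLdb.OperationalError:
            pass  # the table did not exist yet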
Example #51
def _test():
    import nose.tools as nt
    import mysql, models, insert

    db = mysql.connect('wikicat', user='******', host='localhost', password='')

    nt.ok_(db)

    models.database_proxy.initialize(db)
    models.use_confirmations(False)
    models.create_tables(drop_if_exists=True)

    # some example data
    dataset = [
        {'broader': u'Animals', 'narrower': u'Mammals'},
        {'broader': u'Animals', 'narrower': u'Birds'},
        {'broader': u'Animals', 'narrower': u'Reptiles'},
        {'broader': u'Mammals', 'narrower': u'Dogs'},
        {'broader': u'Mammals', 'narrower': u'Cats'},
        {'broader': u'Reptiles', 'narrower': u'Lizards'},
        {'broader': u'Reptiles', 'narrower': u'Snakes'},
        {'broader': u'Birds', 'narrower': u'Ostriches'},
        {'broader': u'Birds', 'narrower': u'Penguins'},
        {'broader': u'Birds', 'narrower': u'Eagles'},
        {'broader': u'Cats', 'narrower': u'Lions'},
        {'broader': u'Cats', 'narrower': u'Tigers'}
    ]

    datasetVersion = models.dataset_version(version='3.9', language='en', date='2013-04-03')
    imported = insert.insert_dataset(data=dataset, dataset='category_categories', version_instance=datasetVersion)
    nt.eq_(imported, len(dataset))

    cats = Category.get(Category.name=='Cats')
    typesOfCat = [c.name for c in descendants(cats)]
    typesOfCat.sort()
    nt.eq_(['Cats', 'Lions', 'Tigers'], typesOfCat)

    eagle = Category.get(Category.name=='Eagles')
    eagleParents = [e.name for e in ancestors(eagle)]
    eagleParents.sort()
    nt.eq_(['Animals', 'Birds', 'Eagles'], eagleParents)

    mammals = Category.get(Category.name=='Mammals')
    mammalChildren = [m.name for m in descendants(mammals, max_levels=0)]
    mammalChildren.sort()
    nt.eq_(['Mammals'], mammalChildren)

    mammalChildren = [m.name for m in descendants(mammals, max_levels=1)]
    mammalChildren.sort()
    nt.eq_(['Cats', 'Dogs', 'Mammals'], mammalChildren)

    mammalChildren = [m.name for m in descendants(mammals, max_levels=2)]
    mammalChildren.sort()
    nt.eq_(['Cats', 'Dogs', 'Lions', 'Mammals', 'Tigers'], mammalChildren)

    links = [(f.broader.name, f.narrower.name) for f in descendant_links(mammals)]
    links.sort()
    nt.eq_([(u'Cats', u'Lions'), (u'Cats', u'Tigers'), (u'Mammals', u'Cats'), (u'Mammals', u'Dogs')], links)

    links = [(f.broader.name, f.narrower.name) for f in descendant_links(mammals, max_levels=0)]
    links.sort()
    nt.eq_([(u'Mammals', u'Cats'), (u'Mammals', u'Dogs')], links)

    # now add a node with two parents
    dataset = [
        {'broader': u'Animals', 'narrower': u'Mammals'},
        {'broader': u'Animals', 'narrower': u'Birds'},
        {'broader': u'Animals', 'narrower': u'Reptiles'},
        {'broader': u'Mammals', 'narrower': u'Monotremes'},
        {'broader': u'Reptiles', 'narrower': u'Monotremes'}
    ]

    imported = insert.insert_dataset(data=dataset, dataset='category_categories', version_instance=datasetVersion)
    nt.eq_(imported, len(dataset))

    monotremes = Category.get(Category.name=='Monotremes')
    monoParents = [e.name for e in ancestors(monotremes, norepeats=False)]
    monoParents.sort()
    nt.eq_(['Animals', 'Animals', 'Mammals', 'Monotremes', 'Reptiles'], monoParents)

    monoParents = [e.name for e in ancestors(monotremes, norepeats=True)]
    monoParents.sort()
    nt.eq_(['Animals', 'Mammals', 'Monotremes', 'Reptiles'], monoParents)

    # now add a "second root"
    dataset = [
        {'broader': u'Animals', 'narrower': u'Mammals'},
        {'broader': u'Animals', 'narrower': u'Birds'},
        {'broader': u'Animals', 'narrower': u'Reptiles'},
        {'broader': u'Mammals', 'narrower': u'Dogs'},
        {'broader': u'Mammals', 'narrower': u'Cats'},
        {'broader': u'Reptiles', 'narrower': u'Lizards'},
        {'broader': u'Reptiles', 'narrower': u'Snakes'},
        {'broader': u'Birds', 'narrower': u'Ostriches'},
        {'broader': u'Birds', 'narrower': u'Penguins'},
        {'broader': u'Birds', 'narrower': u'Eagles'},
        {'broader': u'Cats', 'narrower': u'Lions'},
        {'broader': u'Cats', 'narrower': u'Tigers'},
        {'broader': u'Pets', 'narrower': u'Lizards'},
        {'broader': u'Pets', 'narrower': u'Dogs'},
        {'broader': u'Pets', 'narrower': u'Cats'},
    ]
    imported = insert.insert_dataset(data=dataset, dataset='category_categories', version_instance=datasetVersion)
    nt.eq_(imported, len(dataset))

    lizards = Category.get(Category.name=='Lizards')
    lizardParents = [e.name for e in ancestors(lizards)]
    lizardParents.sort()
    nt.eq_(['Animals', 'Lizards', 'Pets', 'Reptiles'], lizardParents)

    reptiles = Category.get(Category.name=='Reptiles')
    reptileTypes = [e.name for e in descendants(reptiles)]
    reptileTypes.sort()
    nt.eq_(['Lizards', 'Reptiles', 'Snakes'], reptileTypes)
Example #52
def create_post(name, cantidad, price):
    con = sql.connect(path.join( ROOT, 'store.db')) 
    cur = con.cursor()
    cur.execute('insert into products ( name, quantity_in_stock, unit_price)  values ( ? , ? ,?)', (name, cantidad, price))
    con.commit()
    con.close()
Example #53
 def __init__(self, username, password):
     try:
         NoteMakingDB.db = mysql.connect(host="localhost", user=username, password=password)
         NoteMakingDB.cursor = NoteMakingDB.db.cursor()
     except Exception as e:
         raise