Example #1
def get_show_data(series_title, mode = '', sitemode = '', url = '', sitedata = None):
	series_title = replace_signs(smart_unicode(series_title))
	if not os.path.exists(ustvpaths.DBFILE):
		database.create_db()
	database.check_db_version()
	showdata = get_serie(series_title, mode, sitemode, url, forceRefresh = False, sitedata = sitedata)
	return showdata
Example #2
def main():
    create_db()
    print("App started. Listenning on %d" % int(os.environ.get('PORT', 8888)))
    tornado.options.parse_command_line()
    tornado.httpserver.HTTPServer(Application(),
            xheaders=True).listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
Example #4
    def GoToInputRequest(_data):
        requestedDT = _data["dt"]
        if not noDB:

            # Create the database if it does not already exist.
            database.create_db(c, "pysoc_server")
            # Create the table if it does not already exist.
            database.create_table(c, "client_name",
                "pysoc_server")

            clientDictList     = []                                    # All client information that will be sent to the client.
            clientNameList     = DatabaseGetAllClientNameMod1(c, db)            # All client names from the `client_name` table in the database.
            tableNameList      = DatabaseGetAllTableName(c, db)                 # All tables in the database.

            for i in clientNameList:

                clientName = i.get("client_name")
                face_temp = database.get_first_doc_value(c, str(requestedDT), "dt", "face", "{}_face".format(clientName), dbN)
                pitch_temp = database.get_first_doc_value(c, str(requestedDT), "dt", "pitch", "{}_pitch".format(clientName), dbN)
                presence_temp = database.get_first_doc_value(c, str(requestedDT), "dt", "presence", "{}_presence".format(clientName), dbN)
                volume_temp = database.get_first_doc_value(c, str(requestedDT), "dt", "volume", "{}_volume".format(clientName), dbN)

                if type(face_temp) is not list: i["face"] = face_temp
                if type(pitch_temp) is not list: i["pitch"] = pitch_temp
                if type(presence_temp) is not list: i["presence"] = presence_temp
                if type(volume_temp) is not list: i["volume"] = volume_temp

                clientDictList.append(i)

            emit("inputSend", clientDictList)
Example #5
def main():
    create_db()
    print("App started. Listenning on %d" % int(os.environ.get('PORT', 8888)))
    tornado.options.parse_command_line()
    tornado.httpserver.HTTPServer(Application(),
                                  xheaders=True).listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
Example #6
def refresh_db():
    if not os.path.isfile(ustvpaths.DBFILE):
        database.create_db()
    networks = get_networks()
    dialog = xbmcgui.DialogProgress()
    dialog.create(smart_utf8(addon.getLocalizedString(39016)))
    total_stations = len(networks)
    current = 0
    increment = 100.0 / total_stations
    all_shows = []
    for network in networks:
        network_name = network.NAME
        if addon.getSetting(network.SITE) == 'true':
            percent = int(increment * current)
            dialog.update(
                percent,
                smart_utf8(addon.getLocalizedString(39017)) + network.NAME,
                smart_utf8(addon.getLocalizedString(39018)))
            showdata = network.masterlist()
            for show in showdata:
                try:
                    series_title, mode, submode, url = show
                except ValueError:
                    series_title, mode, submode, url, siteplot = show
                all_shows.append((smart_unicode(series_title.lower().strip()),
                                  smart_unicode(mode), smart_unicode(submode)))
            total_shows = len(showdata)
            current_show = 0
            for show in showdata:
                percent = int((increment * current) +
                              (float(current_show) / total_shows) * increment)
                dialog.update(
                    percent,
                    smart_utf8(addon.getLocalizedString(39017)) + network.NAME,
                    smart_utf8(addon.getLocalizedString(39005)) + show[0])
                get_serie(show[0],
                          show[1],
                          show[2],
                          show[3],
                          forceRefresh=False)
                current_show += 1
                if (dialog.iscanceled()):
                    return False
        current += 1
    command = 'select tvdb_series_title , series_title, mode, submode, url from shows order by series_title'
    shows = database.execute_command(command, fetchall=True)
    for show in shows:
        tvdb_series_title, series_title, mode, submode, url = show
        if ((smart_unicode(series_title.lower().strip()), smart_unicode(mode),
             smart_unicode(submode)) not in all_shows and
            (smart_unicode(tvdb_series_title.lower().strip()),
             smart_unicode(mode), smart_unicode(submode)) not in all_shows):
            command = 'delete from shows where series_title = ? and mode = ? and submode = ? and url = ?;'
            values = (series_title, mode, submode, url)
            print "Deleting - " + series_title + " " + mode + " " + submode + " " + url
            database.execute_command(command,
                                     values,
                                     fetchone=True,
                                     commit=True)
Example #7
def runServer():
   print('start')
   database.create_db()
   try:
      server = ChatingServer((HOST, PORT), MyTcpHandler)
      server.serve_forever()
   except KeyboardInterrupt:
      server.shutdown()
      server.server_close()
Example #8
def create_app():

    app = Flask(__name__)
    app.config.from_object(os.environ['APP_SETTINGS'])

    app.secret_key = app.config['SECRET_KEY']

    app.add_url_rule('/graphql',
                     view_func=GraphQLView.as_view('graphql',
                                                   schema=schema,
                                                   graphiql=True))

    create_db()

    return app
Example #9
    def setupUi(self, data_Window):
        data_Window.setObjectName("data_Window")
        data_Window.resize(800, 600)
        self.centralwidget = QtWidgets.QWidget(data_Window)
        self.centralwidget.setObjectName("centralwidget")
        self.id_db_create = QtWidgets.QLineEdit(self.centralwidget)
        self.id_db_create.setGeometry(QtCore.QRect(270, 110, 111, 25))
        self.id_db_create.setMinimumSize(QtCore.QSize(12, 12))
        self.id_db_create.setObjectName("id_db_create")
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(260, 20, 111, 20))
        self.label.setObjectName("label")
        self.btn_db_create = QtWidgets.QPushButton(self.centralwidget)
        self.btn_db_create.setGeometry(QtCore.QRect(260, 170, 121, 25))
        self.btn_db_create.setObjectName("btn_db_create")

        ####################################################

        #########################################
        self.btn_db_create.clicked.connect(
            lambda: create_db(self.id_db_create.text()))
        ####################################################

        ###########################################################
        data_Window.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(data_Window)
        self.statusbar.setObjectName("statusbar")
        data_Window.setStatusBar(self.statusbar)

        self.retranslateUi(data_Window)
        QtCore.QMetaObject.connectSlotsByName(data_Window)
Example #10
def fetch_showlist(favored = 0):
	if not os.path.exists(ustvpaths.DBFILE):
		database.create_db()
		refresh_db()
	elif not favored:
		refresh = False
		command = 'select distinct mode from shows order by mode'
		modes = database.execute_command(command, fetchall = True)
		mode_list = [element[0] for element in modes]
		for network in get_networks():
			if addon.getSetting(network.SITE) == 'true' and network.SITE not in mode_list:
				refresh = True
		if refresh:
			refresh_db()
	database.check_db_version()
	command = "select * from shows  where url <> '' and hide <> 1 and favor = ? order by series_title"
	return database.execute_command(command, fetchall = True, values = [favored]) 
Example #12
def main():
    options = int(input("1: Add User"
                        "\n2: Delete User"
                        "\n3: Check User"
                        "\n4: Display Db"
                        "\n5: Create Db\n"))

    if options == 1:
        try:
            database.new_user()
        except Exception as error:
            print("ERROR:", error)
        finally:
            print("\n")
            main()
    if options == 2:
        try:
            database.delete_user()
        except Exception as error:
            print("ERROR:", error)
        finally:
            print("\n")
            main()
    if options == 3:
        try:
            database.check_user()
        except Exception as error:
            print("ERROR:", error)
        finally:
            print("\n")
            main()
    if options == 4:
        database.display_db()
        print("\n")
        main()
    if options == 5:
        try:
            database.create_db()
            print("Database created\n")
        except Exception as error:
            print("Error", error)
        finally:
            main()
Example #13
def play_game():
    cursor = database.create_db()
    name = ''
    words = []
    n = 1
    score = 0

    with open(".\\resource\\word.txt", 'r') as f:
        for c in f:
            words.append(c.strip())

    start = time.time()

    while n <= 5:
        os.system("cls")

        random.shuffle(words)
        q = random.choice(words)

        print("[ Q {} ]".format(n))
        print(q)

        answer = input()
        print("result : ", end='')

        if str(q).strip() == str(answer).strip():
            print("Correct!!!")
            winsound.PlaySound(".\\resource\\sound\\correct.wav",
                               winsound.SND_FILENAME)
            score = score + 1
        else:
            print("Wrong...")
            winsound.PlaySound(".\\resource\\sound\\wrong.wav",
                               winsound.SND_FILENAME)

        n = n + 1

    end = time.time()
    play_time = end - start
    play_time = format(play_time, ".3f")

    os.system("cls")

    if score >= 3:
        print("Pass!!!")
    else:
        print("Fail...")

    print("Play time : ", play_time, "s, ", "Score : {}".format(score))
    print()

    while name == '':
        name = input(">>> Enter your name (No blank) : ")

    database.insert_record(cursor, name, score, play_time)
Example #14
def refresh_db():
	if not os.path.isfile(ustvpaths.DBFILE):
		database.create_db()
	networks = get_networks()
	dialog = xbmcgui.DialogProgress()
	dialog.create(smart_utf8(addon.getLocalizedString(39016)))
	total_stations = len(networks)
	current = 0
	increment = 100.0 / total_stations
	all_shows = []
	for network in networks:
		network_name = network.NAME
		if addon.getSetting(network.SITE) == 'true':
			percent = int(increment * current)
			dialog.update(percent, smart_utf8(addon.getLocalizedString(39017)) + network.NAME, smart_utf8(addon.getLocalizedString(39018)))
			showdata = network.masterlist()
			for show in showdata:
				try:
					series_title, mode, submode, url = show
				except ValueError:
					series_title, mode, submode, url, siteplot = show
				all_shows.append((smart_unicode(series_title.lower().strip()), smart_unicode(mode), smart_unicode(submode)))
			total_shows = len(showdata)
			current_show = 0
			for show in showdata:
				percent = int((increment * current) + (float(current_show) / total_shows) * increment)
				dialog.update(percent, smart_utf8(addon.getLocalizedString(39017)) + network.NAME, smart_utf8(addon.getLocalizedString(39005)) + show[0])
				get_serie(show[0], show[1], show[2], show[3], forceRefresh = False)
				current_show += 1
				if (dialog.iscanceled()):
					return False
		current += 1
	command = 'select tvdb_series_title , series_title, mode, submode, url from shows order by series_title'
	shows = database.execute_command(command, fetchall = True) 
	for show in shows:
		tvdb_series_title, series_title, mode, submode, url = show
		if ((smart_unicode(series_title.lower().strip()),smart_unicode(mode), smart_unicode(submode)) not in all_shows and (smart_unicode(tvdb_series_title.lower().strip()),smart_unicode(mode), smart_unicode(submode)) not in all_shows):
			command = 'delete from shows where series_title = ? and mode = ? and submode = ? and url = ?;'
			values = (series_title, mode, submode, url)
			print "Deleting - " + series_title + " " + mode + " " + submode + " " + url
			database.execute_command(command, values, fetchone = True, commit = True)
Example #15
def main():
    conn = db.create_connection(db.DB_FILE)
    with conn:
        db.create_db(conn)

    while True:
        temperature = read_temperature()
        fever_event = get_fever_event(temperature)
        current_time = get_current_time()

        send_to_firebase(fever_event,current_time)
        write_in_plotly(TEMP_UNDER_THRESHOLD,temperature,current_time)
        with conn:
            db.insert_row(conn, current_time, temperature, fever_event, db.TABLE_NAME)

        log.debug("temperature:{}".format(temperature))
        log.debug("TEMP_UNDER_THRESHOLD:{}".format(TEMP_UNDER_THRESHOLD))
        log.debug("TEMP_OVER_THRESHOLD:{}".format(TEMP_OVER_THRESHOLD))
        log.debug("fever_event:{}".format(fever_event))

        time.sleep(SLEEP_DURATION)
Example #16
    def LatestInputRequest():

        if not noDB:

            # Create the database if it does not already exist.
            database.create_db(c, "pysoc_server")
            # Create the table if it does not already exist.
            database.create_table(c, "client_name",
                "pysoc_server")

            clientDictList     = []
            clientNameList     = DatabaseGetAllClientNameMod1(c, db)            # All client names from the `client_name` table in the database.
            latestInput        = GetLatestInputMod1(clientNameList)
            latestInputStr     = latestInput[1]                                 # Latest input time as a string, as received from the database.
            tableNameList      = DatabaseGetAllTableName(c, db)                 # All tables in the database.

            for i in clientNameList:

                if i.get("latest_input") == latestInputStr:
                    clientName = i.get("client_name")
                    #clientNameDict = {}
                    #clientNameDict["client_name"] = clientName

                    def DatabaseGetLatestInputTableValueToDictMod1(_tableName, *_colName):
                        DatabaseGetLatestInputTableValueToDict(c, i,
                            i.get("latest_input"), _tableName, tableNameList, *_colName)
                    #DatabaseGetLatestInputTableValueToDictMod1(clientName + "_cam", "faces")
                    #DatabaseGetLatestInputTableValueToDictMod1(clientName + "_ir" , "ir_code")
                    #DatabaseGetLatestInputTableValueToDictMod1(clientName + "_mic", "pitch", "volume")
                    DatabaseGetLatestInputTableValueToDictMod1(clientName + "_face", "face")
                    DatabaseGetLatestInputTableValueToDictMod1(clientName + "_pitch", "pitch")
                    DatabaseGetLatestInputTableValueToDictMod1(clientName + "_presence" , "presence")
                    DatabaseGetLatestInputTableValueToDictMod1(clientName + "_volume", "volume")

                    clientDictList.append(i)

            emit("inputSend", clientDictList)
Example #17
    def save_price(self):
        create_db()
        create_table()

        while True:
            start = time.time()

            orderbook = self._get_orderbook(self.market_instance)
            end_tick = time.time()

            if orderbook != False:
                is_insert = insert_data(orderbook, self.market, self.symbol,
                                        end_tick - start)
                if is_insert:
                    end = time.time()
                    print('Get tick at',
                          datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                          'CPU Time:', end - start)

            else:
                end = time.time()
                print('Fail to get tick at',
                      datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                      'CPU Time:', end - start)
Example #18
def download_sp500(startdate, enddate, dbfilename):
    
    # Download list of tickers, company name and GICS Sectors from Wikipedia
    url = 'http://en.wikipedia.org/wiki/List_of_S%26P_500_companies'
    page = html.parse(url)
    symbol = page.xpath('//table[@class="wikitable sortable"]/tr/td[1]/a/text()')
    company = page.xpath('//table[@class="wikitable sortable"]/tr/td[2]/a/text()')
    sector = page.xpath('//table[@class="wikitable sortable"]/tr/td[4]/text()')
    
    # Add the index itself
    symbol.append('^GSPC')    
    company.append('S&P 500')    
    sector.append(None)    
    
    # Since Dec-12, Yahoo! Finance lists BRK.B as BRK-B
    if 'BRK.B' in symbol: symbol[symbol.index('BRK.B')]='BRK-B'
    
    # If database doesn't exist, create it
    if not os.path.exists(dbfilename):
        database.create_db(dbfilename)    
    
    conn = sqlite3.connect(dbfilename, 
           detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
    c = conn.cursor()
    
    # Load data from Wikipedia into Load table
    conn.execute("CREATE TABLE LoadAssets (Cd text, Name text, SectorName text)")
    sql = "INSERT INTO LoadAssets (Cd, Name, SectorName) VALUES (?, ?, ?)"    
    c.executemany(sql, zip(symbol, company, sector))
    conn.commit()
    
    database.populate_db(symbol, startdate, enddate, dbfilename)
    
    # TODO: download with lxml (?)
    # http://en.wikipedia.org/wiki/Global_Industry_Classification_Standard
    # Note: Telecommunication Services is listed as Telecommunications Services
    conn.execute("DELETE FROM GicsSectors")
    conn.execute("INSERT INTO GicsSectors VALUES(10, 'Energy')")
    conn.execute("INSERT INTO GicsSectors VALUES(15, 'Materials')")
    conn.execute("INSERT INTO GicsSectors VALUES(20, 'Industrials')")
    conn.execute("INSERT INTO GicsSectors VALUES(25, 'Consumer Discretionary')")
    conn.execute("INSERT INTO GicsSectors VALUES(30, 'Consumer Staples')")
    conn.execute("INSERT INTO GicsSectors VALUES(35, 'Health Care')")
    conn.execute("INSERT INTO GicsSectors VALUES(40, 'Financials')")
    conn.execute("INSERT INTO GicsSectors VALUES(45, 'Information Technology')")
    conn.execute("INSERT INTO GicsSectors VALUES(50, 'Telecommunications Services')")
    conn.execute("INSERT INTO GicsSectors VALUES(55, 'Utilities')")
    
    conn.execute("DELETE FROM Assets WHERE Cd IN (SELECT Cd FROM LoadAssets)")
    
    conn.execute("""
    INSERT INTO Assets
    SELECT l.Cd, l.Name, g.Id
    FROM LoadAssets l
    LEFT JOIN GicsSectors g ON l.SectorName = g.Name""")
    
    conn.execute("DROP TABLE LoadAssets")
    conn.commit()
   
    c.close()
    conn.close()
Example #19
def splits(s, d1, d2):
    l, _, r = s.partition(d1)
    m, _, r = r.partition(d2)
    return m


def getbw(s):
    if "GB/s" in s:
        return float(s[:-4]) * 1024
    if "MB/s" in s:
        return float(s[:-4])
    if "KB/s" in s:
        return float(s[:-4]) / 1024


if __name__ == '__main__':
    ctx = parse_args()
    database.create_db()

    files = find('output.*', ctx.input_directory)
    totals = {}
    for inputname in files:
        # strip off the input directory
        params = inputname[len(ctx.input_directory):].split("/")[3:-1]
        # make readahead into an int
        params[3] = int(params[3][7:])

        # Make op_size into an int
        params[4] = int(params[4][8:])

        # Make cprocs into an int
        params[5] = int(params[5][17:])
Example #20
#!/usr/bin/env python

import os
import uuid
import json
import tornado.ioloop
import tornado.web
import database
from tornado import websocket
from tornado.web import url

db_filename = 'clamchat.db'

# Create a db to store all sent messages, unless one already exists.
database.create_db(db_filename)

class RoomHandler(object):
    """Store data about connections, rooms, which users are in which rooms, etc."""
    def __init__(self):
        self.client_info = {}  # for each client id we'll store  {'wsconn': wsconn, 'room':room, 'nick':nick}
        self.room_info = {}  # dict  to store a list of  {'cid':cid, 'nick':nick , 'wsconn': wsconn} for each room
        self.roomates = {}  # store a set for each room, each contains the connections of the clients in the room.

    def add_roomnick(self, room, nick):
        """Add nick to room. Return generated clientID"""
        # meant to be called from the main handler (page where somebody indicates a nickname and a room to join)
        cid = uuid.uuid4().hex  # generate a client id.
       
        # it's a new room if room not in self.room_info
        if room not in self.room_info:
           self.room_info[room] = []
Example #21
    def download_sp500(self, startdate, enddate, dbfilename):
        """
        Downloads S&P500 tickers from Wikipedia and daily time series from Yahoo! Finance
        """    
        
        # Download list of tickers, company name and GICS Sectors from Wikipedia
        url = 'http://en.wikipedia.org/wiki/List_of_S%26P_500_companies'
        page = html.parse(url)
        symbol = page.xpath('//table[@class="wikitable sortable"]/tr/td[1]/a/text()')
        company = page.xpath('//table[@class="wikitable sortable"]/tr/td[2]/a/text()')
        sector = page.xpath('//table[@class="wikitable sortable"]/tr/td[4]/text()')
        
        # Add the index itself
        symbol.append('^GSPC')
        company.append('S&P 500')
        sector.append(None)
        
        # Since Dec-12, Yahoo! Finance lists BRK.B as BRK-B
        if 'BRK.B' in symbol: symbol[symbol.index('BRK.B')]='BRK-B'
        
        # Debugging: restrict to the first 10 stocks of the index
#        symbol = symbol[:10]
#        company = company[:10]
#        sector = sector[:10] 
        
        # If database doesn't exist, create it
        if not os.path.exists(dbfilename):
            database.create_db(dbfilename)    
        
        conn = sqlite3.connect(dbfilename, 
               detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
        c = conn.cursor()
        
        # Load data from Wikipedia into Load table
        conn.execute("CREATE TABLE LoadAssets (Cd text, Name text, SectorName text)")
        sql = "INSERT INTO LoadAssets (Cd, Name, SectorName) VALUES (?, ?, ?)"    
        c.executemany(sql, zip(symbol, company, sector))
        conn.commit()
        
        # Download time series from Yahoo! finance and store into db
        i = 0
        for s in list(symbol):
            data = database.get_yahoo_prices(s, startdate, enddate)
            database.save_to_db(data, dbfilename)   
            self.downloaded.emit(i) # emit signal to update progressbar
            i += 1
        
        # From: http://en.wikipedia.org/wiki/Global_Industry_Classification_Standard
        # Note: Telecommunication Services is listed as Telecommunications Services
        conn.execute("DELETE FROM GicsSectors")
        conn.execute("INSERT INTO GicsSectors VALUES(10, 'Energy')")
        conn.execute("INSERT INTO GicsSectors VALUES(15, 'Materials')")
        conn.execute("INSERT INTO GicsSectors VALUES(20, 'Industrials')")
        conn.execute("INSERT INTO GicsSectors VALUES(25, 'Consumer Discretionary')")
        conn.execute("INSERT INTO GicsSectors VALUES(30, 'Consumer Staples')")
        conn.execute("INSERT INTO GicsSectors VALUES(35, 'Health Care')")
        conn.execute("INSERT INTO GicsSectors VALUES(40, 'Financials')")
        conn.execute("INSERT INTO GicsSectors VALUES(45, 'Information Technology')")
        conn.execute("INSERT INTO GicsSectors VALUES(50, 'Telecommunications Services')")
        conn.execute("INSERT INTO GicsSectors VALUES(55, 'Utilities')")
        
        conn.execute("DELETE FROM Assets WHERE Cd IN (SELECT Cd FROM LoadAssets)")
        
        conn.execute("""
        INSERT INTO Assets
        SELECT l.Cd, l.Name, g.Id
        FROM LoadAssets l
        LEFT JOIN GicsSectors g ON l.SectorName = g.Name""")
        
        conn.execute("DROP TABLE LoadAssets")
        conn.commit()
       
        c.close()
        conn.close()
Example #22
    id, car_leaving = is_car_leaving(car_data.number)

    if car_leaving:
        # car is leaving so we update the row and set TIME_LEFT of the row where id=id
        print('{} leaving at {}'.format(car_data.number, car_data.time))
        exit_car(id, car_data.time)
    else:
        # car is entering so we insert a new row
        print('{} entering at {}'.format(car_data.number, car_data.time))
        insert_car(car_data)

# Everything below, up to the `if __name__ == '__main__'` block, runs whenever this file is imported or executed with `python <file>`.

# if database doesn't exist create it
if not os.path.isfile('cars.db'):
    create_db()

# setup OpenALPR
try:
    alpr = Alpr("eu", '/etc/openalpr/openalpr.conf', '/usr/share/openalpr/runtime_data')
except:
    print('OpenALPR isn\'t setup correctly, try following the instructions in README.md')
    sys.exit()

alpr.set_top_n(20)
alpr.set_default_region("nl")


# executed if the script is run on its own
if __name__ == '__main__':
    if len(sys.argv) < 2:
Example #23
def adminDB():
    adb.create_db()
    return 'done'
Example #24
import binascii
import struct
import base64
import configparser
import logging
import calendar
import database as db

from flask import Flask, Response, request, json, redirect
from flask_socketio import SocketIO, emit, join_room, leave_room, send
from flask_cors import CORS
from datetime import datetime, timedelta
from pyfcm import FCMNotification

conn = db.create_connection("pythonsqlite.db")
db.create_db(conn)

config = configparser.ConfigParser()

app = Flask(__name__)
sio = SocketIO(app, transports=["websocket"])

CORS(app, support_credentials=True, resources={r"*": {"origins": ["*"]}})

usersInRoom = {}
messageQueue = {}
socketRoom = {}

epoch = datetime.utcfromtimestamp(0)

logging.getLogger("werkzeug").setLevel(level=logging.ERROR)
Example #25
docs = Swagger(app)

v1 = "/api/v1"

app.config["SECRET_KEY"] = SECRET
app.config["JWT_SECRET_KEY"] = JWT_SECRET
app.config["JWT_TOKEN_LOCATION"] = ["cookies"]
app.config["JWT_COOKIE_CSRF_PROTECT"] = True
app.config["JWT_REFRESH_COOKIE_PATH"] = f"{v1}/token/refresh"
app.config["SQLALCHEMY_DATABASE_URI"] = DB_URI
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config['SECURITY_PASSWORD_SALT'] = SALT

with app.app_context():
    db.init_app(app)
    create_db(db.engine)
    ma.init_app(app)
    cache.init_app(app)
    resource_cache.init_app(app)
    db.create_all()

api.add_resource(users.UsersResource, f"{v1}/users")
api.add_resource(users.UserResource, f"{v1}/users/<int:user_id>")
api.add_resource(users.CurrentUserResource, f"{v1}/users/current")
api.add_resource(login.Login, f"{v1}/login")
api.add_resource(token.Auth, f"{v1}/token/auth")
api.add_resource(token.Refresh, f"{v1}/token/refresh")
api.add_resource(token.Remove, f"{v1}/token/remove")
api.add_resource(login.ConfirmEmail, f"{v1}/confirm/<conf_token>")

Example #26
File: test.py Project: ASBishop/cbt
def splits(s,d1,d2):
    l,_,r = s.partition(d1)
    m,_,r = r.partition(d2)
    return m

def getbw(s):
    if "GB/s" in s:
        return float(s[:-4])*1024
    if "MB/s" in s:
        return float(s[:-4])
    if "KB/s" in s:
        return float(s[:-4])/1024

if __name__ == '__main__':
    ctx = parse_args()
    database.create_db()

    files = find('output.*', ctx.input_directory)
    totals = {}
    for inputname in files:
        # strip off the input directory
        params = inputname[len(ctx.input_directory):].split("/")[3:-1]
        # make readahead into an int
        params[3] = int(params[3][7:])

        # Make op_size into an int
        params[4] = int(params[4][8:])

        # Make cprocs into an int
        params[5] = int(params[5][17:])
Example #27
def update_database(mycursor, mydb):
    '''Update the database.'''

    create_db(mycursor)
    create_tables(mycursor)
    fill_products(mycursor, mydb)
Example #28
def main():

    #get new authorization token
    token = get_token()

    #get current time
    p_time = int(datetime.datetime.now().strftime("%s")) * 1000

    #retrieve data
    dat, dat2, loc_dict, loc_dict2 = final_output(token,
                                                  start=p_time,
                                                  end=p_time)

    dat2 = dat2[dat2.locationUid.isin(loc_dict2.keys())]

    loc_direction = pd.DataFrame(
        dat.groupby(["locationUid", "direction"]).pedestrianCount.sum())
    loc_speed = pd.DataFrame(
        dat.groupby(["locationUid", "direction"]).speed.mean())

    loc_df = loc_direction.merge(loc_speed, left_index=True, right_index=True)

    #add/re-adjust columns
    loc_df["direction"] = list(
        pd.DataFrame(loc_df.index).apply(lambda x: x[0][1], axis=1))
    loc_df.index = pd.DataFrame(loc_df.index).apply(lambda x: x[0][0], axis=1)
    loc_df["lat"] = list(
        pd.Series(loc_df.index).apply(lambda x: loc_dict[x][0]))
    loc_df["long"] = list(
        pd.Series(loc_df.index).apply(lambda x: loc_dict[x][1]))
    loc_df["timestamp"] = p_time

    kmeans_results = num_clustering(loc_df)
    kmeans_results.index.name = "locationUid"
    kmeans_results['date'] = pd.to_datetime(kmeans_results['timestamp'],
                                            unit='ms')

    loc_direction = pd.DataFrame(
        dat2.groupby(["locationUid", "direction"]).vehicleCount.sum())
    loc_speed = pd.DataFrame(
        dat2.groupby(["locationUid", "direction"]).speed.mean())

    loc_df = loc_direction.merge(loc_speed, left_index=True, right_index=True)

    #add/re-adjust columns
    loc_df["direction"] = list(
        pd.DataFrame(loc_df.index).apply(lambda x: x[0][1], axis=1))
    loc_df.index = pd.DataFrame(loc_df.index).apply(lambda x: x[0][0], axis=1)
    loc_df["lat"] = list(
        pd.Series(loc_df.index).apply(lambda x: loc_dict2[x][0]))
    loc_df["long"] = list(
        pd.Series(loc_df.index).apply(lambda x: loc_dict2[x][1]))
    loc_df["timestamp"] = p_time

    kmeans_results2 = num_clustering(loc_df)
    kmeans_results2.index.name = "locationUid"
    kmeans_results2['date'] = pd.to_datetime(kmeans_results2['timestamp'],
                                             unit='ms')

    #insert values into database
    database.create_db(loc_dict, loc_dict2, kmeans_results, kmeans_results2)

    return kmeans_results, kmeans_results2
Example #29
        id_o = int(params['id_o'][0])

        # check whether the person is already in db
        person = scraperwiki.sqlite.select('id,name FROM people WHERE id=?', data=[id_o])
        if person:
            return id_o

        person = {}
        person['id'] = id_o
        person['name'] = a.text

        content = self.get_content(self.PEOPLE_TPL.format(id_o))
        if content:
            person.update(self.parse_person_contact_details(content))
        else:
            person['email'] = None
            person['other'] = None

        scraperwiki.sqlite.save(['id'], person, table_name='people')

        return id_o


if __name__ == '__main__':
    # create tables explicitly
    create_db()

    logging.basicConfig(format='%(levelname)s: %(message)s')
    scraper = BratislavaScraper(sleep=1/10)
    scraper.scrape()
Example #30
def main():

    #------------------------------------------------------------------------------------------------------------------------
    #---------------------------------------------|      Step1 : Download the Files     |------------------------------------
    #------------------------------------------------------------------------------------------------------------------------

    hello.download_files()

    #-------------------------------------------------------------------------------------------------------------------------
    #------------------------------------|      Step2 :  Create and Initialise my Database      |-----------------------------
    #-------------------------------------------------------------------------------------------------------------------------

    conn = db.create_connection()
    c = conn.cursor()
    db.create_db(c, conn)
    db.table_key(c, conn)  #Search in database =>  foreignKeys

    #----------------------------------------------------------------------------------------------------------------------------
    #------------------------------------|  Step3 : Initialise my tracking lists ==> --------------------------------------------
    #------------------------------------   these lists will keep the necessary records  => -------------------------------------
    #------------------------------------   which plots will be drawn by using these lists  |------------------------------------
    #----------------------------------------------------------------------------------------------------------------------------

    year = []
    tt = []
    transport = []
    country = []

    #-----------------------------------------------------------------------------------------------------------------------------
    #-----------------------------------------|  Step4 : Process Excel files one by one |----------------------------------------
    #-----------------------------------------------------------------------------------------------------------------------------

    book = ['2011', '2012', '2013', '2014',
            '2015']  # list of the Excel workbooks' names

    for b in range(5):
        month3 = []
        temp = 0
        for s in range(2, 12, 3):

            sheet = s
            df = pd.read_excel('text' + book[b] + '.xls',
                               sheet_name=sheet,
                               index_col=None,
                               header=None)

            df.columns = [
                'NO', 'COUNTRY', 'AIR', 'RAILWAY', 'SEA', 'ROAD', 'TOTAL'
            ]
            df = df.drop(columns=['NO'])
            df = df.dropna(thresh=6)
            df = df.set_index('COUNTRY')
            df = df.drop(index='of which:')
            df = df[['AIR', 'RAILWAY', 'SEA', 'ROAD', 'TOTAL']].astype('int')
            df = df.sort_values(by=['TOTAL'], ascending=False)

            i, j = df.shape  # dimensions
            i, j = i - 1, j - 1

            #=======================================================================================================================
            #--------------------------           |         By 3 Months           |       ------------------------------------------
            #=======================================================================================================================
            temp = df.iloc[0, j] - temp
            month3.append(
                temp.tolist()
            )  # Changed from month3.append(temp). BLOB instead of integer.[HEX-repr]
            temp = df.iloc[0, j]

        year.append(month3)
        db.update_month(conn, c, month3, b)

        print("By Year List", year)
        print("By 3Months List", month3)

        #=======================================================================================================================
        #--------------------------      |         PerYear|Total            |       --------------------------------------------
        #=======================================================================================================================

        total = df.iloc[0, j]
        tt.append(total)

        total = total.tolist()
        db.update_year(conn, c, total, b)

        #print(total)
        #print(df.loc[df['TOTAL']==max(df['TOTAL'])])

        #=======================================================================================================================
        #------------------------------        |           Top5           |         --------------------------------------------
        #=======================================================================================================================

        top = df.drop(index='TOTAL ARRIVALS').head(5)
        top = top.iloc[:, j]

        country.append(top)

        countries = top.index.tolist()
        db.update_top(conn, c, countries, b)

        #print(countries)
        #print(top)

        #=======================================================================================================================
        #--------------------------             |         Transportation           |            --------------------------------
        #=======================================================================================================================

        tr = df.iloc[0, 0:4]
        tr = tr.tolist()
        db.update_trans(conn, c, tr, b)

        transport.append(tr)  # keep track for every year => plot


#        print(transport)

#=======================================================================================================================
#==============================            |       TESTING     AREA      |           ===================================
#=======================================================================================================================

#db.select_from(c,'month')

#=======================================================================================================================
#==============================================         |PLOTS|            =============================================
#=======================================================================================================================

#        plots.plot_top(top,book,b)                                 #print different graph for ever year
    top = pd.concat(country, keys=['2011', '2012', '2013', '2014',
                                   '2015'])  # consolidated printing
    plots.plot_top(top, book, b)
    plots.plot_rail(transport)
    plots.plot_year(tt, book)
    plots.plot_month3(year)
    plots.plt.show()

    #=======================================================================================================================
    #========================================        |Database Elements|            ========================================
    #=======================================================================================================================

    #    db.select_all(c)

    csvFiles.write(c)
    conn.close()
Example #31
        url = "https://%s:%s/dataservice/%s"%(self.vmanage_host, self.vmanage_port, mount_point)
        #print(url)
        payload = json.dumps(payload)
        #print (payload)

        response = self.session[self.vmanage_host].post(url=url, data=payload, headers=headers, verify=False)
        #print(response.text)
        #exit()
        #data = response
        return response

#add DBs in list form
databases = ['hostnames', 'username', 'interface_bw', 'interface_drops', 'CloudExpress'] 

#Calls database.py, passes DB list, creates DBs in influx on local machine
database.create_db(databases)

#Establishes communication and authenticates to vManage
vmanage_session = rest_api_lib(vmanage_host, vmanage_port, username, password)

#Calls specific functions in Attributes.py 

attributes.hostnames(vmanage_session)

attributes.username(vmanage_session)

attributes.interface_bw(vmanage_session)

attributes.interface_drops(vmanage_session)

attributes.CloudExpress(vmanage_session)
Example #32
    id_pic = get_picture.json()[0]["id"]  # Get picture id from json
    base64_pic = base64.b64encode(
        requests.get(url_pic).content).decode("UTF-8")

    cur = get_db().cursor()  # Connect to db
    database.add_cat_to_bd(base64_pic, id_pic, cur)  # Add new record to db
    get_db().commit()  # Commit changes
    return render_template('index.html', data=base64_pic)


@app.route('/<cat_id>/')
def get_cat_by_id(cat_id):
    # TODO return cat picture by id if exists

    cur = get_db().cursor()
    url = database.find_cat_id(cat_id, cur)  # Try to find cat_id in db
    if url is None:
        abort(404)  # If database doesn't have url return 404
    else:
        return render_template('index.html', data=url[0])


@app.errorhandler(404)
def not_found_error(error):
    return render_template('error.html'), 404


if __name__ == '__main__':
    database.create_db()  # Will create database if not exists
    app.run()
Example #33
def parse_output(archives=[], path='.'):
    database.create_db()
    #FIXME the database structure should be updated to include all the
    # fields currently produced by CBT. The code below ignores client readahead.
    # The Rbdfio/rbdfio structure produced by CBT seems redundant.
    #
    #start by parsing the Radosbench output
    files = []
    for archive in archives:
        directory = path + '/' + archive + '/00000000/Radosbench'
        files.extend(find('output.*.*', directory))
    for inputname in files:
        filepattern = re.compile(path + '/' + '(.+)')
        m = filepattern.match(inputname)
        mydirectory = m.group(1)
        params = mydirectory.split("/")
        baselist = params[:]
        baselist.pop()
        basedir = '/'.join(baselist)
        settings = []
        settings.extend(find('ceph_settings.out.*', basedir))
        # make readahead into an int
        params[3] = int(params[3][7:])
        # Make op_size into an int
        params[4] = int(params[4][8:])
        # Make cprocs into an int
        params[5] = int(params[5][17:])
        outputname = params[7]
        params[7] = params[6]
        #       I'm not sure what iodepth should be for radosbench. Setting to 1
        params[6] = 1
        params = [outputname] + params
        params_hash = mkhash(params)
        params = [params_hash] + params
        params.extend([0, 0])
        database.partial_insert(params)

        if len(settings) > 0:
            with open(settings[0]) as ceph_cluster_settings:
                cluster_settings = json.load(ceph_cluster_settings)
            database.update_columns(params[0], cluster_settings)

        pattern = re.compile(r'Bandwidth \(MB/sec\):\s+(\d+\.\d+)')
        for line in open(inputname):
            m = pattern.match(line)
            if m:
                bw = float(m.group(1))
                if params[9] == 'write':
                    database.update_writebw(params_hash, bw)
                else:
                    database.update_readbw(params_hash, bw)
    #repeat with fio output
    files = []
    for archive in archives:
        directory = path + '/' + archive + '/00000000/RbdFio'
        files.extend(find('output.*', directory))
    for inputname in files:
        filepattern = re.compile(path + '/' + '(.+)')
        m = filepattern.match(inputname)
        mydirectory = m.group(1)
        params = mydirectory.split("/")
        #        print(params)
        # make readahead into an int
        params[3] = int(params[4][7:])
        # Make op_size into an int
        params[4] = int(params[6][8:])
        # Make cprocs into an int
        params[5] = int(params[7][17:])
        # Make iodepth into an int
        params[6] = int(params[8][8:])
        params[7] = params[9]
        params[8] = 0.0
        params[9] = 0.0
        outputname = params.pop()
        params = [outputname] + params
        params_hash = mkhash(params)
        params = [params_hash] + params
        #        print(params)
        database.insert(params)
        for line in open(inputname):
            if "aggrb" in line:
                bw = getbw(splits(line, 'aggrb=', ','))
                if "READ" in line:
                    database.update_readbw(params_hash, bw)
                elif "WRITE" in line:
                    database.update_writebw(params_hash, bw)
Example #34
#!/usr/bin/python
import psycopg2
import sys

from database import create_db, create_table, set_initial_price
from bitfinex import get_last, get_previous
from validate_data import validate_data
from seperate_data import percentage_over_periods


try:
    conn = psycopg2.connect("dbname=divergence25 user=bkawk")
except psycopg2.OperationalError as err:
    print(err)
    create_db('divergence25')
    create_table('divergence25', 'price')


def main():
    qty = 1000
    most_recent = get_last('BTCUSD')[0]
    # Previous below also needs adding to the database too!
    previous = get_previous('BTCUSD', most_recent, qty)
    for i in previous:
        try:
            time = int(i[0])
            open = int(i[1])
            close = int(i[2])
            high = int(i[3])
            low = int(i[4])
            volume = int(i[5])
Example #35
    def download_sp500(self, startdate, enddate, dbfilename):
        """
        Downloads S&P500 tickers from Wikipedia and daily time series from Yahoo! Finance
        """

        # Download list of tickers, company name and GICS Sectors from Wikipedia
        url = 'http://en.wikipedia.org/wiki/List_of_S%26P_500_companies'
        page = html.parse(url)
        symbol = page.xpath(
            '//table[@class="wikitable sortable"]/tr/td[1]/a/text()')
        company = page.xpath(
            '//table[@class="wikitable sortable"]/tr/td[2]/a/text()')
        sector = page.xpath(
            '//table[@class="wikitable sortable"]/tr/td[4]/text()')

        # Add the index itself
        symbol.append('^GSPC')
        company.append('S&P 500')
        sector.append(None)

        # Since Dec-12, Yahoo! Finance lists BRK.B as BRK-B
        if 'BRK.B' in symbol: symbol[symbol.index('BRK.B')] = 'BRK-B'

        # Debugging: restrict to the first 10 stocks of the index
        #        symbol = symbol[:10]
        #        company = company[:10]
        #        sector = sector[:10]

        # If database doesn't exist, create it
        if not os.path.exists(dbfilename):
            database.create_db(dbfilename)

        conn = sqlite3.connect(dbfilename,
                               detect_types=sqlite3.PARSE_DECLTYPES
                               | sqlite3.PARSE_COLNAMES)
        c = conn.cursor()

        # Load data from Wikipedia into Load table
        conn.execute(
            "CREATE TABLE LoadAssets (Cd text, Name text, SectorName text)")
        sql = "INSERT INTO LoadAssets (Cd, Name, SectorName) VALUES (?, ?, ?)"
        c.executemany(sql, zip(symbol, company, sector))
        conn.commit()

        # Download time series from Yahoo! finance and store into db
        i = 0
        for s in list(symbol):
            data = database.get_yahoo_prices(s, startdate, enddate)
            database.save_to_db(data, dbfilename)
            self.downloaded.emit(i)  # emit signal to update progressbar
            i += 1

        # From: http://en.wikipedia.org/wiki/Global_Industry_Classification_Standard
        # Note: Telecommunication Services is listed as Telecommunications Services
        conn.execute("DELETE FROM GicsSectors")
        conn.execute("INSERT INTO GicsSectors VALUES(10, 'Energy')")
        conn.execute("INSERT INTO GicsSectors VALUES(15, 'Materials')")
        conn.execute("INSERT INTO GicsSectors VALUES(20, 'Industrials')")
        conn.execute(
            "INSERT INTO GicsSectors VALUES(25, 'Consumer Discretionary')")
        conn.execute("INSERT INTO GicsSectors VALUES(30, 'Consumer Staples')")
        conn.execute("INSERT INTO GicsSectors VALUES(35, 'Health Care')")
        conn.execute("INSERT INTO GicsSectors VALUES(40, 'Financials')")
        conn.execute(
            "INSERT INTO GicsSectors VALUES(45, 'Information Technology')")
        conn.execute(
            "INSERT INTO GicsSectors VALUES(50, 'Telecommunications Services')"
        )
        conn.execute("INSERT INTO GicsSectors VALUES(55, 'Utilities')")

        conn.execute(
            "DELETE FROM Assets WHERE Cd IN (SELECT Cd FROM LoadAssets)")

        conn.execute("""
        INSERT INTO Assets
        SELECT l.Cd, l.Name, g.Id
        FROM LoadAssets l
        LEFT JOIN GicsSectors g ON l.SectorName = g.Name""")

        conn.execute("DROP TABLE LoadAssets")
        conn.commit()

        c.close()
        conn.close()
Example #36
            entry1 = i[0].center(m.CHAR_LIMIT)
            entry2 = str(i[1]).center(m.CHAR_LIMIT)
            entry3 = str(i[2]).center(m.CHAR_LIMIT)
            print(fill_in.format(entry1, entry2, entry3))
            print(separator)

    elif answer == 'Delete Robot':
        li = retrieve_robots(conn)
        if li:
            m.list_robots['choices']=li
            robot = prompt(m.list_robots)['name']
            confirm = prompt(m.confirm)['continue']
            if confirm and delete_robot(conn, robot):
                conn.commit()
        else:
            print('No robots have been created yet!')

    elif answer == 'Exit':
        return
    
    main(conn)

if __name__ == "__main__":
    try:
        conn = create_db()
        main(conn)
    except Error as e:
        print(e)
    finally:
        if conn:
            conn.close()
Example #37
def init_db():
    database.create_db()
    database.create_classes()
    click.echo("Initialized the database")
Example #38
    def setUpClass(cls):
        create_db()
        cls.client = app.app.test_client()
        cls.db_session = app.db_session
Example #39

log_module.configure_logging()

log = logging.getLogger("server")

app = bottle.Bottle()
app.install(sql_alchemy_plugin)
# Configure the SessionMiddleware
session_opts = {
    'session.type': 'ext:database',
    'session.url': 'sqlite:///session.db',
}
app_with_session = SessionMiddleware(app, session_opts)

db = create_db()

auth_uri = flow.step1_get_authorize_url()


def get_user_info(credentials):
    """Get user info from credentials.

    Arguments:
    credentials -- to access the Drive API
    """

    http_auth = credentials.authorize(httplib2.Http())
    people_service = build('people', 'v1', http=http_auth)
    people_resource = people_service.people()
    info = people_resource.get(resourceName='people/me').execute()
Example #40
    def create_db(self):
        if not os.path.isfile(database.file_name):
            file = filedialog.askopenfile()
            database.create_db(
                database.file_name,
                database.open_from_file(file.name, _encoding=file.encoding))
Example #41
import binascii
import struct
import base64
import configparser
import database as db
import logging

from flask import Flask, Response, request, json
from flask_socketio import SocketIO, emit
from flask_cors import CORS
from datetime import datetime, timedelta
from pyfcm import FCMNotification

epoch = datetime.utcfromtimestamp(0)
conn = db.create_connection("pythonsqlite.db")  # connection
db.create_db(conn)  # create tables

config = configparser.ConfigParser()
config.read('config.ini')
push_service = FCMNotification(api_key=config['DEFAULT']['API_KEY'])

app = Flask(__name__)
sio = SocketIO(app)
CORS(app, resources={r"*": {"origins": ["*"]}})

# Disable the default spam logging caused by Flask.
logging.getLogger("werkzeug").setLevel(level=logging.ERROR)

logger = logging.getLogger(__name__)
logger.setLevel(level=logging.DEBUG)
Example #42
def initialization():
	savegamedir, savegames = get_current_savegame_filenames()
	if create_db() == True:
		# if just created db, add all savegames to it
		set_monitors()
	return savegamedir