def insert_list_to_db(videos):
    try:
        connection = storage.connect()
        cursor = connection.cursor()
        count_row = 0
        for mydict in videos:
            columns = ', '.join("`" + str(x).replace('/', '_') + "`"
                                for x in mydict.keys())
            values = ', '.join("'" + str(x).replace('/', '_') + "'"
                               for x in mydict.values())
            sql = "INSERT IGNORE INTO %s ( %s ) VALUES ( %s );" % (
                'videos_details', columns, values)
            cursor.execute(sql)
            connection.commit()
            if cursor.rowcount == 1:
                count_row = count_row + 1

        print(count_row)
        #update the is_done field after the insert; it's a flag marking the row as taken.
        #row_id is expected to be defined at module level (the pending video id
        #selected from youtube_list before this function runs).
        sql = "UPDATE youtube_list SET is_done= 1 WHERE video_id='%s' LIMIT 1" % (
            row_id)
        cursor.execute(sql)
        connection.commit()
        cursor.close()

    except mysql.connector.Error as error:
        print("Failed to insert record into videos_details table {}".format(
            error))

    finally:
        if (connection.is_connected()):
            connection.close()
            print("MySQL connection is closed")
def main():
    config.parse_options()
    global_vars.storage_handle = storage.connect(global_vars.db_filename)
    model_list = generate_model_list()
    clear_tables(model_list)
    create_new_tables(model_list)
    storage.close(global_vars.storage_handle)
def insert_list_to_db(videos):
    try:
        connection = storage.connect()
        cursor = connection.cursor()
        count_row = 0
        for mydict in videos:
            columns = ', '.join("`" + str(x).replace('/', '_') + "`"
                                for x in mydict.keys())
            values = ', '.join("'" + str(x).replace('/', '_') + "'"
                               for x in mydict.values())
            sql = "INSERT IGNORE INTO %s ( %s ) VALUES ( %s );" % (
                'youtube_list', columns, values)
            cursor.execute(sql)
            connection.commit()
            if cursor.rowcount == 1:
                count_row = count_row + 1

        print(count_row)
        cursor.close()

    except mysql.connector.Error as error:
        print(
            "Failed to insert record into Youtube_list table {}".format(error))

    finally:
        if (connection.is_connected()):
            connection.close()
            print("MySQL connection is closed")
def update_db(txx, rid, timestr, datestr):

    timebty = bytearray(timestr, 'utf-8')
    datebty = bytearray(datestr, 'utf-8')
    txx = bytearray(txx, 'utf-8')
    rid = bytearray(rid, 'utf-8')
    conn = storage.connect()
    c = conn.cursor(buffered=True)
    c.execute("SELECT * FROM veh_all_bl where veh_no=%s", (txx, ))
    row = c.fetchone()
    if not row:
        #print("Not exist")
        c.execute("SELECT MAX(veh_id) FROM  veh_all_bl")
        row1 = c.fetchone()
        new_id = row1[0] + 1
        #print(row1[0])
        c.execute("INSERT INTO veh_all_bl(veh_id,veh_no) VALUES(%s,%s)",
                  (new_id, txx))
        time.sleep(0.1)
        c.execute(
            "INSERT INTO veh_lpinfotbl(lp_noid,ckpnt_id,lp_time,lp_date) VALUES(%s,%s,%s,%s)",
            (new_id, rid, timebty, datebty))
    else:
        #print ("exist")
        c.execute("SELECT veh_id FROM veh_all_bl WHERE veh_no =%s", (txx, ))
        row2 = c.fetchone()
        c.execute(
            "INSERT INTO veh_lpinfotbl(lp_noid,ckpnt_id,lp_time,lp_date) VALUES(%s,%s,%s,%s)",
            (row2[0], rid, timebty, datebty))
        time.sleep(0.1)
    conn.commit()
    c.close()
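A hedged usage sketch for update_db: judging from the queries above, txx is the vehicle plate (veh_no), rid the checkpoint id, and the last two arguments are time and date strings. The concrete values below are made up.

import time

# Hypothetical call: plate "ABC123" seen at checkpoint "7" right now.
update_db("ABC123", "7", time.strftime("%H:%M:%S"), time.strftime("%Y-%m-%d"))
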
Example #5
 def delete_data(self):
     con = storage.connect()
     cur = con.cursor()
     cur.execute("delete from destinations where priority=%s", self.priority.get())
     rows = cur.fetchall()
     con.commit()
     con.close()
     self.fetch_all()
     self.clear_it()
Example #6
 def fetch_all(self):
     con = storage.connect()
     cur = con.cursor()
     cur.execute("select * from destinations")
     rows = cur.fetchall()
     if len(rows) != 0:
         self.travel_table.delete(*self.travel_table.get_children())
         for x in rows:
             self.travel_table.insert('', END, values=x)
     con.commit()
     con.close()
Example #7
 def search_all(self):
     con = storage.connect()
     cur = con.cursor()
     cur.execute("select * from destinations where " + str(self.search_drop.get()) + " LIKE '%" + str(
         self.search_text.get()) + "%'")
     rows = cur.fetchall()
     if len(rows) != 0:
         self.travel_table.delete(*self.travel_table.get_children())
         for x in rows:
             self.travel_table.insert('', END, values=x)
     con.commit()
     con.close()
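search_all concatenates the widget text straight into the query, so any quote typed into the search box breaks the statement. A sketch of a safer variant, assuming the same destinations table as the examples above; the column whitelist is an assumption, since identifiers cannot be bound as parameters, only the LIKE value can:

 def search_all(self):
     allowed = {"priority", "name", "country", "climate", "travel_mode", "stay"}  # assumed column names
     column = self.search_drop.get()
     if column not in allowed:
         return
     con = storage.connect()
     cur = con.cursor()
     cur.execute("select * from destinations where " + column + " LIKE %s",
                 ('%' + self.search_text.get() + '%',))
     rows = cur.fetchall()
     if len(rows) != 0:
         self.travel_table.delete(*self.travel_table.get_children())
         for x in rows:
             self.travel_table.insert('', END, values=x)
     con.close()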
Example #8
def connectandload(load_sql):
    try:
        conn = storage.connect()
        cursor = conn.cursor()
        cursor.execute(load_sql)
        print("Successfully loaded the table from csv.")
    except mysql.connector.Error as e:
        if e.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            print("Something is wrong with your user name or password")
        elif e.errno == errorcode.ER_BAD_DB_ERROR:
            print("Database does not exist")
        else:
            print("Error: {}".format(e))
            sys.exit(1)
    else:
        print("Successful execution!")
        conn.commit()  # persist the loaded rows before closing the connection
        conn.close()
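A hedged usage sketch for connectandload: the file name, target table, and CSV layout below are assumptions, and LOCAL INFILE must be enabled on both the server and the client connection for LOAD DATA LOCAL to work.

load_sql = (
    "LOAD DATA LOCAL INFILE 'videos.csv' "      # assumed file name
    "INTO TABLE videos_details "                # assumed target table
    "FIELDS TERMINATED BY ',' ENCLOSED BY '\"' "
    "LINES TERMINATED BY '\\n' "
    "IGNORE 1 LINES"
)
connectandload(load_sql)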
def execute_geocoding(club_cluster):

    cnx = connect()
    cursor = cnx.cursor()

    cursor.execute('SELECT club_name from cluster_fount')

    clubs = cursor.fetchall()

    cnx.commit()
    cursor.close()

    for club in clubs:
        if club[0] not in str(club_cluster):

            validLetters = "abcdefghijklmnopqrstuvwxyz/:-|&.#0123456789 "

            club = ''.join([char for char in str(club).lower()[2:] if char in validLetters])

            response = requests.get('https://maps.googleapis.com/maps/api/geocode/json?address=' + str(club) + ',Berlin,Germany&key=AIzaSyBnxwGFcisM77sVnpepLzoKXkEtt9Bnwlo')
            results = response.json()

            if len(results['results']) == 0:
                output = {
                    "formatted_address": None,
                    "latitude": None,
                    "longitude": None,
                }
            else:
                answer = results['results'][0]
                output = {
                    "formatted_address": answer.get('formatted_address'),
                    "latitude": answer.get('geometry').get('location').get('lat'),
                    "longitude": answer.get('geometry').get('location').get('lng'),
                }

            cursor = cnx.cursor()

            cursor.execute('UPDATE cluster_fount SET latitude="%s", longitude="%s", event_address="%s" WHERE club_name="%s"' % (output['latitude'], output['longitude'], output['formatted_address'], club))

            cnx.commit()
            cursor.close()
            print("Uploaded Coordinates and Address Database")
Example #10
 def update_data(self):
     con = storage.connect()
     cur = con.cursor()
     cur.execute(
         "update destinations set name=%s, country=%s, climate=%s, activities=%s, travel_mode=%s, stay=%s where "
         "priority=%s",
         (
             self.name.get(),
             self.country.get(),
             self.climate.get(),
             self.txt_activity.get('1.0', END),
             self.travel_mode.get(),
             self.stay.get(),
             self.priority.get()
         ))
     con.commit()
     self.fetch_all()
     self.clear_it()
     con.close()
Example #11
 def add_travels(self):
     if self.priority.get() == "" or self.name.get() == "":
         messagebox.showerror("Error", "Priority and Name cannot be blank")
     else:
         con = storage.connect()
         cur = con.cursor()
         cur.execute("insert into destinations values(%s, %s, %s, %s, %s, %s, %s)", (
             self.priority.get(),
             self.name.get(),
             self.country.get(),
             self.climate.get(),
             self.txt_activity.get('1.0', END),
             self.travel_mode.get(),
             self.stay.get()
         ))
         # Keep the commit and UI refresh inside the else branch; con is only
         # defined when validation passed and the insert actually ran.
         con.commit()
         self.fetch_all()
         self.clear_it()
         con.close()
         messagebox.showinfo("Congrats", "New destination entered.  You better start saving $$$!")
def chkdbtime():
    try:
        conn = storage.connect()
        c = conn.cursor(buffered=True)
        # Two UPDATE_TIME queries run back to back, but only the second result
        # (veh_lpinfotbl) is fetched and returned below.
        c.execute(
            "SELECT UPDATE_TIME FROM information_schema.tables WHERE TABLE_SCHEMA = 'db_lp_ckpnt20193' AND TABLE_NAME = 'veh_all_bl'"
        )
        c.execute(
            "SELECT UPDATE_TIME FROM information_schema.tables WHERE TABLE_SCHEMA = 'db_lp_ckpnt20193' AND TABLE_NAME = 'veh_lpinfotbl'"
        )

        y = c.fetchone()
        tdb = y[0]  #UPDATE_TIME comes back as a datetime; normalise it to a plain string below
        tdb = datetime.strptime(str(tdb), "%Y-%m-%d %H:%M:%S")
        tdb = str(tdb)
        conn.commit()
        c.close()
        return (tdb)
    except Exception as e:
        print(e)
        pass
def get_viol():
    global told
    try:
        conn = storage.connect()
        c = conn.cursor(buffered=True)
        tnew = chkdbtime()
        if tnew > told:
            print("Updating viol list")
            #c.execute("SELECT veh_no FROM veh_all_bl WHERE bl_flg=%s",(bl,))
            f = open('viol.txt', 'w')
            c.execute("SELECT veh_no FROM veh_all_bl WHERE bl_flg='1'")
            rows = c.fetchall()
            for row in rows:
                #print(row[0])
                f.write(row[0] + '\n')
            f.close()
            conn.commit()
            c.close()
            told = tnew
        else:
            print("no viol updates")
    except Exception as e:
        print(e)
        pass
Example #14
from oauth2client.client import OAuth2WebServerFlow
from werkzeug.exceptions import Unauthorized

CLIENT_ID = os.environ['CLIENT_ID']
CLIENT_SECRET = os.environ['CLIENT_SECRET']
SCOPE = ['https://mail.google.com/',
         'https://www.googleapis.com/auth/userinfo.email',
         'https://www.googleapis.com/auth/gmail.readonly',
         'https://www.googleapis.com/auth/gmail.modify',
         'https://www.googleapis.com/auth/gmail.labels',
         'https://www.googleapis.com/auth/gmail.settings.basic']
APP_SECRET_KEY = 'VfedCzx,eT88kj7A33^K'

logger = log.build_logger(__name__)
app = flask.Flask(__name__)
conn = storage.connect(os.environ['DATABASE_URL'])
storage.migrate(conn)

app.config['DEBUG'] = True
app.config['SECRET_KEY'] = APP_SECRET_KEY
app.config['CONNECTION'] = conn
app.config['TIMER'] = services.start_timer(conn)

@app.route('/timer/start', methods=['POST'])
def start():
    conn = app.config['CONNECTION']
    user_id = flask.session['user_id']
    logger.info('starting timer for user: {}'.format(user_id))
    storage.save_timer(conn, user_id, True)
    return flask.redirect(flask.url_for('index'))
 def db_connect(self) -> ModLinkBotConnection:
     """Connect to the database."""
     return connect(getattr(self.config, "database_path", "modlinkbot.db"))
Example #16
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
from matplotlib import dates as mpl_dates
from matplotlib.ticker import (MultipleLocator, FormatStrFormatter, AutoMinorLocator)
import datetime
import storage
conn = storage.connect()
today = (datetime.datetime.now().strftime("%Y-%m-%d"))
print(today)
todayweek = (datetime.datetime.now().strftime("%W"))

# Make the figure wider to see things better
plt.figure(figsize=(12,6))


### 2017 
my_query2017 = ''' SELECT `day_of_year`, YEAR(d_utc) AS yeary, `temp_f_davg` , `temp_f_dmin` , `temp_f_dmax`, recs FROM `weather`.`v_E1248_daily` WHERE YEAR(d_utc)=2018 AND recs >200 ORDER BY day_of_year ASC;'''
df1 = pd.read_sql_query(my_query2017,conn)
tempf_vals2017 = df1['temp_f_davg']
dater2017 = df1['day_of_year']
lower_y_error2017 = df1['temp_f_dmin']
upper_y_error2017 = df1['temp_f_dmax']
# errorbar expects error magnitudes (offsets from the data points), so convert
# the daily min/max temperatures into distances below and above the daily average
y_error = [tempf_vals2017 - lower_y_error2017, upper_y_error2017 - tempf_vals2017]
# plt.errorbar(x, y, yerr = y_error
plt.errorbar(dater2017, tempf_vals2017, yerr = y_error, fmt='-o')
plt.plot(dater2017, tempf_vals2017, linestyle="", color="red", linewidth=1, marker='o', label="2017", markersize=2)

### Max  
from datetime import datetime
import sys  # needed for the sys.exit() call below
import mysql.connector
from mysql.connector import Error, errorcode
import time
import configparser
import storage

#read the API token and search keyword from config.ini
config = configparser.ConfigParser()
config.read('config.ini')
key = config['token']['key']
q = config['keyword']['q']

#Get start and end date
try:
    connection = storage.connect()
    cursor = connection.cursor()
    #select one row from the video ID table so the video's details can be fetched
    get_row = "SELECT * FROM youtube_list WHERE is_going=0 and is_done=0 LIMIT 1"
    cursor.execute(get_row)
    records = cursor.fetchall()

    #Check if there is a row to select or not
    if not records:
        print("there is no video ID left in youtube_list to select")
        sys.exit()

    #if there is a row, take its video_id
    for record in records:
        row_id = record[0]
    connection.commit()

except mysql.connector.Error as error:
    # Except clause added so this excerpt parses on its own; it mirrors the
    # handlers used in the other examples from the same project.
    print("Failed to read from youtube_list table {}".format(error))
Example #18
import storage as st
collectors_db = st.connect()


class Collector:
    def __init__(self,
                 _id=None,
                 id=None,
                 fullName=None,
                 cellphone=None,
                 password=None):
        self._id = _id
        self.id = id
        self.fullName = fullName
        self.cellphone = cellphone
        self.password = password

    def get(self, _id):
        collector = collectors_db.collectors.find_one({'id': _id},
                                                      {'_id': False})
        if collector:
            return collector
        else:
            return None
Example #19
                + "£" + str(int(avprice)))

    except:
        print("Cannot calculate average")

    save_prompt = input("\nSave results as spreadsheet? y/n: ")

    if "y" in save_prompt:
        filename = f"{search_time}_{city}"
        df.to_csv(f"{filename}.csv")
        print(f"Saved data in file: '{filename}.csv'")
    else:
        print("No spreadsheet saved")

    print(f"Saving {len(proplist)} properties to {city.upper()} database...")
    storage.connect(city)

    properties_saved = 0
    properties_existing = 0

    for p in proplist:  # consider adding tqdm - and removing print statements in storage
        if storage.insert(city, 'N/A', p['Price'], p['Address'], p['Beds'],
                          'N/A', 'N/A', p['Agent_Name'], p['Agent_tel'],
                          p['Website'], p['Acquire_time']) == 'new':
            properties_saved += 1
        else:
            properties_existing += 1
        print(
            f"Saved {properties_saved} to {city} - {properties_existing} already in database"
        )
Example #20
from flask_restful import Resource, reqparse, request
from flask_jwt_extended import (create_access_token, create_refresh_token,
                                jwt_required, jwt_refresh_token_required,
                                get_jwt_identity)
import Controllers.CollectorController as CollectorController

import storage as st
db = st.connect()
# collection holding all the collector info
CollectorCollection = db.collectors
CollectorCodesCollection = db.collector_codes

# validate that the body JSON contains the required fields
parser = reqparse.RequestParser()
parser.add_argument('id', help='This field cannot be blank', required=True)
parser.add_argument('password',
                    help='This field cannot be blank',
                    required=True)

# validate all the fields when registering a collector
parserCollector = reqparse.RequestParser()
parserCollector.add_argument('id',
                             help='This field cannot be blank',
                             required=True)
parserCollector.add_argument('fullName',
                             help='This field cannot be blank',
                             required=True)
parserCollector.add_argument('password',
                             help='This field cannot be blank',
                             required=True)
parserCollector.add_argument('cellphone',
Example #21
import storage as st

analist_db = st.connect()
from bson.json_util import dumps


class Analist:
    def __init__(self,
                 _id=None,
                 id=None,
                 fullName=None,
                 password=None,
                 state=None):
        self._id = _id
        self.id = id
        self.fullName = fullName
        self.password = password
        self.state = state

    def get(self, _id):
        analist = analist_db.analists.find_one({'id': _id}, {'_id': False})
        if analist:
            return analist
        else:
            return None

    def insert(self, id, fullName, password, state):
        analist = {
            "id": id,
            "fullName": fullName,
            "password": password,
Example #22
def localstorage():
    return json.dumps(connect())