Example #1
    def _merge(self):
        if not os.path.isfile(self._fromdb):
            raise OriginError("%r is not a file." % self._fromdb)

        new = sqlite.connect(self._fromdb)
        self._fromcursor = new_cursor = new.cursor(_DictCursor)

        if not os.path.isfile(self._todb):
            raise DestinationError("%r is not a file." % self._todb)

        old = sqlite.connect(self._todb)
        self._tocursor = old_cursor = old.cursor(_DictCursor)

        new_tables = new_cursor.execute(
                "SELECT name FROM sqlite_master "
                "WHERE type='table'").fetchall()
        for table in new_tables:
            name = table['name']
            # Check whether this table already exists in the older db
            old_table = old_cursor.execute(
                    "SELECT name FROM sqlite_master "
                    "WHERE type='table' AND name=?", (name, )).fetchone()

            if old_table is None:
                # The older db does not have this table; create it there,
                # along with the related triggers.
                print "Adding the table '%s' and related triggers." % name
                if self._dryrun:
                    continue
                self._create_table(name)
                self._create_triggers(name)

            else:
                self._merge_table(name)
                self._merge_triggers(name)
Example #2
	def __init__(self, dbfile):
		# Connecting creates the file if it does not already exist, so check
		# first and only build the schema for a brand-new database.
		is_new = not os.path.exists(dbfile)
		self.conn = sqlite.connect(dbfile)
		self.cursor = self.conn.cursor()
		if is_new:
			self.create_table()
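A self-contained version of the same create-if-missing pattern using the standard sqlite3 module; the notes table and its schema are made up for illustration:

import os
import sqlite3

def open_db(dbfile):
    # connect() would create the file, so remember whether it existed beforehand.
    is_new = not os.path.exists(dbfile)
    conn = sqlite3.connect(dbfile)
    if is_new:
        conn.execute("CREATE TABLE notes (key TEXT PRIMARY KEY, value TEXT)")
        conn.commit()
    return conn

conn = open_db("example.db")  # example usage; creates example.db on first run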
Example #3
 def onPlayBackStopped(self):
     '''
     Called when playback is stopped (normal or otherwise)
     
     Checks to see if we've watched more than watch_percent. If so, then the bookmark is deleted and 
     watchedCallback is called if it exists.
     If we didn't play at all, raises a playback failed exception.
     Otherwise, save a new bookmark at the furthest watched spot.
     '''
     addon.log('> onPlayBackStopped')
     self._playbackLock.clear()
     
     playedTime = self._lastPos
     addon.log('playedTime / totalTime : %s / %s = %s' % (playedTime, self._totalTime, playedTime/self._totalTime))
     if playedTime == 0 and self._totalTime == 999999:
         raise PlaybackFailed('XBMC silently failed to start playback')
     elif (((playedTime/self._totalTime) > self.watch_percent) and (self.video_type == 'movie' or (self.season and self.episode))):
         addon.log('Threshold met.')
         if self.watchedCallback: self.watchedCallback()
         db = sqlite.connect(DB_PATH)
         db.execute('DELETE FROM bookmarks WHERE plugin=? AND video_type=? AND title=? AND season=? AND episode=? AND year=?', (self.plugin, self.video_type, self.title, self.season, self.episode, self.year))
         db.commit()
         db.close()
     else:
         addon.log('Threshold not met. Saving bookmark')
         db = sqlite.connect(DB_PATH)
         db.execute('INSERT OR REPLACE INTO bookmarks (plugin, video_type, title, season, episode, year, bookmark) VALUES(?,?,?,?,?,?,?)',
                   (self.plugin, self.video_type, self.title, self.season, self.episode, self.year, playedTime))
         db.commit()
         db.close()
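The docstring above describes the decision: past watch_percent the bookmark is deleted, otherwise the furthest position is saved. A minimal standalone sketch of just that decision with parameterized statements, assuming the bookmarks table created elsewhere by the add-on already exists:

import sqlite3

def finish_playback(db_path, played, total, watch_percent, keys):
    # keys is the (plugin, video_type, title, season, episode, year) tuple used above.
    db = sqlite3.connect(db_path)
    if total and (float(played) / total) > watch_percent:
        # Watched far enough: drop the bookmark.
        db.execute("DELETE FROM bookmarks WHERE plugin=? AND video_type=? AND title=? "
                   "AND season=? AND episode=? AND year=?", keys)
    else:
        # Not watched enough: remember the furthest position.
        db.execute("INSERT OR REPLACE INTO bookmarks (plugin, video_type, title, season, "
                   "episode, year, bookmark) VALUES (?,?,?,?,?,?,?)", keys + (played,))
    db.commit()
    db.close()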
Example #4
    def __init__(self, path, db_name="base.db"):
        '''constructor'''
        self.path = path
        self.db_name = db_name

        self.events = {}
        self.groups = {}
        self.accounts = {}

        full_path = os.path.join(path, db_name)

        if os.path.exists(full_path + "copy"):
            shutil.copy(full_path + "copy", full_path)

        self.connection = sqlite.connect(full_path)
        self.cursor = self.connection.cursor()

        if self.__need_clean():
            self.__clean()

        self.connection = sqlite.connect(full_path)
        self.cursor = self.connection.cursor()

        self._count = 0

        try:
            self._create()
        except sqlite.OperationalError:
            self._load_events()
            self._load_groups()
            self._load_accounts()
            self._load_account_by_group()
Example #5
def main(argv):
    if len(argv) != 3:
        print >> sys.stderr, "Syntax: %s old.db new.db" % argv[0]
        print >> sys.stderr, "New db must exist but have no contents."
        sys.exit(1)

    con1 = sqlite.connect(argv[1])
    con1.text_factory = str
    cur1 = con1.cursor()
    con2 = sqlite.connect(argv[2])
    cur2 = con2.cursor()

    # Convert table board
    cur1.execute('SELECT id, title FROM board')
    for result in cur1.fetchall():
        cur2.execute(u'INSERT INTO board (id, title) VALUES (%s, %s)' %
                     tuple([sqlquote(x) for x in result]))
    # Convert table forum
    cur1.execute('SELECT id, boardid, title FROM forum')
    for result in cur1.fetchall():
        cur2.execute(u'INSERT into forum (id, boardid, title) VALUES '\
                     u'(%s, %s, %s)' % tuple(
                         [sqlquote(x) for x in result]))
    # Convert table message
    cur1.execute('SELECT id, forumid, boardid, mdate, mtime, mto, mfrom, reference, subject, body FROM message')
    for result in cur1.fetchall():
        cur2.execute(u'INSERT into message (id, forumid, boardid, mdate, mtime, mto, mfrom, reference, subject, body) VALUES '\
                     u'(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' % tuple(
                         [sqlquote(x) for x in result]))
    con2.commit()
    con2.close()
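The same row copy can be written with sqlite3 parameter binding instead of manual sqlquote calls, which handles quoting and escaping automatically; a minimal sketch for the board table, assuming both databases already contain it:

import sqlite3

def copy_board(old_path, new_path):
    con1 = sqlite3.connect(old_path)
    con1.text_factory = str
    con2 = sqlite3.connect(new_path)
    rows = con1.execute('SELECT id, title FROM board').fetchall()
    # ? placeholders let the driver bind the values safely.
    con2.executemany('INSERT INTO board (id, title) VALUES (?, ?)', rows)
    con2.commit()
    con2.close()
    con1.close()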
Example #6
    def onPlayBackStopped(self):
        addon.log("> onPlayBackStopped")
        self._playbackLock.clear()

        playedTime = self._lastPos
        watched_values = [0.7, 0.8, 0.9]
        min_watched_percent = watched_values[int(addon.get_setting("watched-percent"))]
        addon.log("playedTime / totalTime : %s / %s = %s" % (playedTime, self._totalTime, playedTime / self._totalTime))
        if playedTime == 0 and self._totalTime == 999999:
            raise PlaybackFailed("XBMC silently failed to start playback")
        elif (playedTime / self._totalTime) > min_watched_percent:
            addon.log("Threshold met. Marking item as watched")
            self.ChangeWatched(
                self.imdbnum, self.video_type, self.title, self.season, self.episode, self.year, watched=7
            )
            db = sqlite.connect(DB)
            db.execute(
                "DELETE FROM bookmarks WHERE video_type=? AND title=? AND season=? AND episode=? AND year=?",
                (self.video_type, self.title, self.season, self.episode, self.year),
            )
            db.commit()
            db.close()
        else:
            addon.log("Threshold not met. Saving bookmark")
            db = sqlite.connect(DB)
            db.execute(
                "INSERT OR REPLACE INTO bookmarks (video_type, title, season, episode, year, bookmark) VALUES(?,?,?,?,?,?)",
                (self.video_type, self.title, self.season, self.episode, self.year, playedTime),
            )
            db.commit()
            db.close()
Example #7
def create_database(driver, database, username=None, password=None, 
                    hostname=None):
    if driver == 'sqlite':
        db = SqliteDatabase(database)
        return db
    elif driver == 'mysql':
        db = MysqlDatabase(database, username, password, hostname)
    elif driver == 'postgres':
        # TODO
        raise DatabaseDriverNotSupported
    else:
        raise DatabaseDriverNotSupported
                
    # Try to connect to database
    try:
        db.connect().close()
        return db
    except AccessDenied, e:
        if password is None:
            import sys
            import getpass

            # FIXME: catch KeyboardInterrupt exception
            # FIXME: it only works on UNIX (/dev/tty),
            #  not sure whether it's bug or a feature, though
            oldout, oldin = sys.stdout, sys.stdin
            sys.stdin = sys.stdout = open('/dev/tty', 'r+')
            password = getpass.getpass()
            sys.stdout, sys.stdin = oldout, oldin
            
            return create_database(driver, database, username, password, 
                                   hostname)
        raise e
Example #8
	def connect(self):
		if self.LOGGING_LEVEL == 1:
			IGNORE_UNIQUE_ERRORS = False
			SILENT_STATEMENTS = False
		try:	
			import mysql.connector as database
			self.log("Loading mysql.connector as DB engine")
			self.DBH = database.connect(self.dbname, self.username, self.password, self.host, buffered=True)
		except:
			import MySQLdb as database
			self.log("Loading MySQLdb as DB engine")
			self.DBH=database.connect(host=self.host,user=self.username,passwd=self.password,db=self.dbname)
		self.DBC = self.DBH.cursor()
		try:		
			row = self.query("SELECT version, (version < ?) AS outdated FROM rw_version ORDER BY version DESC LIMIT 1", [DATABASE_VERSION])
			outdated = str2bool(str(row[1]))
			if outdated:
				self.log("Database outdated", level=0)
				self.initialize()
				self.log("Upgrading database", level=0)
				for v in range(row[0]+1, DATABASE_VERSION+1):
					upgrade_file = "upgrade.mysql.%s.sql" % str(v)
					self.runSQLFile(upgrade_file)
				self.commit()
			self.log("Database version: %s" % str(DATABASE_VERSION))
		except:
			self.initialize()
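The connect() method above (and videoLibraryConnect() further down) falls back from mysql.connector to MySQLdb inside a bare except. A minimal sketch of that fallback keyed on ImportError only, so a genuine connection error is not silently swallowed; the credentials are placeholders:

def open_mysql(host, user, passwd, dbname):
    # Prefer mysql.connector; fall back to MySQLdb only when it is not installed.
    try:
        import mysql.connector as database
        return database.connect(host=host, user=user, password=passwd,
                                database=dbname, buffered=True)
    except ImportError:
        import MySQLdb as database
        return database.connect(host=host, user=user, passwd=passwd, db=dbname)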
Example #9
def alterFavourites(content, favourites):
    try:
        dbcon = database.connect(control.databaseFile)
        dbcur = dbcon.cursor()
        dbcon2 = database.connect(control.favouritesFile)
        dbcur2 = dbcon2.cursor()
        dbcur2.execute("CREATE TABLE IF NOT EXISTS %s (""id TEXT, ""items TEXT, ""UNIQUE(id)"");" % content)
    except:
        return

    for i in favourites:
        try:
            dbcur.execute("DELETE FROM favourites WHERE imdb_id = '%s'" % i['imdb'])
        except:
            pass
        try:
            dbcur2.execute("INSERT INTO %s Values (?, ?)" % content, (i['imdb'], repr({'title': i['title'], 'year': i['year'], 'imdb': i['imdb'], 'tmdb': i['tmdb'], 'tvdb': i['tvdb'], 'tvrage': i['tvrage'], 'poster': i['poster'], 'fanart': i['fanart']})))
        except:
            pass

    try:
        dbcon.commit()
        dbcon2.commit()
    except:
        return
Example #10
    def __conectar(self):
        """Private helper that connects to the database."""

        # If the database file already exists, simply connect to it.
        if os.path.exists(self.__fichero):
            return sqlite.connect(self.__fichero)

        # Otherwise create the database and the CLIENTES table.
        else:

            conexion = sqlite.connect(self.__fichero)
            cursor = conexion.cursor()
            TABLA = """
                    CREATE TABLE CLIENTES (
                    ID INTEGER PRIMARY KEY,
                    NOMBRE VARCHAR(20),
                    APELLIDOS VARCHAR(40),
                    EMAIL VARCHAR(30),
                    TELEFONO VARCHAR(12),
                    EMPRESA VARCHAR(30),
                    CIFONIF VARCHAR(30)
                    );"""
            # Execute the statement and commit so the schema is persisted.
            cursor.execute(TABLA)
            conexion.commit()
            # Return the open connection.
            return conexion
Example #11
    def __init__(self, api_key='', debug=False, lang='fr'):

        self.api_key = self.ADDON.getSetting('api_tmdb')
        self.debug = debug
        self.lang = lang
        self.poster = 'https://image.tmdb.org/t/p/%s' % self.ADDON.getSetting('poster_tmdb')
        self.fanart = 'https://image.tmdb.org/t/p/%s'  % self.ADDON.getSetting('backdrop_tmdb')
        #self.cache = cConfig().getFileCache()

        try:
            #if not os.path.exists(self.cache):
            if not xbmcvfs.exists(self.CACHE):
                #f = open(self.cache,'w')
                #f.close()
                self.db = sqlite.connect(self.REALCACHE)
                self.db.row_factory = sqlite.Row
                self.dbcur = self.db.cursor()
                self.__createdb()
        except:
            VSlog('erreur: Impossible d ecrire sur %s' % self.REALCACHE )
            pass

        try:
            self.db = sqlite.connect(self.REALCACHE)
            self.db.row_factory = sqlite.Row
            self.dbcur = self.db.cursor()
        except:
            VSlog('erreur: Impossible de ce connecter sur %s' % self.REALCACHE )
            pass
Example #12
    def __init__(self, dbPath=".", dbName="UbuWeb.db", dbURL=None, updateURL=None, init=True, xbmc=True):
        # TODO
        # Make option to check status of database
        self.dbPath = os.path.join(dbPath, dbName)
        self.dbURL = dbURL
        self.updateURL = updateURL

        if init:
            if not os.path.exists(dbPath):
                os.makedirs(dbPath)
                if dbURL is not None:
                    self.getDB(self.dbURL)

                    self.db = sqlite.connect(self.dbPath)
                    self.db.text_factory = str
                else:
                    self.db = sqlite.connect(self.dbPath)
                    self.db.text_factory = str
                    self.createUbuWebDB()
                    self.parseFilmListingPage(numLinks=None, startLink=16)
            else:
                self.db = sqlite.connect(self.dbPath)
                self.db.text_factory = str
                if self.updateURL is not None:
                    self.checkDB()
Example #13
    def __conectar(self):
        """Private helper that connects to the database."""

        # If the database file already exists, simply connect to it.
        if os.path.exists(self.__fichero):
            return sqlite.connect(self.__fichero)

        # Otherwise create the database and the BUZONEOS table.
        else:
            conexion = sqlite.connect(self.__fichero)
            cursor = conexion.cursor()

            TABLA = """
                    CREATE TABLE BUZONEOS (
                        ID INTEGER PRIMARY KEY,
                        BUZONEOSCODIGOPOSTAL INT(9),
                        BUZONEOSCLIENTE VARCHAR(30),
                        BUZONEOSZONA VARCHAR(30),
                        NUMEROFOLLETOSBUZONEOS VARCHAR(30),
                        TIPOBUZONEO VARCHAR(30),
                        FECHABUZONEO VARCHAR(30)
                        )"""
            cursor.execute(TABLA)
            # Commit so the schema is persisted.
            conexion.commit()
            # Return the open connection.
            return conexion
Example #14
    def __init__(self, sqlite_file, maf_file, target_seqname):
        """Indexes or loads the index of a MAF file"""

        import os
        
        try:
            from sqlite3 import dbapi2 as _sqlite
        except ImportError:
            from Bio import MissingPythonDependencyError
            raise MissingPythonDependencyError("Requires sqlite3, which is "
                                               "included Python 2.5+")
        
        self._target_seqname = target_seqname
        self._maf_file = maf_file
        
        # make sure maf_file exists, then open it up
        if os.path.isfile(self._maf_file):
            self._maf_fp = open(self._maf_file, "r")
        else:
            raise ValueError("Error opening %s -- file not found" % (self._maf_file,))
        
        # if sqlite_file exists, use the existing db, otherwise index the file
        if os.path.isfile(sqlite_file):
            self._con = _sqlite.connect(sqlite_file)
            self._record_count = self.__check_existing_db()
        else:
            self._con = _sqlite.connect(sqlite_file)
            self._record_count = self.__make_new_index()
            
        # lastly, setup a MafIterator pointing at the open maf_file
        self._mafiter = MafIterator(self._maf_fp)
Example #15
def deleteFavourite(meta, content):
    try:
        meta = json.loads(meta)
        imdb = meta['imdb']
        if 'title' in meta: title = meta['title']
        if 'tvshowtitle' in meta: title = meta['tvshowtitle']

        try:
            dbcon = database.connect(control.favouritesFile)
            dbcur = dbcon.cursor()
            dbcur.execute("DELETE FROM %s WHERE id = '%s'" % (content, imdb))
            dbcon.commit()
        except:
            pass
        try:
            dbcon = database.connect(control.databaseFile)
            dbcur = dbcon.cursor()
            dbcur.execute("DELETE FROM favourites WHERE imdb_id = '%s'" % imdb)
            dbcon.commit()
        except:
            pass

        control.refresh()
        control.infoDialog(control.lang(30412).encode('utf-8'), heading=title)
    except:
        return
Example #16
    def __conectar(self):
        """Private helper that connects to the database."""

        # If the database file already exists, simply connect to it.
        if os.path.exists(self.__fichero):
            return sqlite.connect(self.__fichero)

        # Otherwise create the database and the ZONAS table.
        else:

            conexion = sqlite.connect(self.__fichero)
            cursor = conexion.cursor()
            TABLA = """
                    CREATE TABLE ZONAS (
                    ID INTEGER PRIMARY KEY,
                    CODIGOPOSTAL VARCHAR(20),
                    TIPOZONA VARCHAR(30),
                    CANTIDADFOLLETOS VARCHAR(20),
                    PRECIOPORMILLAR VARCHAR(30),
                    PRECIOTOTAL VARCHAR(20)
                    )"""
            cursor.execute(TABLA)
            # Commit so the schema is persisted.
            conexion.commit()
            # Return the open connection.
            return conexion
Example #17
def update_musicbrainzid( type, info ):
    log( "Updating MusicBrainz ID", xbmc.LOGDEBUG )
    artist_id = ""
    try:
        if type == "artist":  # available data info["local_id"], info["name"], info["distant_id"]
            name, artist_id, sortname = get_musicbrainz_artist_id( info["name"] )
            conn = sqlite3.connect(addon_db)
            c = conn.cursor()
            c.execute('UPDATE alblist SET musicbrainz_artistid="%s" WHERE artist="%s"' % (artist_id, info["name"]) )
            try:
                c.execute('UPDATE lalist SET musicbrainz_artistid="%s" WHERE name="%s"' % (artist_id, info["name"]) )
            except:
                pass
            conn.commit()
            c.close()
        if type == "album":
            album_id = get_musicbrainz_album( info["title"], info["artist"], 0 )["id"] 
            conn = sqlite3.connect(addon_db)
            c = conn.cursor()
            c.execute("""UPDATE alblist SET musicbrainz_albumid='%s' WHERE title='%s'""" % (album_id, info["title"]) )
            conn.commit()
            c.close()
    except:
        print_exc()
    return artist_id
Example #18
 def __init__(self,db_filename):
     self.db_filename = db_filename
     if not os.path.isfile(self.db_filename):
         self.con = sqlite.connect(self.db_filename)
         self.con.execute("create table data (key PRIMARY KEY,value)")
     else:
         self.con = sqlite.connect(self.db_filename)
Example #19
def makeSQliteTables():
    # 2008-05-29 rdc@mote
    # This is somewhat tricky: we first set up the SQLite and MySQL
    # connections, then grab all the counties from the county table on
    # coolprime and push them into a SQLite db. Once we have the counties,
    # we iterate through each county and grab its column headers so we can
    # build a 'create xxxxx_county_reports' statement based on the structure
    # of the table on coolprime. That way the table structure can be changed
    # at will and this code will adapt. (A standalone sketch of this
    # DESCRIBE-to-CREATE mapping follows the function.)

    #SQLite
    global beachreports
    if(DEBUG == 0):
        beachreports = sqlite.connect(":memory:", isolation_level=None)
    
    else:
        beachreports = sqlite.connect("./beachreports.db", isolation_level=None)
    
    cur1 = beachreports.cursor()
    cur2 = beachreports.cursor()
    cur1.execute('create table counties (id integer primary key, county varchar(25))')
    cur1.execute('create table subscribers (id integer primary key, email varchar(25), county varchar(25))')

    #MySQL
    global db
    db = MySQLdb.connect(host=dbHost, user=dbUser, passwd=dbPass,db=dbDB)
    cursor1 = db.cursor()
    cursor2 = db.cursor()
    countyQuery = "SELECT county from counties ORDER BY county ASC"
    result = cursor1.execute(countyQuery)
    if (result != 0):
        for (myCounty) in cursor1:
            createStatement = ""
            county = "%s" % myCounty
            county_reports = "%s_county_reports" % county
            #print "County: %s" % county
            cur2.execute("INSERT into counties(county) VALUES(?)", (county, ))
            #get column headers
            columnQuery = "DESCRIBE %s" % county_reports
            result = cursor2.execute(columnQuery)
            index = 1
            if (result != 0):
                for (header) in cursor2:
                    if(index < result):
                       createStatement = createStatement + "%s %s," % (header[0],header[1])
                    else:
                       createStatement = createStatement + "%s %s" % (header[0],header[1])
                    index += 1
                createStatement = "create table " + county_reports + \
                "(" "id integer primary key," + createStatement + ")"
            if(DEBUG > 1):
                print "makeSQLiteTables(): createStatement = %s\n" % createStatement
            cur2.execute(createStatement)
    cur1.close()
    cur2.close()
    cursor1.close()
    cursor2.close()
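A standalone sketch of the DESCRIBE-to-CREATE mapping described in the comment at the top of makeSQliteTables(), with the MySQL DESCRIBE output replaced by a hardcoded list of (name, type) pairs so it runs without a MySQL server; the table and column names are hypothetical:

import sqlite3

def create_statement_from_describe(table, columns):
    # columns: list of (column_name, column_type) pairs, as returned by DESCRIBE.
    body = ", ".join("%s %s" % (name, ctype) for name, ctype in columns)
    return "create table %s (id integer primary key, %s)" % (table, body)

describe_rows = [("beach", "varchar(50)"), ("status", "varchar(10)"), ("reported", "date")]
sql = create_statement_from_describe("alameda_county_reports", describe_rows)
sqlite3.connect(":memory:").execute(sql)  # SQLite accepts the MySQL type names as-is
print(sql)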
Example #20
 def _initialiseDB(self):
     from sqlite3 import dbapi2 as sqlite
     self._dbFile = os.path.join(self._g.DATA_PATH, 'art.db')
     self._db = sqlite.connect(self._dbFile)
     self._createDB()
     self._menuFile = os.path.join(self._g.DATA_PATH, 'menu-%s.db' % self._g.MarketID)
     self._menuDb = sqlite.connect(self._menuFile)
Example #21
 def connect(self):
     con = None
     if not os.path.isfile(self.dict_path):
         con = sqlite.connect(self.dict_path)
         con.execute("create table data (key PRIMARY KEY,value)")
     else:
         con = sqlite.connect(self.dict_path)
     return con
Example #22
	def __init__(self,db_address="qad_rssreader.db"):
		if (os.path.exists(db_address)):
			self.conn = sqlite.connect(db_address)
			self.cursor = self.conn.cursor()
		else:
			self.conn = sqlite.connect(db_address)
			self.cursor = self.conn.cursor()
			self.default_db()
Example #23
def UpgradePlayerBuffer(publicName,buffer):
    global PCONN,NEWPCONN,LASTCHARACTER
    if LASTCHARACTER != publicName[0]:
        LASTCHARACTER = publicName[0]
        print "Upgrading Players: %s"%LASTCHARACTER
    
    dbuffer = zlib.decompress(buffer)
    f = file("./data/tmp/pbuffer","wb")
    f.write(dbuffer)
    f.close()
    
    PCONN = sqlite.connect("./data/tmp/pbuffer")
    if os.path.exists("./data/tmp/nbuffer"):
        os.remove("./data/tmp/nbuffer")
    
    #get character names
    
    #cursor = PCONN.cursor()
    #cursor.execute("SELECT name from character")
    #for name in cursor.fetchall():
    #    n = (name[0],)
    #    assert n not in CHARACTER_NAMES, "Character name collision: %s -> %s"%(publicName,n[0])
    #    CHARACTER_NAMES.append(n)
    
    NEWPCONN = sqlite.connect("./data/tmp/nbuffer",isolation_level=None)
    nc = NEWPCONN.cursor()
    nc.execute("BEGIN TRANSACTION;")
    nc.executescript(CREATE_PLAYER_TABLE_SQL)
    
    map(DoTable, PLAYER_TABLES)
    
    # Create an alias 'content' for the 'item' translation entry.
    # Needed for the ItemContainerContent class in item.py.
    try:
        TTRANS['content'] = TTRANS['item']
    except KeyError:
        pass
    
    # Do translation.
    map(DoTranslation, PLAYER_TABLES)
    
    nc.execute("END TRANSACTION;")
    nc.close()
    PCONN.close()
    NEWPCONN.close()
    PCONN = None
    NEWPCONN = None
    
    f = file("./data/tmp/nbuffer","rb")
    buffer = f.read()
    f.close()
    
    buffer = zlib.compress(buffer)
    buffer = sqlite.Binary(buffer)
    
    cursor = NEWCONN.cursor()
    cursor.executemany("INSERT INTO player_buffer VALUES(?,?,?);",((None,publicName,buffer),))
    cursor.close()
Example #24
    def service(self):
        try:
            control.makeFile(control.dataPath)
            dbcon = database.connect(control.libcacheFile)
            dbcur = dbcon.cursor()
            dbcur.execute("CREATE TABLE IF NOT EXISTS service (""setting TEXT, ""value TEXT, ""UNIQUE(setting)"");")
            dbcur.execute("SELECT * FROM service WHERE setting = 'last_run'")
            fetch = dbcur.fetchone()
            if fetch == None:
                serviceProperty = "1970-01-01 23:59:00.000000"
                dbcur.execute("INSERT INTO service Values (?, ?)", ('last_run', serviceProperty))
                dbcon.commit()
            else:
                serviceProperty = str(fetch[1])
            dbcon.close()
        except:
            try: return dbcon.close()
            except: return

        try: control.window.setProperty(self.property, serviceProperty)
        except: return

        while (not xbmc.abortRequested):
            try:
                serviceProperty = control.window.getProperty(self.property)

                t1 = datetime.timedelta(hours=6)
                t2 = datetime.datetime.strptime(serviceProperty, '%Y-%m-%d %H:%M:%S.%f')
                t3 = datetime.datetime.now()

                check = abs(t3 - t2) > t1
                if check == False: raise Exception()

                if (control.player.isPlaying() or control.condVisibility('Library.IsScanningVideo')): raise Exception()

                serviceProperty = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')

                control.window.setProperty(self.property, serviceProperty)

                try:
                    dbcon = database.connect(control.libcacheFile)
                    dbcur = dbcon.cursor()
                    dbcur.execute("CREATE TABLE IF NOT EXISTS service (""setting TEXT, ""value TEXT, ""UNIQUE(setting)"");")
                    dbcur.execute("DELETE FROM service WHERE setting = 'last_run'")
                    dbcur.execute("INSERT INTO service Values (?, ?)", ('last_run', serviceProperty))
                    dbcon.commit()
                    dbcon.close()
                except:
                    try: dbcon.close()
                    except: pass

                if not control.setting('service_update') == 'true': raise Exception()
                info = control.setting('service_notification') or 'true'
                self.update(None, info=info)
            except:
                pass

            control.sleep(10000)
Example #25
    def __init__(self, dictName, data_type="default"):
        self.db_filename = "%s.sqlite" % dictName
        self.data_type = data_type

        if not os.path.isfile(self.db_filename):
            self.con = sqlite.connect(self.db_filename)
            self.con.execute("create table data (key PRIMARY KEY,value)")
        else:
            self.con = sqlite.connect(self.db_filename, timeout=10)
Example #26
def SaveData(SQLStatement): #8888
    if DB == 'mysql':
        db = database.connect(DB_NAME, DB_USER, DB_PASS, DB_ADDRESS, buffered=True)
    else:
        db = database.connect( db_dir )
    cursor = db.cursor()
    cursor.execute(SQLStatement)
    db.commit()
    db.close()
Example #27
 def get_conn(self, path):
     conn = sqlite.connect(path)  # open a connection to the database file
     if os.path.exists(path) and os.path.isfile(path):
         # The database file exists, so return the file-backed connection.
         return conn
     else:
         # Otherwise, return an in-memory connection.
         conn = None
         return sqlite.connect(":memory:")
Example #28
    def __init__(self, layers, styles, **kwargs):
        self.srs = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null"
        self.name = cache_entry_name(
            layers, self.srs, styles,
            bgcolor=kwargs.get('bgcolor', None),
            transparent=kwargs.get('transparent', True),
            query=kwargs.get('query', None)
        )
        self.cachename = self.name + '.mbtiles'
        self.layers = layers if not isinstance(layers, basestring) else [layers]
        self.styles = styles if not isinstance(styles, basestring) else [styles]

        self.kwargs = kwargs
        e4326 = osr.SpatialReference()
        e3857 = osr.SpatialReference()
        e4326.ImportFromEPSG(4326)
        e3857.ImportFromEPSG(3857)
        self.crx = osr.CoordinateTransformation(e4326, e3857)

        paths = os.path.split(self.cachename)[:-1]
        p = ''
        for name in paths:
            p += '/' + name
            if not os.path.exists(p):
                os.mkdir(p)

        if os.path.exists(self.cachename):
            conn = db.connect(self.cachename)
        else:
            conn = db.connect(self.cachename)
            cursor = conn.cursor()
            cursor.executescript("""
                    BEGIN TRANSACTION;
                    CREATE TABLE android_metadata (locale text);
                    CREATE TABLE grid_key (grid_id TEXT,key_name TEXT);
                    CREATE TABLE grid_utfgrid (grid_id TEXT,grid_utfgrid BLOB);
                    CREATE TABLE keymap (key_name TEXT,key_json TEXT);
                    CREATE TABLE images (tile_data blob,tile_id text);
                    CREATE TABLE map (zoom_level INTEGER,tile_column INTEGER,tile_row INTEGER,tile_id TEXT,grid_id TEXT);
                    CREATE TABLE metadata (name text,value text);
                    CREATE VIEW tiles AS SELECT map.zoom_level AS zoom_level,map.tile_column AS tile_column,map.tile_row AS tile_row,images.tile_data AS tile_data FROM map JOIN images ON images.tile_id = map.tile_id ORDER BY zoom_level,tile_column,tile_row;
                    CREATE VIEW grids AS SELECT map.zoom_level AS zoom_level,map.tile_column AS tile_column,map.tile_row AS tile_row,grid_utfgrid.grid_utfgrid AS grid FROM map JOIN grid_utfgrid ON grid_utfgrid.grid_id = map.grid_id;
                    CREATE VIEW grid_data AS SELECT map.zoom_level AS zoom_level,map.tile_column AS tile_column,map.tile_row AS tile_row,keymap.key_name AS key_name,keymap.key_json AS key_json FROM map JOIN grid_key ON map.grid_id = grid_key.grid_id JOIN keymap ON grid_key.key_name = keymap.key_name;
                    CREATE UNIQUE INDEX grid_key_lookup ON grid_key (grid_id,key_name);
                    CREATE UNIQUE INDEX grid_utfgrid_lookup ON grid_utfgrid (grid_id);
                    CREATE UNIQUE INDEX keymap_lookup ON keymap (key_name);
                    CREATE UNIQUE INDEX images_id ON images (tile_id);
                    CREATE UNIQUE INDEX map_index ON map (zoom_level, tile_column, tile_row);
                    CREATE UNIQUE INDEX name ON metadata (name);
                    END TRANSACTION;
                    ANALYZE;
                    VACUUM;
               """)
            cursor.close()

        self.cache = conn
Example #29
	def videoLibraryConnect(self):
		try:	
			import mysql.connector as database
			print "Loading mysql.connector as DB engine"
			self.DBH = database.connect(self.dbname, self.username, self.password, self.host, buffered=True)
		except:
			import MySQLdb as database
			print "Loading MySQLdb as DB engine"
			self.DBH=database.connect(host=self.host,user=self.username,passwd=self.password,db=self.dbname)
		self.DBC = self.DBH.cursor()
Example #30
 def _open(self):
     #Open the DB and set options
     if self.options.get("detect_types",False):
         self.db = sqlite.connect(self.filename, detect_types=sqlite.PARSE_DECLTYPES)
     else:
         self.db = sqlite.connect(self.filename)
     self.db.isolation_level = self.options.get("isolation_level",None)
     if self.options.get("row_by_name",False) == True:
         self.db.row_factory = sqlite.Row
     self.cur = self.db.cursor()
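A small self-contained illustration of what those two options change: detect_types=sqlite3.PARSE_DECLTYPES converts declared column types (here timestamp) back into Python objects, and sqlite3.Row gives access by column name. This is a sketch using only the standard library; the events table is made up:

import datetime
import sqlite3

db = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
db.row_factory = sqlite3.Row
db.execute("create table events (name text, created timestamp)")
# The built-in datetime adapter/converter is deprecated in Python 3.12+ but still works.
db.execute("insert into events values (?, ?)", ("backup", datetime.datetime(2020, 1, 1, 12, 0)))
row = db.execute("select * from events").fetchone()
print(row["name"], type(row["created"]))  # backup <class 'datetime.datetime'>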
Example #31
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import jinja2
from sqlite3 import dbapi2 as sqlite3
from sqlite3 import IntegrityError
from MyPA.ipaclient import ipaclient
from MyPA.utils import gen_randtoken, validate_email

conf_file = "/etc/MyPA/api.ini"
conf = ConfigParser()
conf.read(conf_file)

jenv = jinja2.Environment(
    loader=jinja2.FileSystemLoader(conf.get('email', 'template_dir')))
db = sqlite3.connect(conf.get('db', 'path'))
db.row_factory = sqlite3.Row
ipaclient = ipaclient(conf.get('ipa', 'host'), conf.get('ipa', 'user'),
                      conf.get('ipa', 'pass'))


def ipa_user_info(username):
    return ipaclient.user_info(username)


def ipa_user_exists(username):
    return ipaclient.user_exists(username)


def ipa_user_create(username, email, gn, sn, password):
    return ipaclient.user_create(username, email, gn, sn, password)
Example #32
    import cPickle as pickle

import numpy as np
import json
import time
import dateutil.parser
import argparse
from random import shuffle
import re
import os
from sklearn import svm

import utils

DATABASE = 'as.db'
sqldb = sqlite3.connect(DATABASE)
sqldb.row_factory = sqlite3.Row  # to return dicts rather than tuples


def query_db(query, args=(), one=False):
    """Queries the database and returns a list of dictionaries."""
    cur = sqldb.execute(query, args)
    rv = cur.fetchall()
    return (rv[0] if rv else None) if one else rv


users = query_db('''select * from user''')
for u in users:
    print(u)
print('number of users: ', len(users))
Example #33
def get_watched_items(db_type, page_no, letter, passed_list=[]):
    import ast
    from resources.lib.modules.nav_utils import paginate_list
    from resources.lib.modules.utils import title_key, to_utf8
    watched_indicators = settings.watched_indicators()
    limit = 40
    if db_type == 'tvshow':
        from resources.lib.indexers.tvshows import aired_episode_number_tvshow
        if watched_indicators in (1, 2):
            if not passed_list:
                from resources.lib.modules.trakt import trakt_indicators_tv
                data = trakt_indicators_tv()
                data = sorted(data, key=lambda tup: title_key(tup[3]))
                original_list = [{
                    'media_id': i[0],
                    'title': i[3]
                } for i in data if i[1] == len(i[2])]
            else:
                original_list = ast.literal_eval(passed_list)
        else:
            if not passed_list:
                from resources.lib.indexers.tvshows import make_fresh_tvshow_meta
                settings.check_database(WATCHED_DB)
                dbcon = database.connect(WATCHED_DB)
                dbcur = dbcon.cursor()
                dbcur.execute(
                    "SELECT media_id, title FROM watched_status WHERE db_type = ?",
                    ('episode', ))
                rows = dbcur.fetchall()
                dbcon.close()
                watched_list = list(set(to_utf8([(i[0], i[1]) for i in rows])))
                data = []
                for item in watched_list:
                    watched = get_watched_status_tvshow(
                        item[0],
                        aired_episode_number_tvshow(
                            make_fresh_tvshow_meta('tmdb_id', item[0])))
                    if watched[0] == 1: data.append(item)
                    else: pass
                data = sorted(data, key=lambda tup: title_key(tup[1]))
                original_list = [{
                    'media_id': i[0],
                    'title': i[1]
                } for i in data]
            else:
                original_list = ast.literal_eval(passed_list)
    else:
        if watched_indicators in (1, 2):
            if not passed_list:
                from resources.lib.modules.trakt import trakt_indicators_movies
                data = trakt_indicators_movies()
                data = sorted(data, key=lambda tup: title_key(tup[1]))
                original_list = [{
                    'media_id': i[0],
                    'title': i[1]
                } for i in data]
            else:
                original_list = ast.literal_eval(passed_list)

        else:
            if not passed_list:
                settings.check_database(WATCHED_DB)
                dbcon = database.connect(WATCHED_DB)
                dbcur = dbcon.cursor()
                dbcur.execute(
                    "SELECT media_id, title FROM watched_status WHERE db_type = ?",
                    (db_type, ))
                rows = dbcur.fetchall()
                dbcon.close()
                data = to_utf8([(i[0], i[1]) for i in rows])
                data = sorted(data, key=lambda tup: title_key(tup[1]))
                original_list = [{
                    'media_id': i[0],
                    'title': i[1]
                } for i in data]
            else:
                original_list = ast.literal_eval(passed_list)
    paginated_list, total_pages = paginate_list(original_list, page_no, letter,
                                                limit)
    return paginated_list, original_list, total_pages, limit
Example #34
def Update():
    import downloader
    dp = xbmcgui.DialogProgress()
    dp.create("Mikeys Karaoke","",'Building Database Please Wait', ' ')
    downloader.download(K_db, db_dir,dp)
    
if os.path.exists(db_dir)==False:
    link=OPEN_URL(updatetxt)
    match=re.compile('id=<(.+?)>').findall (link)
    dp = xbmcgui.Dialog()
    dp.ok("Mikeys Karaoke","",'There is a New Database Update', 'Please Wait')
    Update()
    ADDON.setSetting('id',match[0])     
       
        
db = database.connect(db_dir)
db.execute('CREATE TABLE IF NOT EXISTS tracklist (sunfly_name, number, artist, track, iconimage, url)')
db.execute('CREATE TABLE IF NOT EXISTS favourites (track_name, artist, track, iconimage, url)')
db.commit()
db.close()

def GRABBER(type,mode,item):
    db = database.connect( db_dir );cur = db.cursor()
    if type == 1:#EXACT MATCH ALL
        item = '%'+item+'%'
        try: cur.execute('SELECT * FROM tracklist WHERE %s = "%s"' %(mode,item))
        except:pass
        cached = cur.fetchall()  # fetch results only after the query has run
    elif type == 2: #EXACT MATCH ONE
        item = '%'+item+'%'
        try: cur.execute('SELECT * FROM tracklist WHERE %s = "%s"' %(mode,item))
Example #35
def connect_db():
    '''Connects to the specific database.'''
    rv = sqlite3.connect(session['database'])
    rv.row_factory = sqlite3.Row
    return rv
Example #36
 def connect_db(self):
     return sqlite3.connect(self.config['DATABASE'])
Example #37
File: db.py Project: rye761/anki
 def __init__(self, path: str, timeout: int = 0) -> None:
     self._db = sqlite.connect(path, timeout=timeout)
     self._db.text_factory = self._text_factory
     self._path = path
     self.echo = os.environ.get("DBECHO")
     self.mod = False
Example #38
def _get_connection_meta():
    control.makeFile(control.dataPath)
    conn = db.connect(control.metacacheFile)
    conn.row_factory = _dict_factory
    return conn
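The _dict_factory helper referenced above is not part of the snippet; a typical implementation, following the row-factory pattern from the sqlite3 documentation (an assumption about what this project's helper does), would be:

import sqlite3

def _dict_factory(cursor, row):
    # Pair each column name from cursor.description with its value.
    return {col[0]: value for col, value in zip(cursor.description, row)}

conn = sqlite3.connect(":memory:")
conn.row_factory = _dict_factory
print(conn.execute("select 1 as id, 'meta' as kind").fetchone())  # {'id': 1, 'kind': 'meta'}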
Example #39
def connect_db():
    """Return a new connection to the database."""
    return sqlite3.connect(app.config['DATABASE'])
Example #40
    def __init__(self, id):
        """Constructor that generates an invoice for the pet shop.

        Builds an invoice with the customer's information and the list of
        products purchased, together with the total price to pay.

        Parameters:
            :param id: id of the invoice to generate

        Exceptions:
            -dbapi2.DatabaseError
        """
        idFactura = id
        factura = []

        factura.append(list(['', '', 'TIENDA DE MASCOTAS', '', '']))
        try:
            ### Connect to the database
            baseDatos = dbapi2.connect("BaseDeDatos.dat")
            cursor = baseDatos.cursor()
            detalles = cursor.execute(
                "select nombreCliente,direccion,telefono,correo from facturasClientes where idFactura='"
                + idFactura + "'")

            for cliente in detalles:
                factura.append([
                    'Nombre Cliente: ', cliente[0], '', 'Nº Factura: ',
                    idFactura
                ])
                factura.append(['Direccion: ', cliente[1], '', '', ''])
                factura.append(['Telefono: ', cliente[2], '', '', ''])
                factura.append(['Correo: ', cliente[3], '', '', ''])

        except (dbapi2.DatabaseError):
            print("ERROR EN LA BASE DE DATOS")
        finally:
            print("Cerramos la conexion a la BD")
            cursor.close()
            baseDatos.close()

        factura.append(list(['', '', '', '', '']))
        factura.append(
            ['CODIGO', 'PRODUCTO', 'CANTIDAD', 'PRECIO/UNI', 'PRECIO'])

        try:
            ### Connect to the database
            baseDatos = dbapi2.connect("BaseDeDatos.dat")
            cursor = baseDatos.cursor()

            listaProductos = []
            total = 0
            productos = cursor.execute(
                "select id,nombre,precioUnidad from productos")
            for producto in productos:
                listaProductos.append([producto[0], producto[1], producto[2]])

            detalesFactura = cursor.execute(
                "select idProducto,cantidad from facturasInfo where idFactura='"
                + idFactura + "'")
            for pro in detalesFactura:
                for prod in listaProductos:
                    if (prod[0] == pro[0]):
                        precio = int(pro[1]) * float(prod[2])
                        factura.append(
                            [prod[0], prod[1], pro[1], prod[2], precio])
                total = total + precio
            factura.append(['', '', '', 'PRECIO TOTAL:', str(total) + " €"])
        except (dbapi2.DatabaseError):
            print("ERROR EN LA BASE DE DATOS")
        finally:
            print("Cerramos la conexion a la BD")
            cursor.close()
            baseDatos.close()

        print(factura)

        doc = SimpleDocTemplate("InformeFactura.pdf", pagesize=A4)

        guion = []

        taboa = Table(factura, colWidths=90, rowHeights=30)
        taboa.setStyle(
            TableStyle([('TEXTCOLOR', (0, 0), (-1, -1), colors.darkgreen),
                        ('TEXTCOLOR', (0, 1), (-1, -1), colors.black),
                        ('ALIGN', (2, 5), (-1, -1), 'RIGHT'),
                        ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
                        ('BOX', (0, 1), (-1, 4), 1, colors.black),
                        ('BOX', (0, 6), (-1, -2), 1, colors.black),
                        ('INNERGRID', (0, 6), (-1, -2), 0.5, colors.grey)]))

        guion.append(taboa)
        guion.append(PageBreak())

        doc.build(guion)
Example #41
    def main(self, env):

        if (env == 'urlresolver'):
            addon('script.module.urlresolver').openSettings()
            return

        elif (env == 'metahandler'):
            addon('script.module.metahandler').openSettings()
            return

        elif (env == 'changelog'):
            try:
                sUrl = 'https://raw.githubusercontent.com/Kodi-vStream/venom-xbmc-addons/master/plugin.video.vstream/changelog.txt'
                oRequest = urllib2.Request(sUrl)
                oResponse = urllib2.urlopen(oRequest)
                sContent = oResponse.read()
                self.TextBoxes('vStream Changelog', sContent)
            except:
                self.DIALOG.VSerror("%s,%s" % (self.ADDON.VSlang(30205), sUrl))
            return

        elif (env == 'soutient'):
            try:
                sUrl = 'https://raw.githubusercontent.com/Kodi-vStream/venom-xbmc-addons/master/plugin.video.vstream/soutient.txt'
                oRequest = urllib2.Request(sUrl)
                oResponse = urllib2.urlopen(oRequest)
                sContent = oResponse.read()
                self.TextBoxes('vStream Soutient', sContent)
            except:
                self.DIALOG.VSerror("%s,%s" % (self.ADDON.VSlang(30205), sUrl))
            return

        elif (env == 'addon'):
            if self.DIALOG.VSyesno("Êtes-vous sûr ?"):
                #cached_Cache = cConfig().getFileCache()
                #cached_Cache = xbmc.translatePath(cached_Cache).decode("utf-8")
                cached_Cache = "special://home/userdata/addon_data/plugin.video.vstream/video_cache.db"
                #self.ClearDir2(cached_Cache,True)
                try:
                    xbmcvfs.delete(cached_Cache)
                    self.DIALOG.VSinfo(
                        'Clear Addon Cache, Successful[CR](Important relancer vStream)'
                    )
                except:
                    self.DIALOG.VSerror('Clear Addon Cache, Error')

            return

        elif (env == 'clean'):
            liste = [
                'Historiques', 'Lecture en cours', 'Marqués vues',
                'Marque-Pages', 'Téléchargements'
            ]
            ret = self.DIALOG.select('BDD à supprimer', liste)
            #cached_DB = cConfig().getFileDB()
            cached_DB = "special://home/userdata/addon_data/plugin.video.vstream/vstream.db"
            #important seul xbmcvfs peux lire le special
            cached_DB = xbmc.translatePath(cached_DB).decode("utf-8")

            sql_drop = ""

            if ret > -1:

                if ret == 0:
                    sql_drop = "DROP TABLE history"
                elif ret == 1:
                    sql_drop = "DROP TABLE resume"
                elif ret == 2:
                    sql_drop = "DROP TABLE watched"
                elif ret == 3:
                    sql_drop = "DROP TABLE favorite"
                elif ret == 4:
                    sql_drop = "DROP TABLE download"

                try:
                    db = sqlite.connect(cached_DB)
                    dbcur = db.cursor()
                    dbcur.execute(sql_drop)
                    db.commit()
                    dbcur.close()
                    db.close()
                    self.DIALOG.VSok(
                        "Suppression BDD, Successful[CR](Important relancer vStream)"
                    )
                except:
                    self.DIALOG.VSerror("Suppresion BDD, Error")

            return

        elif (env == 'xbmc'):
            if self.DIALOG.VSyesno('Êtes-vous sûr ?'):
                #temp = xbmc.translatePath('special://temp/').decode("utf-8")
                path = "special://temp/"
                #self.ClearDir(temp,True)
                try:
                    xbmcvfs.rmdir(path, True)
                    self.DIALOG.VSok(
                        'Clear Temp Cache, Successful[CR](Important relancer Kodi)'
                    )
                except:
                    self.DIALOG.VSerror('Clear Temp Cache, Error')
            return

        elif (env == 'fi'):
            if self.DIALOG.VSyesno('Êtes-vous sûr ?'):
                #path = xbmc.translatePath('special://temp/').decode("utf-8")
                path = "special://temp/archive_cache/"
                try:
                    xbmcvfs.rmdir(path, True)
                    self.DIALOG.VSok(
                        'Clear Archive_cache Cache, Successful[CR](Important relancer Kodi)'
                    )
                except:
                    self.DIALOG.VSerror('Clear Archive_cache Cache, Error')
                # filenames = next(os.walk(path))[2]
                # for i in filenames:
                #     if ".fi" in i:
                #         os.remove(os.path.join(path, i))
            return

        elif (env == 'uplog'):
            if self.DIALOG.VSyesno('Êtes-vous sûr ?'):
                #path = xbmc.translatePath('special://logpath/').decode("utf-8")
                path = "special://logpath/kodi.log"
                UA = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0'
                headers = {'User-Agent': UA}
                #filenames = next(os.walk(path))[2]
                #for i in filenames:
                if xbmcvfs.exists(path):
                    post_data = {}
                    cUrl = 'http://slexy.org/index.php/submit'
                    #logop = open(path + i,'rb')
                    logop = xbmcvfs.File(path, 'rb')
                    result = logop.read()
                    logop.close()
                    post_data['raw_paste'] = result
                    post_data['author'] = 'kodi.log'
                    post_data['language'] = 'text'
                    post_data['permissions'] = 1  #private
                    post_data['expire'] = 259200  #3j
                    post_data['submit'] = 'Submit+Paste'
                    request = urllib2.Request(cUrl,
                                              urllib.urlencode(post_data),
                                              headers)
                    reponse = urllib2.urlopen(request)
                    code = reponse.geturl().replace('http://slexy.org/view/',
                                                    '')
                    reponse.close()
                    self.ADDON.setSetting('service_log', code)
                    self.DIALOG.VSok(
                        'Ce code doit être transmis lorsque vous ouvrez une issue veuillez le noter:'
                        + '  ' + code)
            return

        elif (env == 'search'):

            from resources.lib.handler.pluginHandler import cPluginHandler
            valid = '[COLOR green][x][/COLOR]'

            class XMLDialog(xbmcgui.WindowXMLDialog):

                ADDON = addon()

                def __init__(self, *args, **kwargs):
                    xbmcgui.WindowXMLDialog.__init__(self)
                    pass

                def onInit(self):

                    self.container = self.getControl(6)
                    self.button = self.getControl(5)
                    self.getControl(3).setVisible(False)
                    self.getControl(1).setLabel(self.ADDON.VSlang(30094))
                    self.button.setLabel('OK')
                    listitems = []
                    oPluginHandler = cPluginHandler()
                    aPlugins = oPluginHandler.getAllPlugins()

                    for aPlugin in aPlugins:
                        #teste si deja dans le dsip
                        sPluginSettingsName = 'plugin_' + aPlugin[1]
                        bPlugin = self.ADDON.getSetting(sPluginSettingsName)

                        #icon = os.path.join(unicode(cConfig().getRootArt(), 'utf-8'), 'sites', aPlugin[1]+'.png')
                        icon = "special://home/addons/plugin.video.vstream/resources/art/sites/%s.png" % aPlugin[
                            1]
                        stitle = aPlugin[0].replace(
                            '[COLOR violet]',
                            '').replace('[COLOR orange]',
                                        '').replace('[/COLOR]', '')
                        if (bPlugin == 'true'):
                            stitle = ('%s %s') % (stitle, valid)
                        listitem = xbmcgui.ListItem(label=stitle,
                                                    label2=aPlugin[2])
                        listitem.setArt({'icon': icon, 'thumb': icon})
                        listitem.setProperty('Addon.Summary', aPlugin[2])
                        listitem.setProperty('sitename', aPlugin[1])
                        if (bPlugin == 'true'):
                            listitem.select(True)

                        listitems.append(listitem)
                    self.container.addItems(listitems)

                    self.setFocus(self.container)

                def onClick(self, controlId):
                    if controlId == 5:
                        self.close()
                        return
                    elif controlId == 99:
                        window = xbmcgui.Window(xbmcgui.getCurrentWindowId())
                        del window
                        self.close()
                        return
                    elif controlId == 7:
                        window = xbmcgui.Window(xbmcgui.getCurrentWindowId())
                        del window
                        self.close()
                        return
                    elif controlId == 6:
                        item = self.container.getSelectedItem()
                        if item.isSelected() == True:
                            label = item.getLabel().replace(valid, '')
                            item.setLabel(label)
                            item.select(False)
                            sPluginSettingsName = ('plugin_%s') % (
                                item.getProperty('sitename'))
                            self.ADDON.setSetting(sPluginSettingsName,
                                                  str('false'))
                        else:
                            label = ('%s %s') % (item.getLabel(), valid)
                            item.setLabel(label)
                            item.select(True)
                            sPluginSettingsName = ('plugin_%s') % (
                                item.getProperty('sitename'))
                            self.ADDON.setSetting(sPluginSettingsName,
                                                  str('true'))
                        return

                def onFocus(self, controlId):
                    self.controlId = controlId

                def _close_dialog(self):
                    self.close()

                # def onAction( self, action ):
                # if action.getId() in ( 9, 10, 92, 216, 247, 257, 275, 61467, 61448, ):
                # self.close()

            #path = cConfig().getAddonPath()
            path = "special://home/addons/plugin.video.vstream"
            wd = XMLDialog('DialogSelect.xml', path, "Default")
            wd.doModal()
            del wd
            return

        elif (env == 'thumb'):

            if self.DIALOG.VSyesno(
                    'Êtes-vous sûr ? Ceci effacera toutes les thumbnails '):

                text = False
                #path = xbmc.translatePath('special://userdata/Thumbnails/').decode("utf-8")
                path = "special://userdata/Thumbnails/"
                path_DB = "special://userdata/Database"
                try:
                    xbmcvfs.rmdir(path, True)
                    text = 'Clear Thumbnail Folder, Successful[CR]'
                except:
                    text = 'Clear Thumbnail Folder, Error[CR]'
                #for i in os.listdir(path):
                # folders = os.path.join(path, i).encode('utf-8')
                # if os.path.isdir(folders):
                #     p = next(os.walk(folders))[2]
                #     for x in p:
                #         os.remove(os.path.join(folders, x).encode('utf-8'))

                #filenames = next(os.walk(path2))[2]
                folder, items = xbmcvfs.listdir(path_DB)
                items.sort()
                for sItemName in items:
                    if "extures" in sItemName:
                        cached_Cache = "/".join([path_DB, sItemName])
                        try:
                            xbmcvfs.delete(cached_Cache)
                            text += 'Clear Thumbnail DB, Successful[CR]'
                        except:
                            text += 'Clear Thumbnail DB, Error[CR]'

                if text:
                    text = "%s (Important relancer Kodi)" % text
                    self.DIALOG.VSok(text)
                # for x in filenames:
                #     if "exture" in x:
                #         con = sqlite.connect(os.path.join(path2, x).encode('utf-8'))
                #         cursor = con.cursor()
                #         cursor.execute("DELETE FROM texture")
                #         con.commit()
                #         cursor.close()
                #         con.close()
            return

        else:
            return
        return
Example #42
def connect_db():
    ''' Connects to the TC database.'''
    rv = sqlite3.connect(app.config['DATABASE'])
    rv.row_factory = sqlite3.Row
    return rv
Example #43
 def __init__(self, dbname):
     self.con = sqlite.connect(dbname)
Example #44
    def _build_index(self):
        """Call from __init__ to create a new index (PRIVATE)."""
        index_filename = self._index_filename
        relative_path = self._relative_path
        filenames = self._filenames
        format = self._format
        key_function = self._key_function
        proxy_factory = self._proxy_factory
        max_open = self._max_open
        random_access_proxies = self._proxies

        if not format or not filenames:
            raise ValueError(
                "Filenames to index and format required to build %r" %
                index_filename)
        if not proxy_factory(format):
            raise ValueError("Unsupported format '%s'" % format)
        # Create the index
        con = _sqlite.connect(index_filename)
        self._con = con
        # print("Creating index")
        # Sqlite PRAGMA settings for speed
        con.execute("PRAGMA synchronous=OFF")
        con.execute("PRAGMA locking_mode=EXCLUSIVE")
        # Don't index the key column until the end (faster)
        # con.execute("CREATE TABLE offset_data (key TEXT PRIMARY KEY, "
        #             "offset INTEGER);")
        con.execute("CREATE TABLE meta_data (key TEXT, value TEXT);")
        con.execute("INSERT INTO meta_data (key, value) VALUES (?,?);",
                    ("count", -1))
        con.execute("INSERT INTO meta_data (key, value) VALUES (?,?);",
                    ("format", format))
        con.execute("INSERT INTO meta_data (key, value) VALUES (?,?);",
                    ("filenames_relative_to_index", "True"))
        # TODO - Record the alphabet?
        # TODO - Record the file size and modified date?
        con.execute("CREATE TABLE file_data (file_number INTEGER, name TEXT);")
        con.execute("CREATE TABLE offset_data (key TEXT, "
                    "file_number INTEGER, offset INTEGER, length INTEGER);")
        count = 0
        for i, filename in enumerate(filenames):
            # Default to storing as an absolute path,
            f = os.path.abspath(filename)
            if not os.path.isabs(filename) and not os.path.isabs(
                    index_filename):
                # Since user gave BOTH filename & index as relative paths,
                # we will store this relative to the index file even though
                # if it may now start ../ (meaning up a level)
                # Note for cross platform use (e.g. shared drive over SAMBA),
                # convert any Windows slash into Unix style for rel paths.
                f = os.path.relpath(filename,
                                    relative_path).replace(os.path.sep, "/")
            elif (os.path.dirname(os.path.abspath(filename)) +
                  os.path.sep).startswith(relative_path + os.path.sep):
                # Since sequence file is in same directory or sub directory,
                # might as well make this into a relative path:
                f = os.path.relpath(filename,
                                    relative_path).replace(os.path.sep, "/")
                assert not f.startswith("../"), f
            # print("DEBUG - storing %r as [%r] %r" % (filename, relative_path, f))
            con.execute(
                "INSERT INTO file_data (file_number, name) VALUES (?,?);",
                (i, f))
            random_access_proxy = proxy_factory(format, filename)
            if key_function:
                offset_iter = ((key_function(k), i, o, l)
                               for (k, o, l) in random_access_proxy)
            else:
                offset_iter = ((k, i, o, l)
                               for (k, o, l) in random_access_proxy)
            while True:
                batch = list(itertools.islice(offset_iter, 100))
                if not batch:
                    break
                # print("Inserting batch of %i offsets, %s ... %s"
                #       % (len(batch), batch[0][0], batch[-1][0]))
                con.executemany(
                    "INSERT INTO offset_data (key,file_number,offset,length) VALUES (?,?,?,?);",
                    batch)
                con.commit()
                count += len(batch)
            if len(random_access_proxies) < max_open:
                random_access_proxies[i] = random_access_proxy
            else:
                random_access_proxy._handle.close()
        self._length = count
        # print("About to index %i entries" % count)
        try:
            con.execute("CREATE UNIQUE INDEX IF NOT EXISTS "
                        "key_index ON offset_data(key);")
        except _IntegrityError as err:
            self._proxies = random_access_proxies
            self.close()
            con.close()
            raise ValueError("Duplicate key? %s" % err)
        con.execute("PRAGMA locking_mode=NORMAL")
        con.execute("UPDATE meta_data SET value = ? WHERE key = ?;",
                    (count, "count"))
        con.commit()
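
The speed tricks in this example are to relax SQLite's durability and locking settings, insert offsets in batches with executemany, and only build the UNIQUE index after all rows are loaded, which also doubles as the duplicate-key check. A minimal standalone sketch of that load-then-index pattern, with made-up table contents:

import itertools
import sqlite3

con = sqlite3.connect("bulk_index_demo.sqlite")  # throwaway demo database
con.execute("PRAGMA synchronous=OFF")            # trade durability for speed
con.execute("PRAGMA locking_mode=EXCLUSIVE")
con.execute("CREATE TABLE offset_data (key TEXT, offset INTEGER)")
rows = (("key%06i" % i, i * 100) for i in range(10000))
while True:
    batch = list(itertools.islice(rows, 1000))
    if not batch:
        break
    con.executemany("INSERT INTO offset_data (key, offset) VALUES (?, ?)", batch)
    con.commit()
# Creating the index last is faster than maintaining it per insert and
# raises sqlite3.IntegrityError here if any key was duplicated.
con.execute("CREATE UNIQUE INDEX key_index ON offset_data(key)")
con.commit()
con.close()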
Example #45
0
    def _load_index(self):
        """Call from __init__ to re-use an existing index (PRIVATE)."""
        index_filename = self._index_filename
        relative_path = self._relative_path
        filenames = self._filenames
        format = self._format
        proxy_factory = self._proxy_factory

        con = _sqlite.connect(index_filename)
        self._con = con
        # Check the count...
        try:
            count, = con.execute("SELECT value FROM meta_data WHERE key=?;",
                                 ("count", )).fetchone()
            self._length = int(count)
            if self._length == -1:
                con.close()
                raise ValueError("Unfinished/partial database")
            count, = con.execute(
                "SELECT COUNT(key) FROM offset_data;").fetchone()
            if self._length != int(count):
                con.close()
                raise ValueError("Corrupt database? %i entries not %i" %
                                 (int(count), self._length))
            self._format, = con.execute(
                "SELECT value FROM meta_data WHERE key=?;",
                ("format", )).fetchone()
            if format and format != self._format:
                con.close()
                raise ValueError("Index file says format %s, not %s" %
                                 (self._format, format))
            try:
                filenames_relative_to_index, = con.execute(
                    "SELECT value FROM meta_data WHERE key=?;",
                    ("filenames_relative_to_index", )).fetchone()
                filenames_relative_to_index = (
                    filenames_relative_to_index.upper() == "TRUE")
            except TypeError:
                # Original behaviour, assume if meta_data missing
                filenames_relative_to_index = False
            self._filenames = [
                row[0]
                for row in con.execute("SELECT name FROM file_data "
                                       "ORDER BY file_number;").fetchall()
            ]
            if filenames_relative_to_index:
                # Not implicitly relative to $PWD, explicitly relative to index file
                relative_path = os.path.abspath(
                    os.path.dirname(index_filename))
                tmp = []
                for f in self._filenames:
                    if os.path.isabs(f):
                        tmp.append(f)
                    else:
                        # Would be stored with Unix / path separator, so convert
                        # it to the local OS path separator here:
                        tmp.append(
                            os.path.join(relative_path,
                                         f.replace("/", os.path.sep)))
                self._filenames = tmp
                del tmp
            if filenames and len(filenames) != len(self._filenames):
                con.close()
                raise ValueError("Index file says %i files, not %i" %
                                 (len(self._filenames), len(filenames)))
            if filenames and filenames != self._filenames:
                for old, new in zip(self._filenames, filenames):
                    # Want exact match (after making relative to the index above)
                    if os.path.abspath(old) != os.path.abspath(new):
                        con.close()
                        if filenames_relative_to_index:
                            raise ValueError(
                                "Index file has different filenames, e.g. %r != %r"
                                % (os.path.abspath(old), os.path.abspath(new)))
                        else:
                            raise ValueError(
                                "Index file has different filenames "
                                "[This is an old index where any relative paths "
                                "were relative to the original working directory]. "
                                "e.g. %r != %r" %
                                (os.path.abspath(old), os.path.abspath(new)))
                # Filenames are equal (after imposing abspath)
        except _OperationalError as err:
            con.close()
            raise ValueError("Not a Biopython index database? %s" % err)
        # Now we have the format (from the DB if not given to us),
        if not proxy_factory(self._format):
            con.close()
            raise ValueError("Unsupported format '%s'" % self._format)
Example #46
0
def _get_connection_providers():
    control.makeFile(control.dataPath)
    conn = db.connect(control.providercacheFile)
    conn.row_factory = _dict_factory
    return conn
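
The _dict_factory assigned to row_factory is not shown in this snippet; a typical implementation maps each result row to a dict keyed by column name, roughly like the sketch below (the addon's real helper may differ):

def _dict_factory(cursor, row):
    # Build {column_name: value} for a single result row.
    return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}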
Example #47
0
    def getEpisodeSource(self, title, year, imdb, tvdb, date, season, episode,
                         show, show_alt, source, call):
        try:
            dbcon = database.connect(self.sourceFile)
            dbcur = dbcon.cursor()
            dbcur.execute("CREATE TABLE IF NOT EXISTS rel_url ("
                          "source TEXT, "
                          "imdb_id TEXT, "
                          "season TEXT, "
                          "episode TEXT, "
                          "rel_url TEXT, "
                          "UNIQUE(source, imdb_id, season, episode)"
                          ");")
            dbcur.execute("CREATE TABLE IF NOT EXISTS rel_src ("
                          "source TEXT, "
                          "imdb_id TEXT, "
                          "season TEXT, "
                          "episode TEXT, "
                          "hosts TEXT, "
                          "added TEXT, "
                          "UNIQUE(source, imdb_id, season, episode)"
                          ");")
        except:
            pass

        try:
            sources = []
            dbcur.execute(
                "SELECT * FROM rel_src WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'"
                % (source, 'tt' + imdb, season, episode))
            match = dbcur.fetchone()
            t1 = int(re.sub('[^0-9]', '', str(match[5])))
            t2 = int(datetime.datetime.now().strftime("%Y%m%d%H%M"))
            update = abs(t2 - t1) > 60
            if update == False:
                sources = json.loads(match[4])
                return global_sources.extend(sources)
        except:
            pass

        try:
            url = None
            dbcur.execute(
                "SELECT * FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'"
                % (source, 'tt' + imdb, '', ''))
            url = dbcur.fetchone()
            url = url[4]
        except:
            pass

        try:
            if url == None:
                url = call.get_show(imdb, tvdb, show, show_alt, year)
            if url == None: raise Exception()
            dbcur.execute(
                "DELETE FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'"
                % (source, 'tt' + imdb, '', ''))
            dbcur.execute("INSERT INTO rel_url Values (?, ?, ?, ?, ?)",
                          (source, 'tt' + imdb, '', '', url))
            dbcon.commit()
        except:
            pass

        try:
            ep_url = None
            dbcur.execute(
                "SELECT * FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'"
                % (source, 'tt' + imdb, season, episode))
            ep_url = dbcur.fetchone()
            ep_url = ep_url[4]
        except:
            pass

        try:
            if url == None: raise Exception()
            if ep_url == None:
                ep_url = call.get_episode(url, imdb, tvdb, title, date, season,
                                          episode)
            if ep_url == None: raise Exception()
            dbcur.execute(
                "DELETE FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'"
                % (source, 'tt' + imdb, season, episode))
            dbcur.execute("INSERT INTO rel_url Values (?, ?, ?, ?, ?)",
                          (source, 'tt' + imdb, season, episode, ep_url))
            dbcon.commit()
        except:
            pass

        try:
            sources = []
            sources = call.get_sources(ep_url, self.hosthdfullDict,
                                       self.hostsdfullDict, self.hostlocDict)
            if sources == None: sources = []
            global_sources.extend(sources)
            dbcur.execute(
                "DELETE FROM rel_src WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'"
                % (source, 'tt' + imdb, season, episode))
            dbcur.execute(
                "INSERT INTO rel_src Values (?, ?, ?, ?, ?, ?)",
                (source, 'tt' + imdb, season, episode, json.dumps(sources),
                 datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
            dbcon.commit()
        except:
            pass
Example #48
0
def _get_connection_search():
    control.makeFile(control.dataPath)
    conn = db.connect(control.searchFile)
    conn.row_factory = _dict_factory
    return conn
Example #49
0
 def OpenDB(self, dbfile):
     try:
         self.dbconn = sqlite.connect(dbfile)
     except Exception:
         pass
Example #50
0
def bennu_download_get(function, timeout, *args, **table):
    try:
        response = None

        f = repr(function)
        f = re.sub('.+\smethod\s|.+function\s|\sat\s.+|\sof\s.+', '', f)

        a = hashlib.md5()
        for i in args:
            a.update(str(i))
        a = str(a.hexdigest())
    except:
        pass

    try:
        table = table['table']
    except:
        table = 'rel_list'

    try:
        control.makeFile(control.dataPath)
        dbcon = db.connect(control.cacheFile)
        dbcur = dbcon.cursor()
        dbcur.execute("SELECT * FROM %s WHERE func = '%s' AND args = '%s'" %
                      (table, f, a))
        match = dbcur.fetchone()

        response = eval(match[2].encode('utf-8'))

        t1 = int(match[3])
        t2 = int(time.time())
        update = (abs(t2 - t1) / 3600) >= int(timeout)
        if update == False:
            return response
    except:
        pass

    try:
        r = function(*args)
        if (r == None or r == []) and not response == None:
            return response
        elif (r == None or r == []):
            return r
    except:
        return

    try:
        r = repr(r)
        t = int(time.time())
        dbcur.execute("CREATE TABLE IF NOT EXISTS %s ("
                      "func TEXT, "
                      "args TEXT, "
                      "response TEXT, "
                      "added TEXT, "
                      "UNIQUE(func, args)"
                      ");" % table)
        dbcur.execute("DELETE FROM %s WHERE func = '%s' AND args = '%s'" %
                      (table, f, a))
        dbcur.execute("INSERT INTO %s Values (?, ?, ?, ?)" % table,
                      (f, a, r, t))
        dbcon.commit()
    except:
        pass

    try:
        return eval(r.encode('utf-8'))
    except:
        pass
Example #51
0
def connect_db():
    return sqlite3.connect(app.config['DATABASE'])
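
A bare connect_db() like this leaves closing the connection to the caller; one common way to use it is with contextlib.closing so the handle is released even on errors (the 'entries' table here is only an illustration):

from contextlib import closing

with closing(connect_db()) as db:
    db.execute("CREATE TABLE IF NOT EXISTS entries (id INTEGER PRIMARY KEY, title TEXT)")
    db.commit()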
Example #52
0
def test(host, pguser):
    pg_conn_info = "dbname=epanet_test_db host=" + host + " user="******"dropdb --if-exists -h " + host + " -U " + pguser +
              " epanet_test_db")
    os.system("createdb -h " + host + " -U " + pguser + " epanet_test_db")
    os.system("psql -h " + host + " -U " + pguser +
              " epanet_test_db -c 'CREATE EXTENSION postgis'")

    pcur = versioning.Db(psycopg2.connect(pg_conn_info))
    pcur.execute("CREATE SCHEMA epanet")
    pcur.execute("""
        CREATE TABLE epanet.junctions (
            hid serial PRIMARY KEY,
            id varchar,
            elevation float, 
            base_demand_flow float, 
            demand_pattern_id varchar, 
            printmap integer[],
            geometry geometry('POINT',2154),
            geometry_schematic geometry('POLYGON',2154)
        )""")

    pcur.execute("""
        INSERT INTO epanet.junctions
            (id, elevation, printmap, geometry, geometry_schematic)
            VALUES
            ('0',0,'{1,2,3}',ST_GeometryFromText('POINT(0 0)',2154),
            ST_GeometryFromText('POLYGON((-1 -1,1 -1,1 1,-1 1,-1 -1))',2154))"""
                 )

    pcur.execute("""
        INSERT INTO epanet.junctions
            (id, elevation, printmap, geometry, geometry_schematic)
            VALUES
            ('1',1,'{}',ST_GeometryFromText('POINT(0 1)',2154),
            ST_GeometryFromText('POLYGON((0 0,2 0,2 2,0 2,0 0))',2154))""")

    pcur.execute("""
        CREATE TABLE epanet.pipes (
            hid serial PRIMARY KEY,
            id varchar,
            start_node varchar,
            end_node varchar,
            length float,
            diameter float,
            roughness float,
            minor_loss_coefficient float,
            status varchar,
            geometry geometry('LINESTRING',2154)
        )""")

    pcur.execute("""
        INSERT INTO epanet.pipes
            (id, start_node, end_node, length, diameter, geometry) 
            VALUES
            ('0','0','1',1,2,ST_GeometryFromText('LINESTRING(1 0,0 1)',2154))"""
                 )

    pcur.commit()
    pcur.close()

    versioning.historize(pg_conn_info, 'epanet')

    pcur = versioning.Db(psycopg2.connect(pg_conn_info))

    pcur.execute(
        "SELECT ST_AsText(geometry), ST_AsText(geometry_schematic) FROM epanet_trunk_rev_head.junctions"
    )
    res = pcur.fetchall()
    assert (res[0][0] == 'POINT(0 0)')
    assert (res[1][1] == 'POLYGON((0 0,2 0,2 2,0 2,0 0))')

    wc = tmp_dir + '/wc_multiple_geometry_test.sqlite'
    if os.path.isfile(wc): os.remove(wc)
    spversioning = versioning.spatialite(wc, pg_conn_info)
    spversioning.checkout(
        ['epanet_trunk_rev_head.pipes', 'epanet_trunk_rev_head.junctions'])

    scur = versioning.Db(dbapi2.connect(wc))
    scur.execute(
        "UPDATE junctions_view SET GEOMETRY = GeometryFromText('POINT(3 3)',2154) WHERE OGC_FID = 1"
    )
    scur.commit()
    scur.execute("SELECT * from junctions_view")
    print("--------------")
    for res in scur.fetchall():
        print(res)
    scur.close()
    spversioning.commit('moved a junction')

    pcur.execute(
        "SELECT ST_AsText(geometry), ST_AsText(geometry_schematic), printmap FROM epanet_trunk_rev_head.junctions ORDER BY hid DESC"
    )
    res = pcur.fetchall()
    for r in res:
        print(r)
    assert (res[0][0] == 'POINT(3 3)')
    assert (res[0][1] == 'POLYGON((-1 -1,1 -1,1 1,-1 1,-1 -1))')
    assert (res[0][2] == [1, 2, 3])

    pcur.close()
Example #53
0
    def get(self,
            name,
            imdb=None,
            tmdb=None,
            tvdb=None,
            season=None,
            episode=None,
            year='0',
            runtime=None,
            ck=False):
        offset = '0'
        if not runtime or runtime == 'None':
            return offset  # TMDB sometimes returns None as a string
        scrobbble = 'Local Bookmark'
        if control.setting('bookmarks') != 'true': return offset
        if control.setting('trakt.scrobble') == 'true' and control.setting(
                'resume.source') == '1':
            try:
                scrobbble = 'Trakt Scrobble'
                from resources.lib.modules import traktsync
                progress = float(
                    traktsync.fetch_bookmarks(imdb, tmdb, tvdb, season,
                                              episode))
                offset = (float(progress / 100) * int(runtime))
                seekable = (2 <= progress <= 85)
                if not seekable: return '0'
            except:
                log_utils.error()
                return '0'
        else:
            try:
                dbcon = database.connect(control.bookmarksFile)
                dbcur = dbcon.cursor()
                dbcur.execute(
                    '''CREATE TABLE IF NOT EXISTS bookmark (idFile TEXT, timeInSeconds TEXT, Name TEXT, year TEXT, UNIQUE(idFile));'''
                )
                if not year or year == 'None': return offset
                years = [str(year), str(int(year) + 1), str(int(year) - 1)]
                #helps fix random cases where trakt and imdb, or tvdb, differ by a year for eps
                match = dbcur.execute(
                    '''SELECT * FROM bookmark WHERE Name="%s" AND year IN (%s)'''
                    % (name, ','.join(i for i in years))).fetchone()
            except:
                log_utils.error()
                return offset
            finally:
                dbcur.close()
                dbcon.close()

            if not match: return offset
            offset = str(match[1])

        if ck: return offset
        minutes, seconds = divmod(float(offset), 60)
        hours, minutes = divmod(minutes, 60)
        label = '%02d:%02d:%02d' % (hours, minutes, seconds)
        label = control.lang(32502) % label
        if control.setting('bookmarks.auto') == 'false':
            if control.yesnoDialog(label, scrobbble, '', str(name),
                                   control.lang(32503), control.lang(32501)):
                offset = '0'
        return offset
Example #54
0
def get_db():
    top = _app_ctx_stack.top
    if not hasattr(top, 'sqlite_db'):
        top.sqlite_db = sqlite3.connect(app.config['DATABASE'])
        top.sqlite_db.row_factory = sqlite3.Row
    return top.sqlite_db
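
get_db() caches one connection per application context; Flask apps using this pattern usually pair it with a teardown handler so the cached connection is closed when the context ends. A sketch of the usual companion function, assuming the same app object (the original project's teardown code is not shown here):

@app.teardown_appcontext
def close_db(exception):
    # Close the per-context connection opened by get_db(), if any.
    top = _app_ctx_stack.top
    if hasattr(top, 'sqlite_db'):
        top.sqlite_db.close()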
Example #55
0
    def update(self, query=None, info='true'):
        if not query == None: control.idle()

        try:

            items = []
            season, episode = [], []
            show = [
                os.path.join(self.library_folder, i)
                for i in control.listDir(self.library_folder)[0]
            ]
            for s in show:
                try:
                    season += [
                        os.path.join(s, i) for i in control.listDir(s)[0]
                    ]
                except:
                    pass
            for s in season:
                try:
                    episode.append([
                        os.path.join(s, i) for i in control.listDir(s)[1]
                        if i.endswith('.strm')
                    ][-1])
                except:
                    pass

            for file in episode:
                try:
                    file = control.openFile(file)
                    read = file.read()
                    read = read.encode('utf-8')
                    file.close()

                    if not read.startswith(sys.argv[0]): raise Exception()

                    params = dict(urlparse.parse_qsl(read.replace('?', '')))

                    try:
                        tvshowtitle = params['tvshowtitle']
                    except:
                        tvshowtitle = None
                    try:
                        tvshowtitle = params['show']
                    except:
                        pass
                    if tvshowtitle == None or tvshowtitle == '':
                        raise Exception()

                    year, imdb, tvdb = params['year'], params['imdb'], params[
                        'tvdb']

                    imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))

                    try:
                        tmdb = params['tmdb']
                    except:
                        tmdb = '0'

                    items.append({
                        'tvshowtitle': tvshowtitle,
                        'year': year,
                        'imdb': imdb,
                        'tmdb': tmdb,
                        'tvdb': tvdb
                    })
                except:
                    pass

            items = [i for x, i in enumerate(items) if i not in items[x + 1:]]
            if len(items) == 0: raise Exception()
        except:
            return

        try:
            lib = control.jsonrpc(
                '{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShows", "params": {"properties" : ["imdbnumber", "title", "year"]}, "id": 1}'
            )
            lib = unicode(lib, 'utf-8', errors='ignore')
            lib = json.loads(lib)['result']['tvshows']
        except:
            return

        if info == 'true' and not control.condVisibility(
                'Window.IsVisible(infodialog)') and not control.condVisibility(
                    'Player.HasVideo'):
            control.infoDialog(control.lang(32553).encode('utf-8'),
                               time=10000000)
            self.infoDialog = True

        try:
            control.makeFile(control.dataPath)
            dbcon = database.connect(control.libcacheFile)
            dbcur = dbcon.cursor()
            dbcur.execute("CREATE TABLE IF NOT EXISTS tvshows ("
                          "id TEXT, "
                          "items TEXT, "
                          "UNIQUE(id)"
                          ");")
        except:
            return

        try:
            from resources.lib.indexers import episodes
        except:
            return

        files_added = 0

        # __init__ doesn't get called from services so self.date never gets updated and new episodes are not added to the library
        self.datetime = (datetime.datetime.utcnow() -
                         datetime.timedelta(hours=5))
        self.date = (self.datetime -
                     datetime.timedelta(hours=24)).strftime('%Y%m%d')

        for item in items:
            it = None

            if xbmc.abortRequested == True: return sys.exit()

            try:
                dbcur.execute("SELECT * FROM tvshows WHERE id = '%s'" %
                              item['tvdb'])
                fetch = dbcur.fetchone()
                it = eval(fetch[1].encode('utf-8'))
            except:
                pass

            try:
                if not it == None: raise Exception()

                it = episodes.episodes().get(item['tvshowtitle'],
                                             item['year'],
                                             item['imdb'],
                                             item['tvdb'],
                                             idx=False)

                status = it[0]['status'].lower()

                it = [{
                    'title': i['title'],
                    'year': i['year'],
                    'imdb': i['imdb'],
                    'tvdb': i['tvdb'],
                    'season': i['season'],
                    'episode': i['episode'],
                    'tvshowtitle': i['tvshowtitle'],
                    'premiered': i['premiered']
                } for i in it]

                if status == 'continuing': raise Exception()
                dbcur.execute("INSERT INTO tvshows Values (?, ?)",
                              (item['tvdb'], repr(it)))
                dbcon.commit()
            except:
                pass

            try:
                id = [item['imdb'], item['tvdb']]
                if not item['tmdb'] == '0': id += [item['tmdb']]

                ep = [
                    x['title'].encode('utf-8') for x in lib
                    if str(x['imdbnumber']) in id or (
                        x['title'].encode('utf-8') == item['tvshowtitle']
                        and str(x['year']) == item['year'])
                ][0]
                ep = control.jsonrpc(
                    '{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodes", "params": {"filter":{"and": [{"field": "tvshow", "operator": "is", "value": "%s"}]}, "properties": ["season", "episode"]}, "id": 1}'
                    % ep)
                ep = unicode(ep, 'utf-8', errors='ignore')
                ep = json.loads(ep).get('result', {}).get('episodes', {})
                ep = [{
                    'season': int(i['season']),
                    'episode': int(i['episode'])
                } for i in ep]
                ep = sorted(ep, key=lambda x: (x['season'], x['episode']))[-1]

                num = [
                    x for x, y in enumerate(it)
                    if str(y['season']) == str(ep['season'])
                    and str(y['episode']) == str(ep['episode'])
                ][-1]
                it = [y for x, y in enumerate(it) if x > num]
                if len(it) == 0: continue
            except:
                continue

            for i in it:
                try:
                    if xbmc.abortRequested == True: return sys.exit()

                    premiered = i.get('premiered', '0')
                    if (premiered != '0'
                            and int(re.sub('[^0-9]', '', str(premiered))) >
                            int(self.date)) or (premiered == '0'
                                                and not self.include_unknown):
                        continue

                    libtvshows().strmFile(i)
                    files_added += 1
                except:
                    pass

        if self.infoDialog == True:
            control.infoDialog(control.lang(32554).encode('utf-8'), time=1)

        if self.library_setting == 'true' and not control.condVisibility(
                'Library.IsScanningVideo') and files_added > 0:
            control.execute('UpdateLibrary(video)')
Example #56
0
                    for s in syms:
                        sadb.update_symbol(con, s, db_path)
                        con.commit()
            except lite.Error, e:
                log.critical("Error: %s: " % e.args[0])
                sys.exit(1)
            finally:
                if con:
                    con.close()

        chunksize = int(math.ceil(
            sadb.file_len(symbol_list) / float(nthreads)))
        #threads = []
        all_syms = [i.strip() for i in open(symbol_list, 'r').readlines()]

        p = sqlalchemy.pool.SingletonThreadPool(lambda: lite.connect(db_path))

        for i in range(nthreads):
            syms = all_syms[chunksize * i:chunksize * (i + 1)]
            t = threading.Thread(target=worker, args=[syms])
            #t.daemon = True
            t.start()
            #p = multiprocessing.Process(
            #target=worker,
            #args=[syms])
            #procs.append(p)
            #p.start()
    else:
        log.critical(
            "Could not connect to google.com via [%s]. Conclusion:  You're not connected to the internet. Either that or google.com is down. 2013-08-17 Never Forget."
            % conn_test_ip)
Example #57
0
import os
from sqlite3 import dbapi2
"""
    Genera la base de datos y le introduce unos datos iniciales.
"""
try:
    ### Create the database.
    baseDatos = dbapi2.connect("BaseDeDatos.dat")
    cursor = baseDatos.cursor()

    ### Create the tables.
    cursor.execute(
        "create table proveedores(id text, nombre text,CIF text, direccion text, telefono text, correo text)"
    )
    cursor.execute(
        "create table productos(id text, nombre text , descripcion text, cantidadStock number, precioUnidad number,idProv text)"
    )
    cursor.execute(
        "create table facturasClientes(idFactura number, nombreCliente text, telefono text, direccion text, correo text)"
    )
    cursor.execute(
        "create table facturasInfo(idFactura number,idProducto text, cantidad number)"
    )

    ### Insert rows into the tables.
    cursor.execute(
        "insert into proveedores values('idprov1','Siemens','562-352-143','Av. Alcalde Lavadores Nº56','986456784','*****@*****.**')"
    )
    cursor.execute(
        "insert into proveedores values('idprov2','Codisin','150-488-654','Rua Sen nome Nª87','95528789563','*****@*****.**')"
    )
Example #58
0
def get_xml(url):
    cache_location = os.path.join(".", 'url_cache.db')
    try:
        request = requests.get(url, timeout=5)
    except:
        request = None
    try:
        last_modified = request.headers['Last-Modified']
    except:
        last_modified = ""
    dbcon = database.connect(cache_location)
    dbcur = dbcon.cursor()
    try:
        dbcur.execute("SELECT * FROM version")
        match = dbcur.fetchone()
    except:
        dbcur.execute("CREATE TABLE version (""version TEXT)")
        dbcur.execute("INSERT INTO version Values ('0.0.1')")
        dbcon.commit()
    dbcur.execute(
        "CREATE TABLE IF NOT EXISTS xml (url TEXT, xml TEXT, last_modified Text, UNIQUE(url, last_modified));")
    if last_modified:
        try:
            dbcur.execute(
                "SELECT * FROM xml WHERE last_modified = '%s' and url = '%s'" % (last_modified, url))
            match = dbcur.fetchone()
            if match:
                if match[2] == last_modified:
                    return match[1]
        except:
            pass
    else:
        try:
            dbcur.execute(
                "SELECT * FROM xml WHERE url = '%s'" % (url))
            match = dbcur.fetchone()
            if match:
                return match[1]
        except:
            pass
    if not last_modified:
        request = requests.get(url)
        try:
            last_modified = request.headers['Last-Modified']
        except:
            last_modified = 0
    print("cache miss")
    xml = request.text
    xml = xml.replace("\n", "").replace("##", "").replace('\t', "")
    try:
        dbcur.execute("DELETE FROM xml WHERE url = '%s'" % (url))
    except:
        print("error deleting")
        pass
    try:
        dbcur.execute("INSERT INTO xml Values (?, ?, ?)", (url, xml.encode("utf-8", "ignore"), last_modified))
    except:
        try:
            dbcur.execute("INSERT INTO xml Values (?, ?, ?)", (url, xml.decode("utf-8"), last_modified))
        except:
            pass
    dbcon.commit()
    return xml
Example #59
0
# standard imports
import os
import sys
import pickle
import re
# non-standard imports
import numpy as np
from sklearn import svm
from sqlite3 import dbapi2 as sqlite3
# local imports
from utils import safe_pickle_dump, strip_version, Config



sqldb = sqlite3.connect(Config.database_path)


def query_db(query, args=(), one=False):
  """Queries the database and returns a list of dictionaries."""
  cur = sqldb.execute(query, args)
  rv = cur.fetchall()
  return (rv[0] if rv else None) if one else rv
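
As written, the connection above sets no row_factory, so query_db returns plain tuples (see the sample output noted further down). If dictionary-style access is preferred, one optional variant is to set sqlite3.Row before querying:

# Optional variant, not in the original script: dict-like rows.
sqldb.row_factory = sqlite3.Row
first = query_db('select * from library', one=True)
if first is not None:
    print(dict(first))  # sqlite3.Row converts cleanly to a dict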


db = pickle.load(open(Config.db_path, 'rb'))

saved_ids = query_db('''select * from library''')
# something like [(1, '1708.00871', 1, 1503193837),(2, '1707.09589', 1, 1503281105),(3, '1708.05606', 1, 1503295637)]

ids=[]
for saved_id in saved_ids:
Example #60
0
    def service(self):
        try:
            lib_tools.create_folder(
                os.path.join(
                    control.transPath(control.setting('library.movie')), ''))
            lib_tools.create_folder(
                os.path.join(control.transPath(control.setting('library.tv')),
                             ''))
        except:
            pass

        try:
            control.makeFile(control.dataPath)
            dbcon = database.connect(control.libcacheFile)
            dbcur = dbcon.cursor()
            dbcur.execute("CREATE TABLE IF NOT EXISTS service ("
                          "setting TEXT, "
                          "value TEXT, "
                          "UNIQUE(setting)"
                          ");")
            dbcur.execute("SELECT * FROM service WHERE setting = 'last_run'")
            fetch = dbcur.fetchone()
            if fetch == None:
                serviceProperty = "1970-01-01 23:59:00.000000"
                dbcur.execute("INSERT INTO service Values (?, ?)",
                              ('last_run', serviceProperty))
                dbcon.commit()
            else:
                serviceProperty = str(fetch[1])
            dbcon.close()
        except:
            try:
                return dbcon.close()
            except:
                return

        try:
            control.window.setProperty(self.property, serviceProperty)
        except:
            return

        while not xbmc.abortRequested:
            try:
                serviceProperty = control.window.getProperty(self.property)

                t1 = datetime.timedelta(hours=6)
                t2 = datetime.datetime.strptime(serviceProperty,
                                                '%Y-%m-%d %H:%M:%S.%f')
                t3 = datetime.datetime.now()

                check = abs(t3 - t2) > t1
                if check == False: raise Exception()

                if (control.player.isPlaying()
                        or control.condVisibility('Library.IsScanningVideo')):
                    raise Exception()

                serviceProperty = datetime.datetime.now().strftime(
                    '%Y-%m-%d %H:%M:%S.%f')

                control.window.setProperty(self.property, serviceProperty)

                try:
                    dbcon = database.connect(control.libcacheFile)
                    dbcur = dbcon.cursor()
                    dbcur.execute("CREATE TABLE IF NOT EXISTS service ("
                                  "setting TEXT, "
                                  "value TEXT, "
                                  "UNIQUE(setting)"
                                  ");")
                    dbcur.execute(
                        "DELETE FROM service WHERE setting = 'last_run'")
                    dbcur.execute("INSERT INTO service Values (?, ?)",
                                  ('last_run', serviceProperty))
                    dbcon.commit()
                    dbcon.close()
                except:
                    try:
                        dbcon.close()
                    except:
                        pass

                if not control.setting('library.service.update') == 'true':
                    raise Exception()
                info = control.setting(
                    'library.service.notification') or 'true'
                self.update(info=info)
            except:
                pass

            control.sleep(10000)