def initiate_govt_users():
    """Give every person in the population an account with the Government org.

    The Government is always the first (and only) organisation at index 0
    of the identity market.
    """
    org = p.IDENTITY_MARKET[0]
    for person in p.POPULATION:
        account = f.create_account(person, org)
    f.debug(1, "Initiate Government Users")
    return 0
def load_files(file_names):
    """Loads all files into the database.

    file_names : str
        All the file names to load into the database

    Files that raise during loading are logged and skipped; an empty file
    (StopIteration from the reader) is reported but not treated as an error.
    """
    for file_name in file_names:
        print()
        print("Loading file {0}".format(file_name))
        f.debug("Opening file handler for '{0}'".format(file_name))
        with f.open_file(file_name) as file:
            try:
                read_and_load_file(file)
                print("File loaded.")
            except StopIteration:
                # Header read hit EOF immediately.
                print("File is empty: {0}".format(file_name))
            except Exception:
                f.error("Error while loading file into table: {0}".format(file.name))
                exception, traceback = f.get_exception_details()
                f.error(exception)
                f.debug(traceback)
                # Remember the failure but keep going with the remaining files.
                cfg.data_loading_error = True
                print("Skipping file.")
        print()
def create_person():
    """Instantiate a new Person, register it in the population, and return it.

    PERSON_ID doubles as the person's index in p.POPULATION, so the counter
    must advance in lockstep with every append.
    """
    person = classes.Person_CLASS(p.PERSON_ID)
    p.PERSON_ID += 1
    p.POPULATION.append(person)
    f.debug(1, "Created Person:" + str(person.displayPERSON()))
    return person
def loop(self):
    """Main playback-monitoring loop.

    Keeps the torrent thread alive while the video plays, mirrors torrent
    status into an on-screen overlay (shown only while paused), optionally
    auto-pauses at start, and once the download hits 100% kicks off seeding
    and pre-downloading of the next episode.
    """
    debug_counter = 0
    pause = True  # one-shot flag for the "pause_onplay" behaviour below
    self.torrent.torrentHandle.force_dht_announce()
    with closing(
            OverlayText(w=OVERLAY_WIDTH, h=OVERLAY_HEIGHT,
                        alignment=XBFONT_CENTER_X | XBFONT_CENTER_Y)) as overlay:
        # Overlay is shown on pause and hidden on resume/stop via player events.
        with nested(
                self.attach(overlay.show, self.on_playback_paused),
                self.attach(overlay.hide, self.on_playback_resumed,
                            self.on_playback_stopped)):
            while not xbmc.abortRequested and self.isPlaying(
            ) and not self.torrent.threadComplete:
                self.torrent.checkThread()
                self.watchedTime = xbmc.Player().getTime()
                self.totalTime = xbmc.Player().getTotalTime()
                # Once complete, only log torrent debug info every ~100 ticks.
                if self.iterator == 100 and debug_counter < 100:
                    debug_counter += 1
                else:
                    self.torrent.debug()
                    debug_counter = 0
                status = self.torrent.torrentHandle.status()
                overlay.text = "\n".join(self._get_status_lines(status))
                # downloadedSize = torrent.torrentHandle.file_progress()[contentId]
                self.iterator = int(status.progress * 100)
                # Optional auto-pause right after playback starts.
                if pause and self.__settings__.getSetting(
                        "pause_onplay") == 'true':
                    pause = False
                    xbmc.Player().pause()
                xbmc.sleep(1000)
                # Download finished: resolve which episode id comes next.
                if self.iterator == 100 and self.next_dl:
                    next_contentId_index = self.ids_video.index(
                        str(self.contentId)) + 1
                    if len(self.ids_video) > next_contentId_index:
                        self.next_contentId = int(
                            self.ids_video[next_contentId_index])
                    else:
                        self.next_contentId = False
                    debug('[loop] next_contentId: ' + str(self.next_contentId))
                # Start seeding exactly once after completion.
                if not self.seeding_run and self.iterator == 100 and self.seeding:
                    self.seeding_run = True
                    self.seed(self.contentId)
                    self.seeding_status = True
                    # xbmc.sleep(7000)
                # Begin pre-downloading the next episode exactly once.
                if self.iterator == 100 and self.next_dl and not self.next_dling and isinstance(
                        self.next_contentId, int) and self.next_contentId != False:
                    showMessage(
                        self.localize('Torrent Downloading'),
                        self.localize('Starting download next episode!'))
                    self.torrent.stopSession()
                    # xbmc.sleep(1000)
                    path = self.torrent.getFilePath(self.next_contentId)
                    self.basename = self.display_name = os.path.basename(path)
                    self.torrent.continueSession(self.next_contentId)
                    self.next_dling = True
def setup_torrent(self):
    """Initialise the torrent session, apply user rate limits, and start
    downloading the selected file with a streaming offset."""
    self.torrent.initSession()
    if self.__settings__.getSetting("encryption") == "true":
        self.torrent.encryptSession()
    self.torrent.startSession()
    # Empty setting means "unlimited"; values are entered in MBit/s and
    # converted to bytes/s below.
    upload_limit = (
        self.__settings__.getSetting("upload_limit")
        if self.__settings__.getSetting("upload_limit") != "" else 0
    )
    if 0 < int(upload_limit):
        self.torrent.setUploadLimit(int(upload_limit) * 1024 * 1024 / 8)  # MBits/second
    download_limit = (
        self.__settings__.getSetting("download_limit")
        if self.__settings__.getSetting("download_limit") != "" else 0
    )
    if 0 < int(download_limit):
        self.torrent.setDownloadLimit(int(download_limit) * 1024 * 1024 / 8)  # MBits/second
    self.torrent.status = False
    self.fullSize = self.torrent.getFileSize(self.contentId)
    # Offset presumably prioritises head/tail pieces for streaming —
    # TODO confirm against calculate()'s definition.
    Offset = calculate(self.fullSize)
    debug("Offset: " + str(Offset))
    # mp4 fix: mp4 files need special handling by the session.
    label = os.path.basename(self.torrent.getFilePath(self.contentId))
    isMP4 = False
    if "." in label and str(label.split(".")[-1]).lower() == "mp4":
        isMP4 = True
    debug("setup_torrent: " + str((self.contentId, Offset, isMP4, label)))
    self.torrent.continueSession(self.contentId, Offset=Offset, isMP4=isMP4)
def setup_subs(self, label, path):
    """Download the torrent's subtitle files and copy them next to the video.

    label : the video file's name, used to find matching subtitle entries.
    path  : full path of the playing video; subtitles are renamed so Kodi
            picks them up alongside it.
    """
    iterator = 0
    subs = self.torrent.getSubsIds(label)
    debug("[setup_subs] subs: " + str(subs))
    if len(subs) > 0:
        showMessage(
            self.localize("Information"),
            self.localize("Downloading and copy subtitles. Please wait."),
            forced=True
        )
        for ind, title in subs:
            self.torrent.continueSession(ind)
            # Wait for the torrent to report 100% before copying.
            while iterator < 100:
                xbmc.sleep(1000)
                self.torrent.debug()
                status = self.torrent.torrentHandle.status()
                iterator = int(status.progress * 100)
        # xbmc.sleep(2000)
        for ind, title in subs:
            folder = title.split(os.sep)[0]
            temp = os.path.basename(title)
            # BUG FIX: the original used str.lstrip(folder + os.sep), which
            # strips a *character set*, not a prefix, and could eat leading
            # characters of the sub-directory name. Remove the exact prefix.
            prefix = folder + os.sep
            sub_dir = os.path.dirname(title)
            if sub_dir.startswith(prefix):
                sub_dir = sub_dir[len(prefix):]
            # Fold the remaining directory path into the file name so the
            # subtitle sits flat next to the video.
            addition = sub_dir.replace(os.sep, ".").replace(" ", "_").strip()
            ext = temp.split(".")[-1]
            temp = temp[: len(temp) - len(ext) - 1] + "." + addition + "." + ext
            newFileName = os.path.join(os.path.dirname(path), temp)
            debug(
                "[setup_subs]: "
                + str((os.path.join(os.path.dirname(os.path.dirname(path)), title), newFileName))
            )
            if not xbmcvfs.exists(newFileName):
                xbmcvfs.copy(os.path.join(os.path.dirname(os.path.dirname(path)), title), newFileName)
def setup_torrent(self):
    """Initialise the torrent session, apply user rate limits, cache the
    file path, and start downloading with a streaming offset."""
    self.torrent.initSession()
    if self.__settings__.getSetting('encryption') == 'true':
        self.torrent.encryptSession()
    self.torrent.startSession()
    # Empty setting means "unlimited"; values are MBit/s, converted to bytes/s.
    upload_limit = self.__settings__.getSetting(
        "upload_limit"
    ) if self.__settings__.getSetting("upload_limit") != "" else 0
    if 0 < int(upload_limit):
        self.torrent.setUploadLimit(int(upload_limit) * 1024 * 1024 / 8)  # MBits/second
    download_limit = self.__settings__.getSetting(
        "download_limit"
    ) if self.__settings__.getSetting("download_limit") != "" else 0
    if 0 < int(download_limit):
        self.torrent.setDownloadLimit(
            int(download_limit) * 1024 * 1024 / 8)  # MBits/second
    self.torrent.status = False
    self.fullSize = self.torrent.getFileSize(self.contentId)
    # Cached for later use (e.g. subtitle placement).
    self.path = self.torrent.getFilePath(self.contentId)
    # Offset presumably prioritises head/tail pieces for streaming —
    # TODO confirm against calculate()'s definition.
    Offset = calculate(self.fullSize)
    debug('Offset: ' + str(Offset))
    # mp4 fix: mp4 files need special handling by the session.
    label = os.path.basename(self.path)
    isMP4 = False
    if '.' in label and str(label.split('.')[-1]).lower() == 'mp4':
        isMP4 = True
    debug('setup_torrent: ' + str((self.contentId, Offset, isMP4, label)))
    self.torrent.continueSession(self.contentId, Offset=Offset, isMP4=isMP4)
def update(self):
    """Write the record's current in-memory data back to the database."""
    # TODO: Determine if the record already exists by checking if the
    # primary key value is set.
    updateQuery = database.generate_updateQuery(
        self.mb_tableName, self.mb_recordData, self.mb_wc, self.mb_custom_values)
    query_text, query_values = updateQuery[0], updateQuery[1]
    functions.debug(query_text)
    functions.debug(str(query_values))
    database.update(query_text, query_values)
def proxy_open(self, req, proxy, type):
    """Route the request through the antizapret proxy when its resolved IP
    is on the blocked-domain list; otherwise let other handlers try."""
    import socket
    host = req.get_host().split(":")[0]
    if socket.gethostbyname(host) not in self.config["domains"]:
        return None
    debug("[antizapret]: Pass request through proxy " + self.config["server"])
    return urllib2.ProxyHandler.proxy_open(self, req, self.config["server"], type)
def create_org(marketID):
    """Create a new organisation on the given market and register it.

    ORG_ID doubles as the organisation's index in p.IDENTITY_MARKET, so the
    counter advances with every append.
    """
    org = classes.Organisation_CLASS(p.ORG_ID, marketID)
    p.ORG_ID += 1
    # Initial strategy leans towards growth.
    org.strategy = f.returnRandomInt(15, 20)
    p.IDENTITY_MARKET.append(org)
    f.debug(1, "Create Organisation")
    return 0
def add(self, level):
    """Append a level to the level list and keep min/max speed up to date.

    The level's number is its index in the list.
    """
    self.levelList.append(level)
    level.number = len(self.levelList) - 1
    if level.speed > self.maxSpeed:
        self.maxSpeed = level.speed
    # minSpeed of 0 means "unset", so the first level always claims it.
    if self.minSpeed == 0 or level.speed < self.minSpeed:
        self.minSpeed = level.speed
    functions.debug("maxSpeed: " + str(self.maxSpeed) + "\nminSpeed: " + str(self.minSpeed))
def loop(self):
    """Main playback-monitoring loop (variant without auto-pause).

    Keeps the torrent thread alive while the video plays, mirrors torrent
    status into an on-screen overlay (shown only while paused), and once
    the download hits 100% kicks off seeding and pre-downloading of the
    next episode.
    """
    debug_counter = 0
    xbmc.sleep(1000)
    self.torrent.torrentHandle.force_dht_announce()
    with closing(
        OverlayText(w=OVERLAY_WIDTH, h=OVERLAY_HEIGHT,
                    alignment=XBFONT_CENTER_X | XBFONT_CENTER_Y)
    ) as overlay:
        # Overlay is shown on pause and hidden on resume/stop via player events.
        with nested(
            self.attach(overlay.show, self.on_playback_paused),
            self.attach(overlay.hide, self.on_playback_resumed, self.on_playback_stopped),
        ):
            while not xbmc.abortRequested and self.isPlaying() and not self.torrent.threadComplete:
                self.torrent.checkThread()
                self.watchedTime = xbmc.Player().getTime()
                self.totalTime = xbmc.Player().getTotalTime()
                # Once complete, only log torrent debug info every ~100 ticks.
                if self.iterator == 100 and debug_counter < 100:
                    debug_counter += 1
                else:
                    self.torrent.debug()
                    debug_counter = 0
                status = self.torrent.torrentHandle.status()
                overlay.text = "\n".join(self._get_status_lines(status))
                # downloadedSize = torrent.torrentHandle.file_progress()[contentId]
                self.iterator = int(status.progress * 100)
                xbmc.sleep(1000)
                # Download finished: resolve which episode id comes next.
                if self.iterator == 100 and self.next_dl:
                    next_contentId_index = self.ids_video.index(str(self.contentId)) + 1
                    if len(self.ids_video) > next_contentId_index:
                        self.next_contentId = int(self.ids_video[next_contentId_index])
                    else:
                        self.next_contentId = False
                    debug("[loop] next_contentId: " + str(self.next_contentId))
                # Start seeding exactly once after completion.
                if not self.seeding_run and self.iterator == 100 and self.seeding:
                    self.seeding_run = True
                    self.seed(self.contentId)
                    self.seeding_status = True
                    # xbmc.sleep(7000)
                # Begin pre-downloading the next episode exactly once.
                if (
                    self.iterator == 100
                    and self.next_dl
                    and not self.next_dling
                    and isinstance(self.next_contentId, int)
                    and self.next_contentId != False
                ):
                    showMessage(
                        self.localize("Torrent Downloading"),
                        self.localize("Starting download next episode!"),
                        forced=True,
                    )
                    self.torrent.stopSession()
                    # xbmc.sleep(1000)
                    path = self.torrent.getFilePath(self.next_contentId)
                    self.basename = self.display_name = os.path.basename(path)
                    self.torrent.continueSession(self.next_contentId)
                    self.next_dling = True
def initiate_organisations():
    """Create the initial organisations for all three markets.

    The Government market (ID 0) gets exactly one organisation; the Social
    and Commercial markets each get a random initial number.
    """
    # First market is government.
    create_org(0)
    for marketID in xrange(2):
        # For the Social and Commercial markets the initial count is random.
        initial_num_ORGS = f.returnRandomInt(3, 10)
        for _ in xrange(initial_num_ORGS):
            create_org(marketID + 1)
    f.debug(1, "Initiate Organisations")
    return 0
def __debug(self, msg):
    """
    Prints a debug message if debug is enabled.

    :param msg: The message to print
    :type msg: str
    :return: Nothing
    :rtype: None
    """
    if not self.debug:
        return
    debug(msg)
def initiate_population():
    """Create the initial population and seed each person with friendships."""
    f.debug(1, "Initiate Population")
    # First create the population.
    for _ in xrange(p.NUM_POPULATION):
        create_person()
    # Next initiate their attributes. Friendships are recorded one-way,
    # so each person will likely end up with more than two links in total.
    for person in p.POPULATION:
        for _ in xrange(2):
            f.make_friend(person)
    return 0
def load_data(col_map, data):
    """Loads the data into the database.

    Parameters
    ----------
    col_map : [str,]
        The columns to load the data into
    data : [str,]
        The data to load. If data is None the array will be loaded and
        flushed.
    """
    if data is not None and len(data) > 0:
        # If the data has more values than the header provided, ignore the
        # end (green data set has that)
        while len(data) > len(col_map):
            f.debug(
                "Removing extra row value entry not present in the header.")
            data.pop()
        # tuple or dictionary only for SQL Server
        cfg.input_data.append(tuple(data))
    # If batch size has been reached or input array should be flushed
    if (len(cfg.input_data) == cfg.batch_size) or (data is None and len(cfg.input_data) > 0):
        f.debug("Executing statement:")
        stmt = generate_statement(col_map)
        f.debug(stmt)
        cur = cfg.conn.cursor()
        try:
            f.executemany(cur, stmt)
        except Exception as err:
            # Rollback old batch (needed for at least Postgres to finish
            # transaction)
            cfg.conn.rollback()
            # If debug output is enabled, replay the batch row by row to
            # find the failing record.
            if cfg.debug:
                for record in cfg.input_data:
                    try:
                        cur.execute(stmt, record)
                    except Exception as err1:
                        f.debug("Error with record: {0}".format(record))
                        # Rollback old batch (needed for at least Postgres
                        # to finish transaction)
                        cfg.conn.rollback()
                        cur.close()
                        cfg.input_data.clear()
                        raise
            # Debug output is not enabled, clear current batch and raise error
            else:
                cur.close()
                cfg.input_data.clear()
                raise
        f.debug("Commit")
        cfg.conn.commit()
        cur.close()
        f.verbose("{0} rows loaded.".format(len(cfg.input_data)))
        cfg.input_data.clear()
def config():
    """Return the antizapret PAC configuration, using an on-disk cache.

    Loads {server, domains} from a shelve cache when fresh; otherwise
    fetches and parses the PAC file, refreshes the cache, and returns the
    parsed data. The whole operation is serialised by CONFIG_LOCK.
    """
    shelf = None
    try:
        CONFIG_LOCK.acquire()
        filename = os.path.join(CACHE_DIR, "antizapret.pac_config2")
        # A corrupt cache file is deleted and recreated.
        try:
            shelf = shelve.open(filename)
        except anydbm.error:
            os.remove(filename)
            shelf = shelve.open(filename)
        created_at = 0
        data = {}
        if 'created_at' in shelf:
            created_at = shelf['created_at']
        if 'data' in shelf:
            data = shelf['data']
        # Serve from cache while it is fresh and non-empty.
        if ((time.time() - created_at) <= CACHE_LIFETIME
                and 'domains' in data and len(data['domains']) > 0):
            return data
        log("[antizapret]: Fetching Antizapret PAC file on %s" % PAC_URL)
        # Network failure degrades to an empty PAC body (=> empty config).
        try:
            pac_data = urllib2.urlopen(PAC_URL).read()
        except:
            pac_data = ""
        r = re.search(r"\"PROXY (.*); DIRECT", pac_data)
        if r:
            data["server"] = r.group(1)
            # Convert the PAC's glob patterns into plain domain strings by
            # translating to regex and stripping the regex artefacts.
            data["domains"] = map(
                lambda x: x.replace(r"\Z(?ms)", "").replace("\\", ""),
                map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
        else:
            data["server"] = None
            data["domains"] = []
        # Replace the cached copy wholesale.
        shelf.clear()
        shelf.update({
            "created_at": time.time(),
            "data": data,
        })
        return data
    except Exception as ex:
        debug("[antizapret]: " + str(ex))
        raise
    finally:
        if shelf:
            shelf.close()
        if CONFIG_LOCK.locked():
            CONFIG_LOCK.release()
def insert(query, values): # Make sure the values are in a list if type(values) != list: values = list(values) #Run the query and see what happens... try: cur.executemany(query,[values]) db.commit() functions.debug("Inserted.") except Exception, e: functions.debug("Error inserting record."); exit("ModelBuddy Database Driver Error: " + str(e) + "\n modelbuddy.database.insert("+ str(query) + ", " + str(values)+")")
def read_and_load_file(file):
    """Reads and loads file.

    Parameters
    ----------
    file : file_object
        The file to load
    """
    rows = f.get_csv_reader(file)
    col_map = f.read_header(rows)
    f.debug("Column map: {0}".format(col_map))
    for row in rows:
        load_data(col_map, tuple(row))
    # A None payload tells load_data to flush whatever is still buffered.
    load_data(col_map, None)
def load_files(file_names):
    """Loads all files into the database.

    file_names : str
        All the file names to load into the database
    """
    for name in file_names:
        print()
        print("Loading file {0}".format(name))
        f.debug("Opening file handler for '{0}'".format(name))
        with f.open_file(name) as handle:
            read_and_load_file(handle)
            print("Done")
        print()
def __init__(self, tableName, wc="", custom_values=""):
    """Load (or blank-initialise) a ModelBuddy record for *tableName*.

    With no where-clause the model is filled with column defaults; with a
    dict or string where-clause the matching record is fetched.
    NOTE(review): database.select appears to print rather than return its
    result — verify mb_recordData actually receives data here.
    """
    functions.debug("Initializing ModelBuddy Model for " + tableName + " table.")
    self.mb_tableName = tableName
    self.mb_wc = wc
    self.mb_custom_values = custom_values
    # Get Table Structure
    self.mb_tableStructure = database.getTableStructure(self.mb_tableName)
    # TODO: Find primary key and determine if it's an auto-increment field.
    # set mb_primaryKey and mb_autoIncrement
    # If there's no WC, exit with a record of column defaults.
    if wc == "" and type(wc) != dict:
        functions.debug("Loaded blank model")
        self.mb_recordData = database.assign_defaults(self.mb_tableStructure)
        print self.mb_recordData
        return
    # Generate our select query
    selectQuery = database.generate_selectQuery(self.mb_tableName, self.mb_wc, self.mb_custom_values)
    functions.debug("Fetching record")
    result = database.select(selectQuery[0], selectQuery[1])
    self.mb_recordData = result
    functions.debug("Got record.")
def generate_updateQuery(tableName, newValues, wc, custom_values=""):
    """Build a parameterized UPDATE statement.

    tableName     : table to update
    newValues     : dict of column -> new value
    wc            : where-clause, either a dict of column -> value or a raw
                    SQL string
    custom_values : extra values appended for placeholders in a string wc

    Returns (query, values).
    """
    functions.debug("Generating an update query...")
    # Build our update query
    query = "UPDATE " + tableName + " SET "
    values = []
    for key in newValues.keys():
        query = str(query) + str(key) + " = %s, "  # Key goes in the query
        values.append(newValues[key])  # Value goes in separate list
    # Trim the last comma and then add the where-clause
    query = query[:-2] + " WHERE "
    if type(wc) == dict:
        functions.debug("Given dictionary WC -- " + str(wc))
        # Add our keys to the statement and then we'll put the values in a tuple
        for key in wc.keys():
            query = str(query) + str(key) + '= %s AND '
            values.append(wc[key])
        # BUG FIX: trim the trailing "AND " here, inside the dict branch.
        # Previously this trim ran unconditionally at the end, which chopped
        # the last 4 characters off a *string* where-clause (cf. the
        # companion generate_selectQuery, which trims only for dicts).
        query = query[:-4]
    elif type(wc) == str:
        functions.debug("Given String WC -- " + str(wc))
        query = str(query) + str(wc)
    functions.debug("custom_values: " + str(custom_values))
    for value in custom_values:
        values.append(value)
    return (query, values)
def load_data(col_map, data):
    """Loads the data into the database.

    Parameters
    ----------
    col_map : [str,]
        The columns to load the data into
    data : bytes, bytearray, str
        The data to load
    """
    if data is not None:
        values = f.raw_input_to_list(data)
        if values:
            # If the data has more values than the header provided, ignore
            # the end (green data set has that)
            while len(values) > len(col_map):
                f.debug(
                    "Removing extra row value entry not present in the header."
                )
                values.pop()
            cfg.input_data.append(values)
    batch_full = len(cfg.input_data) == cfg.batch_size
    flush_requested = data is None
    if batch_full or flush_requested:
        f.debug("Executing statement:")
        stmt = generate_statement(col_map)
        f.debug(stmt)
        cur = cfg.conn.cursor()
        cur.executemany(stmt, cfg.input_data)
        f.debug("Commit")
        cfg.conn.commit()
        cur.close()
        f.verbose("{0} rows loaded".format(len(cfg.input_data)))
        cfg.input_data.clear()
def setLevel(self, levelNum):
    """Activate level *levelNum* (1-based) and update the speed indicator.

    An out-of-range number falls back to the currently active level.
    """
    # set the level to the levelNum level in the list and make it active,
    # also set the framesTillNext to next level
    try:
        self.activeLevel = self.levelList[levelNum-1]
        self.currentLevel = levelNum
    except IndexError:
        functions.debug("no more levels, fellback to current")
    self.activeLevel.makeActive()
    self.framesTillNext = self.activeLevel.length
    functions.debug("scroller.dx:" + str(scroller.dx))
    if self.speedInd:
        # BUG FIX: guard on a zero speed *range* rather than list length.
        # Previously, two or more levels sharing the same speed made
        # maxSpeed == minSpeed and the percentage math raised
        # ZeroDivisionError. (A single level always has max == min, so the
        # old len == 1 case is still covered.)
        if self.maxSpeed == self.minSpeed:
            self.speedInd.setPercentage(100)
        else:
            self.speedInd.setPercentage(
                ((self.activeLevel.speed - self.minSpeed)
                 / float(self.maxSpeed - self.minSpeed)) * 100)
def read_and_load_file(file):
    """Reads and loads file.

    Parameters
    ----------
    file : file_object
        The file to load
    """
    # The first line is the header; it becomes the column map.
    col_map = f.raw_input_to_list(file.readline(), True)
    f.debug("Column map: {0}".format(col_map))
    try:
        for line in file:
            load_data(col_map, line)
        # A None payload tells load_data to flush the remaining buffer.
        load_data(col_map, None)
    except Exception as err:
        print("Error in file: {0}".format(file.name), err)
def setup_nextep(self):
    """Resolve the episode id list and decide whether to pre-download the
    next episode (`next_dl`)."""
    try:
        if self.get("url2"):
            debug("[setup_nextep]: url2")
            self.ids_video = urllib.unquote_plus(self.get("url2")).split(',')
        else:
            debug("[setup_nextep]: not url2")
            self.ids_video = self.get_ids()
    except:
        # Best effort: keep whatever ids_video already holds.
        pass
    enabled = self.__settings__.getSetting('next_dl') == 'true'
    if enabled and self.ids_video and len(self.ids_video) > 1:
        self.next_dl = True
    else:
        self.next_dl = False
    log('[AnteoPlayer]: nextdl - %s, ids_video - %s' % (str(self.next_dl), str(self.ids_video)))
def resume_data(self):
    """Request the torrent's resume data from libtorrent and persist it.

    Pauses the session while waiting for the save_resume_data alert and
    restores the previous pause state afterwards. Returns early if the
    handle is invalid, metadata is missing, or no resume save is needed.
    """
    wasPaused = self.session.is_paused()
    self.session.pause()
    self.save_resume_data = None
    try:
        if not self.torrentHandle.is_valid():
            return
        status = self.torrentHandle.status()
        if not status.has_metadata:
            return
        if not status.need_save_resume:
            return
        log('[save_resume_data]: waiting for alert...')
        self.torrentHandle.save_resume_data(
            self.lt.save_resume_flags_t.flush_disk_cache)
        # Poll the alert queue until the matching success/failure alert
        # arrives.
        received = False
        while not received:
            self.session.wait_for_alert(1000)
            a = self.session.pop_alert()
            log('[save_resume_data]: [' + str(type(a)) + '] the alert ' + str(a) + ' is received')
            if type(a) == self.lt.save_resume_data_alert:
                received = True
                debug('[save_resume_data]: ' + str(dir(a)))
                self.save_resume_data = self.lt.bencode(a.resume_data)
                log('[save_resume_data]: the torrent resume data are received'
                    )
                # Best-effort write of the bencoded data to disk.
                try:
                    resumeFileHandler = xbmcvfs.File(
                        self.resume_data_path(), "w+b")
                    resumeFileHandler.write(self.save_resume_data)
                    resumeFileHandler.close()
                    log('[save_resume_data]: the torrent resume data to file'
                        + self.resume_data_path())
                except:
                    log('[save_resume_data]: failed to save the torrent resume data to file'
                        )
            elif type(a) == self.lt.save_resume_data_failed_alert:
                received = True
                log('[save_resume_data]: save_resume_data() failed')
        log('[save_resume_data]: done.')
    finally:
        # Only resume if the session was running before we paused it.
        if not wasPaused:
            self.session.resume()
def switchOrg(person, org):
    """Swap at most one of person's active accounts on org's market for an
    account with org, based on a utility comparison.

    Accounts are shuffled first so the candidate is picked at random.
    NOTE(review): the switch fires when the *existing* org scores higher
    than the new one — confirm the comparison direction is intended.
    """
    # still works on random utility, after selecting a random org
    f.shuffleList(person.accounts)
    marketID = org.marketID
    utility = f.calc_utility(person, org)
    for accountID in person.accounts:
        account = p.ACCOUNTS[accountID]
        if account.marketID != marketID or account.status != p.ACTIVE_1:
            continue
        if f.calc_utility(person, p.IDENTITY_MARKET[account.orgID]) > utility:
            f.debug(0, "old account" + str(account.displayACCOUNT()))
            f.end_account(account)
            f.create_account(person, org)
            f.debug(1, "switched account" + str(person.displayPERSON()))
            break
    return 0
def generate_insertQuery(tableName, newValues):
    """Build a parameterized INSERT statement for *tableName*.

    newValues : dict of column -> value (values are stringified).
    Returns (query, values).
    """
    functions.debug("Generating an insert query...")
    # Build the column list and the matching placeholder list in one pass.
    query = "INSERT INTO " + tableName + " ("
    queryValuesPiece = " VALUES ("
    values = []
    for key in newValues.keys():
        query += str(key) + ","
        queryValuesPiece += "%s,"
        values.append(str(newValues[key]))
    # Drop each trailing comma and stitch the two halves together.
    query = query[:-1] + ") " + queryValuesPiece[:-1] + ")"
    return (query, values)
def switchOrg(person, org):
    """Swap at most one of person's active accounts on org's market for an
    account with org, based on a utility comparison.

    NOTE(review): the switch fires when the *existing* org scores higher
    than the new one — confirm the comparison direction is intended.
    """
    # still works on random utility, after selecting a random org
    f.shuffleList(person.accounts)
    switch = False
    marketID = org.marketID
    utility = f.calc_utility(person, org)
    for accountID in person.accounts:
        if switch:
            continue
        account = p.ACCOUNTS[accountID]
        is_candidate = (account.marketID == marketID) & (account.status == p.ACTIVE_1)
        if is_candidate and f.calc_utility(person, p.IDENTITY_MARKET[account.orgID]) > utility:
            f.debug(0, "old account" + str(account.displayACCOUNT()))
            f.end_account(account)
            f.create_account(person, org)
            f.debug(1, "switched account" + str(person.displayPERSON()))
            switch = True
    return 0
def generate_selectQuery(tableName, wc, custom_values=""):
    """Build a parameterized single-record SELECT for *tableName*.

    wc may be a dict (column -> value, AND-joined, LIMIT 1 applied) or a
    raw SQL string (custom_values supplies its placeholder values).
    Returns (query, values).
    """
    query = "SELECT * FROM " + tableName + " WHERE "
    values = []
    if type(wc) == dict:
        functions.debug("Given dictionary WC")
        # Add our keys to the statement; values go into a separate list.
        for key in wc.keys():
            query = query + key + '= %s AND '
            values.append(wc[key])
        # Trim off the last "AND" then limit us to one record.
        query = query[:-4] + " LIMIT 1"
    elif type(wc) == str:
        functions.debug("Given String WC")
        query = query + wc
        values = custom_values
    return query, values
def resume_data(self):
    """Request the torrent's resume data from libtorrent and persist it.

    Pauses the session while waiting for the save_resume_data alert and
    restores the previous pause state afterwards. Returns early if the
    handle is invalid, metadata is missing, or no resume save is needed.
    """
    wasPaused = self.session.is_paused()
    self.session.pause()
    self.save_resume_data = None
    try:
        if not self.torrentHandle.is_valid():
            return
        status = self.torrentHandle.status()
        if not status.has_metadata:
            return
        if not status.need_save_resume:
            return
        log('[save_resume_data]: waiting for alert...')
        self.torrentHandle.save_resume_data(self.lt.save_resume_flags_t.flush_disk_cache)
        # Poll the alert queue until the matching success/failure alert arrives.
        received = False
        while not received:
            self.session.wait_for_alert(1000)
            a = self.session.pop_alert()
            log('[save_resume_data]: [' + str(type(a)) + '] the alert ' + str(a) + ' is received')
            if type(a) == self.lt.save_resume_data_alert:
                received = True
                debug('[save_resume_data]: ' + str(dir(a)))
                self.save_resume_data = self.lt.bencode(a.resume_data)
                log('[save_resume_data]: the torrent resume data are received')
                # Best-effort write of the bencoded data to disk.
                try:
                    resumeFileHandler = xbmcvfs.File(self.resume_data_path(), "w+b")
                    resumeFileHandler.write(self.save_resume_data)
                    resumeFileHandler.close()
                    log('[save_resume_data]: the torrent resume data to file' + self.resume_data_path())
                except:
                    log('[save_resume_data]: failed to save the torrent resume data to file')
            elif type(a) == self.lt.save_resume_data_failed_alert:
                received = True
                log('[save_resume_data]: save_resume_data() failed')
        log('[save_resume_data]: done.')
    finally:
        # Only resume if the session was running before we paused it.
        if not wasPaused:
            self.session.resume()
def generate_table_sql(file_names, column_data_type):
    """Generates SQL for the table to load data.

    Parameters
    ----------
    file_names : str
        The file_names to scan for columns
    column_data_type : str
        The column data type to use
    """
    col_list = []
    for file_name in file_names:
        f.debug("Reading file {0}".format(file_name))
        with f.open_file(file_name) as file:
            reader = f.get_csv_reader(file)
            columns_to_add = f.read_header(reader)
            f.debug("Columns to add {0}".format(columns_to_add))
            # Add columns one by one, implicitly removing duplicates when
            # going over multiple files.
            for col in columns_to_add:
                if col not in col_list:
                    col_list.append(col)
    print_table_and_columns(col_list, column_data_type)
def __init__(self, userStorageDirectory, torrentUrl, params={}):
    """Create the torrent player and drive the whole play/next-episode
    lifecycle, then clean up storage or report seeding state.

    NOTE(review): mutable default for params is shared across calls — safe
    only if never mutated; confirm before changing.
    """
    print '!!!!!!!!!!!!!!!!!! BORN ' + self.__class__.__name__
    self.userStorageDirectory = userStorageDirectory
    self.torrentUrl = torrentUrl
    xbmc.Player.__init__(self)
    log("[TorrentPlayer] Initalized")
    self.params = params
    self.get = self.params.get
    self.contentId = int(self.get("url"))
    self.torrent = Downloader.Torrent(self.userStorageDirectory, self.torrentUrl, self.torrentFilesDirectory).player
    # Episode id list either comes pre-encoded in "url2" or is derived.
    try:
        if self.get("url2"):
            self.ids_video = urllib.unquote_plus(
                self.get("url2")).split(',')
        else:
            self.ids_video = self.get_ids()
    except:
        pass
    self.init()
    self.setup_torrent()
    if self.buffer():
        # Play episodes until there is no (pre-downloaded) next one.
        while True:
            if self.setup_play():
                debug('************************************* GOING LOOP')
                self.torrent.startSession()
                self.torrent.continueSession(self.contentId)
                self.loop()
            else:
                break
            debug('************************************* GO NEXT?')
            if self.next_dl and self.next_dling and isinstance(
                    self.next_contentId, int) and self.iterator == 100:
                self.contentId = self.next_contentId
                continue
            debug('************************************* NO! break')
            break
    self.torrent.stopSession()
    self.torrent.threadComplete = True
    self.torrent.checkThread()
    # Either wipe temporary storage or leave files and report status.
    if '1' != self.__settings__.getSetting(
            "keep_files"
    ) and 'Saved Files' not in self.userStorageDirectory:
        xbmc.sleep(1000)
        clearStorage(self.userStorageDirectory)
    else:
        if self.seeding_status:
            showMessage(
                self.localize('Information'),
                self.localize(
                    'Torrent is seeding. To stop it use Download Status.'),
                forced=True)
        else:
            if self.seeding:
                self.db_delete()
            showMessage(self.localize('Information'),
                        self.localize('Torrent downloading is stopped.'),
                        forced=True)
def setup_subs(self, label, path):
    """Download the torrent's subtitle files and copy them next to the video.

    label : the video file's name, used to find matching subtitle entries.
    path  : full path of the playing video; subtitles are renamed so Kodi
            picks them up alongside it.
    """
    iterator = 0
    subs = self.torrent.getSubsIds(label)
    debug('[setup_subs] subs: ' + str(subs))
    if len(subs) > 0:
        self.torrent.startSession()
        showMessage(
            self.localize('Information'),
            self.localize('Downloading and copy subtitles. Please wait.'))
        for ind, title in subs:
            self.torrent.continueSession(ind)
            # Wait for the torrent to report 100% before copying.
            while iterator < 100:
                xbmc.sleep(1000)
                self.torrent.debug()
                status = self.torrent.torrentHandle.status()
                iterator = int(status.progress * 100)
        # xbmc.sleep(2000)
        for ind, title in subs:
            folder = title.split(os.sep)[0]
            temp = os.path.basename(title)
            # BUG FIX: the original used str.lstrip(folder + os.sep), which
            # strips a *character set*, not a prefix, and could eat leading
            # characters of the sub-directory name. Remove the exact prefix.
            prefix = folder + os.sep
            sub_dir = os.path.dirname(title)
            if sub_dir.startswith(prefix):
                sub_dir = sub_dir[len(prefix):]
            # Fold the remaining directory path into the file name so the
            # subtitle sits flat next to the video.
            addition = sub_dir.replace(os.sep, '.').replace(' ', '_').strip()
            ext = temp.split('.')[-1]
            temp = temp[:len(temp) - len(ext) - 1] + '.' + addition + '.' + ext
            newFileName = os.path.join(
                ensure_str(os.path.dirname(decode_str(path))),
                ensure_str(temp))
            debug('[setup_subs]: {} {}'.format(newFileName, title))
            if not xbmcvfs.exists(newFileName):
                fileName = os.path.join(
                    ensure_str(
                        os.path.dirname(os.path.dirname(
                            decode_str(path)))),
                    ensure_str(title))
                xbmcvfs.copy(fileName, newFileName)
def __init__(self, userStorageDirectory, torrentUrl, params={}):
    """Create the torrent player (variant with seek support, watched-history
    recording, and a yes/no prompt before auto-playing the next episode).

    NOTE(review): mutable default for params is shared across calls — safe
    only if never mutated; confirm before changing.
    """
    self.userStorageDirectory = userStorageDirectory
    self.torrentUrl = torrentUrl
    xbmc.Player.__init__(self)
    log("[TorrentPlayer] Initalized")
    self.params = params
    self.get = self.params.get
    self.contentId = int(self.get("url"))
    # Optional start position in seconds.
    if self.get("seek"):
        self.seek = int(self.get("seek"))
        log('[TorrentPlayer] Seek=' + str(self.seek))
    self.torrent = Downloader.Torrent(self.userStorageDirectory, self.torrentUrl, self.torrentFilesDirectory).player
    # Episode id list either comes pre-encoded in "url2" or is derived.
    try:
        if self.get("url2"):
            self.ids_video = urllib.unquote_plus(self.get("url2")).split(',')
        else:
            self.ids_video = self.get_ids()
    except:
        pass
    self.init()
    self.setup_torrent()
    if self.buffer():
        # Play episodes until there is no (pre-downloaded) next one.
        while True:
            if self.setup_play():
                debug('************************************* GOING LOOP')
                self.torrent.startSession()
                self.torrent.continueSession(self.contentId)
                self.loop()
                # Record watched progress for this episode.
                WatchedHistoryDB().add(
                    self.basename,
                    foldername(self.torrent.getContentList()[self.contentId]['title']),
                    self.watchedTime, self.totalTime, self.contentId,
                    self.fullSize / 1024 / 1024)
            else:
                break
            debug('************************************* GO NEXT?')
            if self.next_dl and self.next_dling and isinstance(self.next_contentId, int) and self.iterator == 100:
                # Ask before auto-playing unless next_play is enabled.
                if not self.next_play:
                    xbmc.sleep(3000)
                    if not xbmcgui.Dialog().yesno(
                            self.localize('python-libtorrent'),
                            self.localize('Would you like to play next episode?'),
                            self.display_name):
                        break
                self.contentId = self.next_contentId
                continue
            debug('************************************* NO! break')
            break
    self.torrent.stopSession()
    self.torrent.threadComplete = True
    self.torrent.checkThread()
    # Either wipe temporary storage or leave files and report status.
    if '1' != self.__settings__.getSetting("keep_files") and 'Saved Files' not in self.userStorageDirectory:
        xbmc.sleep(1000)
        clearStorage(self.userStorageDirectory)
    else:
        if self.seeding_status:
            # NOTE(review): this message string was broken across lines in
            # the extracted source; rejoined to match the sibling
            # implementations of this constructor.
            showMessage(self.localize('Information'),
                        self.localize('Torrent is seeding. To stop it use Download Status.'),
                        forced=True)
        else:
            if self.seeding:
                self.db_delete()
            showMessage(self.localize('Information'),
                        self.localize('Torrent downloading is stopped.'),
                        forced=True)
def select(query,values): # Make sure values is a tuple if type(values) != tuple: values = tuple(values) # Create a tuple #Run the query and see what happens... try: cur.execute(query, values) result = {} columns = tuple([d[0].decode('utf8') for d in cur.description]) for row in cur: field = zip(columns, row) print type(field) for column in field: result[column[0]] = str(column[1]) print str(result) except Exception, e: functions.debug("Error getting record") exit("ModelBuddy Database Driver Error: " + str(e) + "\n modelbuddy.database.select("+ str(query) + ", " + str(values)+")")
def set(self, values):
    """Update fields of the in-memory record; unknown keys are reported
    and skipped. Nothing is written to the database until save()/commit()."""
    for key in values:
        # TODO: Don't allow modifying of an auto-increment primary key.
        # check if it's not a duplicate entry... but at the start it should
        # be read only.
        if key in self.mb_recordData:
            self.mb_recordData[key] = values[key]
            functions.debug("Updated key " + str(key) + " with value " + str(values[key]))
        else:
            functions.debug("Uh oh! Key: " + str(key) + " does not exist in the record!")
    functions.debug("Record data set. Use save() or commit() to write to database")
def __init__(self, userStorageDirectory, torrentUrl, params={}):
    """Create the torrent player (variant with watched-history recording
    and no startup banner) and drive the whole play/next-episode lifecycle.

    NOTE(review): mutable default for params is shared across calls — safe
    only if never mutated; confirm before changing.
    """
    self.userStorageDirectory = userStorageDirectory
    self.torrentUrl = torrentUrl
    xbmc.Player.__init__(self)
    log("[TorrentPlayer] Initalized")
    self.params = params
    self.get = self.params.get
    self.contentId = int(self.get("url"))
    self.torrent = Downloader.Torrent(self.userStorageDirectory, self.torrentUrl, self.torrentFilesDirectory).player
    # Episode id list either comes pre-encoded in "url2" or is derived.
    try:
        if self.get("url2"):
            self.ids_video = urllib.unquote_plus(self.get("url2")).split(",")
        else:
            self.ids_video = self.get_ids()
    except:
        pass
    self.init()
    self.setup_torrent()
    if self.buffer():
        # Play episodes until there is no (pre-downloaded) next one.
        while True:
            if self.setup_play():
                debug("************************************* GOING LOOP")
                self.torrent.startSession()
                self.torrent.continueSession(self.contentId)
                self.loop()
                # Record watched progress for this episode.
                WatchedHistoryDB().add(
                    self.basename, self.watchedTime, self.totalTime, self.contentId, self.fullSize / 1024 / 1024
                )
            else:
                break
            debug("************************************* GO NEXT?")
            if self.next_dl and self.next_dling and isinstance(self.next_contentId, int) and self.iterator == 100:
                self.contentId = self.next_contentId
                continue
            debug("************************************* NO! break")
            break
    self.torrent.stopSession()
    self.torrent.threadComplete = True
    self.torrent.checkThread()
    # Either wipe temporary storage or leave files and report status.
    if "1" != self.__settings__.getSetting("keep_files") and "Saved Files" not in self.userStorageDirectory:
        xbmc.sleep(1000)
        clearStorage(self.userStorageDirectory)
    else:
        if self.seeding_status:
            showMessage(
                self.localize("Information"),
                self.localize("Torrent is seeding. To stop it use Download Status."),
                forced=True,
            )
        else:
            if self.seeding:
                self.db_delete()
            showMessage(self.localize("Information"), self.localize("Torrent downloading is stopped."), forced=True)
def joinOrg(person, org):
    """Open an account for *person* with *org*, switching an existing one
    when the market's account cap has been reached."""
    numAcc = f.num_accounts(person, org.marketID)
    if f.already_account(person, org) != False:
        f.debug(0, "Already an account with org " + str(org.orgID))
    elif numAcc < p.ENGAGE_MAX[org.marketID]:
        account = f.create_account(person, org)
        f.debug(0, "Person:%s Joining Org:%s " % (person.personID, org.orgID))
    else:
        f.debug(0, "TOO many accounts: NEED to SWITCH")
        switchOrg(person, org)
    return 0
def __init__(self, userStorageDirectory, torrentUrl, params={}):
    """Start torrent-backed playback for the content id in params['url'].

    Variant without watch-history recording: torrent setup, buffering,
    the playback loop with next-episode chaining, then cleanup/seeding.

    NOTE(review): mutable default ``params={}`` — only read here.
    NOTE(review): source line was collapsed; indentation reconstructed.
    """
    # Python 2 print statement; debug trace of object construction
    print '!!!!!!!!!!!!!!!!!! BORN '+self.__class__.__name__
    self.userStorageDirectory = userStorageDirectory
    self.torrentUrl = torrentUrl
    xbmc.Player.__init__(self)
    log("[TorrentPlayer] Initalized")
    self.params = params
    self.get = self.params.get
    self.contentId = int(self.get("url"))
    self.torrent = Downloader.Torrent(self.userStorageDirectory, self.torrentUrl, self.torrentFilesDirectory).player
    try:
        # "url2" optionally carries a comma-separated id list for chaining
        if self.get("url2"):
            self.ids_video = urllib.unquote_plus(self.get("url2")).split(',')
        else:
            self.ids_video = self.get_ids()
    except:
        # best-effort: failure only disables next-episode logic
        pass
    self.init()
    self.setup_torrent()
    if self.buffer():
        while True:
            if self.setup_play():
                debug('************************************* GOING LOOP')
                self.torrent.startSession()
                self.torrent.continueSession(self.contentId)
                self.loop()
            else:
                break
            debug('************************************* GO NEXT?')
            # chain only when the current download finished (iterator == 100)
            if self.next_dl and self.next_dling and isinstance(self.next_contentId, int) and self.iterator == 100:
                self.contentId = self.next_contentId
                continue
            debug('************************************* NO! break')
            break
    self.torrent.stopSession()
    self.torrent.threadComplete = True
    self.torrent.checkThread()
    # purge downloaded data unless the user keeps files or saved explicitly
    if '1' != self.__settings__.getSetting("keep_files") and 'Saved Files' not in self.userStorageDirectory:
        xbmc.sleep(1000)
        clearStorage(self.userStorageDirectory)
    else:
        if self.seeding_status:
            showMessage(self.localize('Information'), self.localize('Torrent is seeding. To stop it use Download Status.'), forced=True)
        else:
            if self.seeding:
                self.db_delete()
            showMessage(self.localize('Information'), self.localize('Torrent downloading is stopped.'), forced=True)
def joinOrg(person, org):
    """Attach person to org: open an account when under the engagement
    cap, switch orgs when the cap is reached, or log the duplicate.

    Always returns 0.
    """
    account_count = f.num_accounts(person, org.marketID)
    if f.already_account(person, org) == False:
        # inverted branch order relative to the cap check
        if account_count >= p.ENGAGE_MAX[org.marketID]:
            f.debug(0, "TOO many accounts: NEED to SWITCH")
            switchOrg(person, org)
        else:
            account = f.create_account(person, org)
            f.debug(0, "Person:%s Joining Org:%s " % (person.personID, org.orgID))
    else:
        f.debug(0, "Already an account with org " + str(org.orgID))
    return 0
def run_simulation():
    """Run the full multi-year simulation and write the results.

    Loops over years/months/steps updating people, threats, accounts and
    orgs, applies yearly aging and (for all but the last year) population
    regeneration, then plots ages and dumps the CSV report.

    Returns 0 on completion.

    NOTE(review): loop nesting reconstructed from a collapsed source line —
    confirm which calls belong to the step vs. the year loop.
    """
    initiate()
    for year in xrange(p.YEARS):
        # visual separator between simulated years in the debug log
        f.debug(1, ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;")
        for month in xrange(p.MONTHS):
            f.debug(1, "----- Year:"+str(year)+" Month:"+str(month))
            for step in xrange(p.STEPS):
                updatePeople()
                updateThreats()
                updateAccounts()
                updateOrgs(month, year)
        # yearly bookkeeping
        agePeople()
        friends()
        if year < p.YEARS-1:
            regenPOP()
        ageOrgs()
    f.debug(1, "run simulation")
    plotAges()
    o.writeCSV()
    return 0
def getTableStructure(tableName):
    """Fetch the column layout of tableName via DESCRIBE.

    Returns
    -------
    tuple
        (tableStructure, primaryKey): the DESCRIBE result rows (or "" when
        the query failed, preserving the legacy sentinel) and the name of
        the primary-key column ("" when none was found).
    """
    functions.debug("Getting table structure for " + tableName)
    # Legacy sentinels: callers distinguish failure by the empty string.
    tableStructure = ""
    primaryKey = ""
    try:
        # NOTE(review): tableName is concatenated into SQL — safe only if it
        # never comes from untrusted input; confirm at the call sites.
        # Fix: original assigned cur.execute()'s return value to
        # tableStructure and immediately overwrote it with fetchall().
        cur.execute("DESCRIBE " + tableName)
        tableStructure = cur.fetchall()
        # DESCRIBE row layout: (Field, Type, Null, Key, ...) — index 3 is Key
        for row in tableStructure:
            if row[3] == "PRI":
                primaryKey = row[0]
                functions.debug(primaryKey + " is the primary key.")
                break
    except Exception:
        # best-effort lookup: log and fall through to the sentinel values
        functions.debug("Could not get table structure for " + tableName)
    return tableStructure, primaryKey
def run_simulation():
    """Execute the complete simulation and emit its outputs.

    Iterates YEARS x MONTHS x STEPS stepping every agent subsystem,
    performs per-year aging plus population regeneration for all but the
    final year, then renders the age plot and writes the CSV report.

    Returns 0 on completion.
    """
    YEAR_SEPARATOR = ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"
    initiate()
    last_year = p.YEARS - 1
    for year in xrange(p.YEARS):
        f.debug(1, YEAR_SEPARATOR)
        for month in xrange(p.MONTHS):
            f.debug(1, "----- Year:%s Month:%s" % (year, month))
            for _step in xrange(p.STEPS):
                updatePeople()
                updateThreats()
                updateAccounts()
                updateOrgs(month, year)
        agePeople()
        friends()
        if year < last_year:
            regenPOP()
        ageOrgs()
    f.debug(1, "run simulation")
    plotAges()
    o.writeCSV()
    return 0
def debug(self, msg):
    """Forward msg to the module-level debug() logger unchanged."""
    debug(msg)
def run(cmd):
    """Runs csv2db.

    This function is the main entry point for csv2db.

    Parameters
    ----------
    cmd : str array
        The arguments passed

    Returns
    -------
    None
        This revision returns no exit code; errors are printed.
    """
    args = parse_arguments(cmd)

    # Set verbose and debug output flags
    cfg.verbose = args.verbose
    if args.debug:
        cfg.verbose = True
        cfg.debug = True

    # Set table name
    cfg.table_name = args.table

    # Find all files
    f.verbose("Finding file(s).")
    file_names = f.find_all_files(args.file)
    f.debug("Found {0} files.".format(len(file_names)))
    f.debug(file_names)

    if args.command.startswith("gen"):
        f.verbose("Generating CREATE TABLE statement.")
        generate_table_sql(file_names, args.column_type)
    else:
        # Set DB type
        f.debug("DB type: {0}".format(args.dbtype))
        cfg.db_type = args.dbtype

        # Set DB default port, if needed
        if args.port is None:
            args.port = f.get_default_db_port(args.dbtype)
            f.debug("Using default port {0}".format(args.port))

        # Set batch size
        f.debug("Batch size: {0}".format(args.batch))
        cfg.batch_size = int(args.batch)

        f.verbose("Establishing database connection.")
        f.debug("Database details:")
        f.debug({
            "dbtype": args.dbtype,
            "user": args.user,
            "host": args.host,
            "port": args.port,
            "dbname": args.dbname
        })
        # Fix: connection and loading were under one try, so a load error
        # printed "Error connecting to the database" and KeyboardInterrupt
        # closed a cfg.conn that might never have been established.
        try:
            cfg.conn = f.get_db_connection(cfg.db_type, args.user,
                                           args.password, args.host,
                                           args.port, args.dbname)
        except Exception as err:
            print("Error connecting to the database: {0}".format(err))
            return
        try:
            load_files(file_names)
            f.verbose("Closing database connection.")
            cfg.conn.close()
        except KeyboardInterrupt:
            print("Exiting program")
            cfg.conn.close()
        except Exception as err:
            print("Error loading file(s): {0}".format(err))
            cfg.conn.close()
def Warn(msg, force=False):
    # Emit msg through debug() when debugging is enabled or force is set.
    # NOTE(review): `debug` is compared to the string 'true' here yet is
    # also called as a function below — presumably a module-level setting
    # coexists with (or shadows) a debug() helper in the enclosing module;
    # confirm, as written this branch can only fire via `force`.
    if debug == 'true' or force:
        debug(msg, True)
def __init__(self, userStorageDirectory, torrentUrl, params={}):
    """Start torrent-backed playback for the content id in params['url'].

    Variant with seek support, watch-history recording before and after
    the playback loop, a confirmation dialog before auto-playing the next
    episode, and a search-window reload on stop.

    NOTE(review): mutable default ``params={}`` — only read here.
    NOTE(review): source was collapsed and split mid-string by extraction;
    indentation and the "NO! break" literal reconstructed.
    """
    self.userStorageDirectory = userStorageDirectory
    self.torrentUrl = torrentUrl
    xbmc.Player.__init__(self)
    log("[TorrentPlayer] Initalized")
    self.params = params
    self.get = self.params.get
    self.contentId = int(self.get("url"))
    # optional start position (seconds) passed by the caller
    if self.get("seek"):
        self.seek = int(self.get("seek"))
        log('[TorrentPlayer] Seek=' + str(self.seek))
    self.torrent = Downloader.Torrent(self.userStorageDirectory, self.torrentUrl, self.torrentFilesDirectory).player
    try:
        # "url2" optionally carries a comma-separated id list for chaining
        if self.get("url2"):
            self.ids_video = urllib.unquote_plus(
                self.get("url2")).split(',')
        else:
            self.ids_video = self.get_ids()
    except:
        # best-effort: failure only disables next-episode logic
        pass
    self.init()
    self.setup_torrent()
    if self.buffer():
        while True:
            if self.setup_play():
                debug('************************************* GOING LOOP')
                self.torrent.startSession()
                self.torrent.continueSession(self.contentId)
                # record watch progress before and after playback
                # (size reported in MiB)
                WatchedHistoryDB().add(
                    self.basename, self.torrentUrl,
                    foldername(self.torrent.getContentList()[
                        self.contentId]['title']),
                    self.watchedTime, self.totalTime, self.contentId,
                    self.fullSize / 1024 / 1024)
                self.loop()
                WatchedHistoryDB().add(
                    self.basename, self.torrentUrl,
                    foldername(self.torrent.getContentList()[
                        self.contentId]['title']),
                    self.watchedTime, self.totalTime, self.contentId,
                    self.fullSize / 1024 / 1024)
            else:
                break
            debug('************************************* GO NEXT?')
            if self.next_dl and self.next_dling and isinstance(
                    self.next_contentId, int) and self.iterator == 100:
                # unless auto-play is on, ask the user before continuing
                if not self.next_play:
                    xbmc.sleep(3000)
                    if not xbmcgui.Dialog().yesno(
                            self.localize('python-libtorrent'),
                            self.localize(
                                'Would you like to play next episode?'),
                            self.display_name):
                        break
                self.contentId = self.next_contentId
                continue
            debug('************************************* NO! break')
            break
    self.torrent.stopSession()
    self.torrent.threadComplete = True
    self.torrent.checkThread()
    # purge downloaded data unless the user keeps files or saved explicitly
    if '1' != self.__settings__.getSetting(
            "keep_files") and 'Saved Files' not in self.userStorageDirectory:
        xbmc.sleep(1000)
        clearStorage(self.userStorageDirectory)
    else:
        if self.seeding_status:
            showMessage(
                self.localize('Information'),
                self.localize(
                    'Torrent is seeding. To stop it use Download Status.'))
        else:
            if self.seeding:
                self.db_delete()
            showMessage(self.localize('Information'),
                        self.localize('Torrent downloading is stopped.'))
    loadsw_onstop()  # Reload Search Window
def run(cmd):
    """Runs csv2db.

    This function is the main entry point for csv2db.

    Parameters
    ----------
    cmd : str array
        The arguments passed

    Returns
    -------
    int
        The exit code.
    """
    args = parse_arguments(cmd)

    # Set verbose and debug output flags
    cfg.verbose = args.verbose
    if args.debug:
        cfg.verbose = True
        cfg.debug = True

    # Set table name
    cfg.table_name = args.table
    f.debug("Table name: {0}".format(cfg.table_name))

    # Set column separator characters(s)
    cfg.column_separator = args.separator
    f.debug("Column separator: {0}".format(cfg.column_separator))

    # Set quote character(s)
    cfg.quote_char = args.quote
    f.debug("Column escape character: {0}".format(cfg.quote_char))

    # Find all files
    f.verbose("Finding file(s).")
    file_names = f.find_all_files(args.file)
    f.verbose("Found {0} file(s).".format(len(file_names)))
    # Exit program if no files found.
    if len(file_names) == 0:
        return f.ExitCodes.SUCCESS.value
    f.debug(file_names)

    # Generate CREATE TABLE SQL
    if args.command.startswith("gen"):
        f.verbose("Generating CREATE TABLE statement.")
        try:
            generate_table_sql(file_names, args.column_type)
            return f.ExitCodes.SUCCESS.value
        except Exception as err:
            f.error("Error generating statement: {0}".format(err))
            return f.ExitCodes.GENERIC_ERROR.value
    # Load data
    else:
        # Set DB type
        f.debug("DB type: {0}".format(args.dbtype))
        cfg.db_type = args.dbtype
        cfg.direct_path = args.directpath

        # Set DB default port, if needed
        if args.port is None:
            args.port = f.get_default_db_port(args.dbtype)
            f.debug("Using default port {0}".format(args.port))

        # Set batch size
        f.debug("Batch size: {0}".format(args.batch))
        cfg.batch_size = int(args.batch)
        # If batch size is lower than 10k and direct path has been
        # specified, overwrite batch size to 10k.
        if cfg.direct_path and cfg.batch_size < 10000:
            f.debug(
                "Direct path was specified but batch size is less than 10000.")
            f.debug(
                "Overwriting the batch size to 10000 for direct-path load to make sense.")
            cfg.batch_size = 10000

        f.verbose("Establishing database connection.")
        f.debug("Database details:")
        f.debug({
            "dbtype": args.dbtype,
            "user": args.user,
            "host": args.host,
            "port": args.port,
            "dbname": args.dbname
        })
        # Prompt for the password only when not supplied on the command line
        if args.password is None:
            args.password = getpass.getpass()
        try:
            cfg.conn = f.get_db_connection(cfg.db_type, args.user,
                                           args.password, args.host,
                                           args.port, args.dbname)
        except Exception as err:
            f.error("Error connecting to the database: {0}".format(err))
            return f.ExitCodes.DATABASE_ERROR.value
        try:
            load_files(file_names)
            f.verbose("Closing database connection.")
            cfg.conn.close()
            # Success overall unless any individual file failed to load
            return f.ExitCodes.SUCCESS.value if not cfg.data_loading_error else f.ExitCodes.DATA_LOADING_ERROR.value
        except KeyboardInterrupt:
            print("Exiting program")
            cfg.conn.close()
            return f.ExitCodes.GENERIC_ERROR.value
        except Exception as err:
            f.error("Error loading file(s): {0}".format(err))
            cfg.conn.close()
            return f.ExitCodes.GENERIC_ERROR.value
def debug(self, msg):
    """Log msg via the global debug(), prefixed with this plugin's name."""
    prefix = '[%s] ' % self.__plugin__
    debug(prefix + msg)