def __init__(self, parent):
    ''' Build the configuration panel: wire up config/DB handles, populate
        the file-type list and show the mail page first. '''
    log.info("***ConfigPanel.init")
    gui.ConfigPanel.__init__(self, parent)
    self.config = Config.get_config()
    self.db = DB()
    self.state = ViewState
    self.update_filetype_list()
    self.clear_filetype()
    # Icon and label used by the parent window for this panel's tab.
    self.image = wx.Bitmap(os.path.join(const.PixmapDir, "configure.png"))
    self.title = _("Configuration")
    # Pre-select the first file type (if any) and display its details.
    if self.lstFileTypes.GetCount() > 0:
        self.lstFileTypes.SetSelection(0)
        self.onFileType(None)
    self.show_mail()
    self.txtMailServer.SetFocus()
    # Ensure the first notebook page is showing.
    self.nb_config.SetSelection(0)
    self.show_security()
    # Password/mail sections start hidden; presumably toggled by the
    # show_* handlers elsewhere on this panel — only the flags are set here.
    self.pwd_hidden = True
    self.mail_hidden = True
    log.trace("Done ConfigPanel.init")
def __init__(self, parent):
    ''' Build the restore panel: tree of backed-up files with restore actions. '''
    log.info("***RestorePanel.init")
    gui.RestorePanel.__init__(self, parent)
    self.db = DB()
    self.config = Config.get_config()
    # 16x16 icons for the filesystem tree: index 0 = folder, index 1 = file.
    self.images = wx.ImageList(16, 16)
    self.images.Add(
        wx.Bitmap(os.path.join(const.PixmapDir, "folder.png"), wx.BITMAP_TYPE_PNG)
    )
    self.images.Add(
        wx.Bitmap(os.path.join(const.PixmapDir, "document.png"), wx.BITMAP_TYPE_PNG)
    )
    self.fs_tree.SetImageList(self.images)
    # Looks better if this is blank.
    self.set_selected_file("")
    self.force_rebuild()
    # Icon and label used by the parent window for this panel's tab.
    self.image = wx.Bitmap(os.path.join(const.PixmapDir, "review.png"))
    self.title = _("Restore")
    # Ensure the right page is showing
    self.nb_restore.SetSelection(0)
    log.trace("Done RestorePanel.init")
def __init__(self, parent):
    ''' Build the backups panel: backup list, schedule widgets and actions. '''
    log.info("***BackupPanel.init")
    gui.BackupPanel.__init__(self, parent)
    self.btnAddFolder.SetBitmapLabel(wx.Bitmap(os.path.join(const.PixmapDir, "add.png")))
    self.db = DB()
    self.config = Config.get_config()
    self.state = ViewState
    # Populate combos and the backup list without forcing a selection yet.
    self.update_data(False)
    self.nbBackup.SetSelection(0)
    self.clear()
    self.nbBackup.Layout()
    self.Fit()
    # Default schedule choice for a fresh panel.
    self.radSchedDailyWeekly.SetValue(True)
    # Pre-select and display the first backup, if one exists.
    if self.lstItems.GetCount() > 0:
        self.lstItems.SetSelection(0)
        self.onItemSelected(None)
    # self.onNotifyEmail(None)
    # Icon and label used by the parent window for this panel's tab.
    self.image = wx.Bitmap(os.path.join(const.PixmapDir, "backup.png"))
    self.title = _("Backups")
    # Sync the advanced-schedule sub-panel visibility with the radio state.
    self.onBackupSchedule(None)
    log.trace("Done BackupPanel.init")
def __init__(self, parent, default_name=None):
    ''' History window listing past runs.

    @param parent: parent wx window
    @param default_name: optional backup name to pre-filter the run list
    '''
    gui.HistoryWindow.__init__(self, parent)
    log.trace("Starting up a history panel")
    self.db = DB()
    self.config = Config.get_config()
    self.order = const.ASC
    self.update_data(default_name)
    # Retained (disabled) column-sort experiment:
    # self.imgList = wx.ImageList(16, 16)
    # self.img_up = self.imgList.Add(wx.Bitmap("images/go-up.png", wx.BITMAP_TYPE_PNG))
    # self.img_down = self.imgList.Add(wx.Bitmap("images/go-down.png", wx.BITMAP_TYPE_PNG))
    # self.lstRuns.SetImageList(self.imgList, wx.IMAGE_LIST_SMALL)
    icon = wx.Icon(os.path.join(const.PixmapDir, "storage.png"), wx.BITMAP_TYPE_ANY)
    self.SetIcon(icon)
    # listmix.ColumnSorterMixin.__init__(self, 7)
    # self.Bind(wx.EVT_LIST_COL_CLICK, self.onColClick, self.lstRuns)
    # self.SortListItems(2, 1)
    # Ensure the right page is showing
    self.nb_history.SetSelection(0)
    self.Show()
def __init__(self, test_folder, options):
    ''' Set up the tester working area and snapshot the database state.

    @param test_folder: root folder under which a "tester" scratch area is built
    @param options: run options object, stored for later use
    '''
    self.test_folder = os.path.join(test_folder, "tester")
    self.options = options
    self.config = Config.get_config()
    # Scratch layout: stores / source files / restore target.
    self.store_folder = os.path.join(self.test_folder, "stores")
    self.files_folder = os.path.join(self.test_folder, "files")
    self.restore_folder = os.path.join(self.test_folder, "restore")
    self.db = DB()
    # Snapshot the current high-water-mark ids so a test run can later
    # recognise (and clean up) the rows it created itself.
    self.max_fs_id = self._max_id("fs_id", "fs")
    self.max_version_id = self._max_id("version_id", "versions")
    self.max_run_id = self._max_id("run_id", "runs")
    self.max_message_id = self._max_id("message_id", "messages")
    log.debug("MAX IDs", self.max_fs_id, self.max_version_id, self.max_run_id, self.max_message_id)
    # Two 200 KiB random payloads used as file contents by the tests.
    self.teststring1 = os.urandom(204800)
    self.teststring2 = os.urandom(204800)

def _max_id(self, column, table):
    ''' Return the largest value of *column* in *table*, or 0 when empty.

    Column/table names are fixed internal identifiers, not user input,
    so interpolating them into the SQL text is safe here.
    '''
    value = self.db.query("select max(%s) from %s" % (column, table), ())[0][0]
    return 0 if value is None else value
def __init__(self, parent):
    ''' Build the storage panel: list and manage configured backup stores. '''
    log.info("***StorePanel.init")
    gui.StoragePanel.__init__(self, parent)
    self.db = DB()
    self.config = Config.get_config()
    self.state = ViewState
    self.load_static_data()
    self.update_store_list()
    self.clear()
    # Pre-select and display the first store, if one exists.
    if self.lstItems.GetCount() > 0:
        self.lstItems.SetSelection(0)
        self.onItemSelected(None)
    # Sync dependent widgets with the auto-manage checkbox state.
    self.onAutoManage(None)
    # Icon and label used by the parent window for this panel's tab.
    self.image = wx.Bitmap(os.path.join(const.PixmapDir, "storage.png"))
    self.title = _("Storage")
    # FTP/DB detail sections start hidden; only the flags are set here.
    self.ftp_hidden = True
    self.db_hidden = True
    log.trace("Done StorePanel.init")
def __init__(self, app, onlyDBAccess=False):
    '''Main window init.

    When onlyDBAccess is True, only the UI skeleton, database handle and
    settings are created and every other GUI wiring step is skipped.
    '''
    QtGui.QMainWindow.__init__(self)
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    self.db = DB(self)
    self.settings = Settings(self)
    self.onlyDBAccess = onlyDBAccess
    if onlyDBAccess:
        return
    # GUI setting
    guiSettings(self)
    connectSignals(self)
    changeStyle(self)
    self.taskOpened = False
    self.app = app
    # Contexts, then the initial context's task list.
    loadContexts(self)
    self.currentContext = self.settings.getInitContext()
    selectCurrentContext(self)
    self.loadTasksList(init=True)
    # Tray icon, reminder timer, and the global window-toggle shortcut.
    self.tray = Trayicon(self)
    self.timer = TaskReminder(self)
    self.shortcuts = ShortcutsHandler(self, self.settings['keyMainWindowToggle'])
    finalizeInit(self)
    self.translate()
    self.adjustHeight(init=True)
def __init__(self):
    ''' Start the captive-portal background process: announce via syslog
        and create handles to firewall, ARP, config and database. '''
    # open syslog and notice startup
    syslog.openlog('captiveportal', logoption=syslog.LOG_DAEMON)
    syslog.syslog(syslog.LOG_NOTICE, 'starting captiveportal background process')
    # handles to ipfw, arp, the config and the internal administration
    self.ipfw = IPFW()
    self.arp = ARP()
    self.cnf = Config()
    self.db = DB()
    # Cache the configured portal zones read from the config.
    self._conf_zone_info = self.cnf.get_zones()
def __init__(self, backup_name, run_date):
    ''' Verify a run to ensure it is valid.

    @param backup_name: name of the configured backup to verify
    @param run_date: date identifying the run to look up
    @raise Exception: if no run exists for that backup/date
    '''
    self.config = Config.get_config()
    self.backup = self.config.backups[backup_name]
    # Work on a copy of the store so verification cannot disturb the
    # configured instance.
    self.store = self.config.storage[self.backup.store].copy()
    self.db = DB()
    # Find the run
    runs = self.db.runs(self.backup.name, run_date)
    if not runs:
        raise Exception(_("Verify failed: Backup run does not exist"))
    self.vrun = runs[0]
def __init__(self, name, type, options):
    ''' Prepare to run a backup event.

    @param name: name of the backup
    @param type: type (Full/Incr)
    @param options: run options; if options.dry_run is True we only print
        the files we *would have* backed up to stdout.
    @raise Exception: if the backup or its storage definition is missing.
    '''
    self.type = type
    self.dry_run = options.dry_run
    self.options = options
    self.config = Config.get_config()
    # Narrowed from bare "except:" so SystemExit/KeyboardInterrupt are not
    # swallowed while looking up the configuration.
    try:
        self.backup = self.config.backups[name]
    except Exception:
        raise Exception(_("Backup is missing or corrupt. Please reconfigure backup."))
    try:
        # Get a fresh store (a copy of the config version)
        self.store = self.config.storage[self.backup.store].copy()
    except Exception:
        raise Exception(_("Storage definition is missing. Please reconfigure backup."))
    self.db = DB()
    # Run statistics, filled in as the backup progresses.
    self.start_time = None
    self.nfiles = None
    self.nfolders = None
    self.bytes = None
    self.run_id = None
    self.backup_folder = None
    # Make sure there are no other backups running of this name
    self.lock = locking.InterProcessLock(name="Vault-%s" % self.backup.name)
    # Build a quick file exclusion list, to speed up exclusion checking
    self.excl_ext = self.build_excl_exts()
    log.debug("Exclusion List:", ",".join(self.excl_ext))
def __init__(self, parent):
    ''' Build the overview panel: summary status page refreshed on a timer. '''
    log.info("***OverviewPanel.init")
    gui.OverviewPanel.__init__(self, parent)
    self.imgStatus.SetBitmap(wx.Bitmap(os.path.join(const.PixmapDir, "status-ok.png")))
    self.db = DB()
    self.config = Config.get_config()
    self.update_data()
    # Icon and label used by the parent window for this panel's tab.
    self.image = wx.Bitmap(os.path.join(const.PixmapDir, "overview.png"))
    # Wrapped in _() for translation, consistent with every other panel title.
    self.title = _("Overview")
    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self.update_data, self.timer)
    # Update the display every 30 seconds
    self.timer.Start(30000)
    log.trace("Done OverviewPanel.init")
def setUp(self):
    # Shared config/DB fixtures; bring the schema up to date and remember
    # the existing row ids so the test can identify rows it creates.
    self.config = Config.get_config()
    self.db = DB()
    self.db.check_upgrade()
    self.mark_db_ids()
    # Scratch folder layout: source files, the store, and a restore target.
    self.test_folder = tempfile.mkdtemp()
    self.files_folder = os.path.join(self.test_folder, "files")
    self.store_folder = os.path.join(self.test_folder, "store")
    self.restore_folder = os.path.join(self.test_folder, "restore")
    utils.makedirs(self.files_folder)
    utils.makedirs(self.store_folder)
    utils.makedirs(self.restore_folder)
    utils.build_file_structure(self.files_folder, 50 * const.Kilobyte, 500 * const.Kilobyte)
    # Build a store object (dont save config)
    # Note the careful size selection - we want backups to overflow the FolderStore.
    self.store = FolderStore("teststore", "2MB", True, self.store_folder)
    self.config.storage[self.store.name] = self.store
    # Build the backup object (dont save config)
    self.backup = Backup("testbackup")
    self.backup.include_folders = [self.files_folder]
    self.backup.store = self.store.name
    self.backup.notify_msg = False
    # NOTE(review): this sets the attribute on the test case itself, not on
    # the backup - probably meant self.backup.include_packages = True. Confirm.
    self.include_packages = True
    self.config.backups[self.backup.name] = self.backup
    # build an options object for use with the backup
    self.options = BlankClass()
    self.options.dry_run = False
    self.options.message = False
    self.options.email = False
    self.options.shutdown = False
    self.options.norecurse = False
    # Stash the real passphrase so tearDown can restore it.
    self.old_pass = self.config.data_passphrase
    self.config.data_passphrase = "banana"
def __init__(self, parent, run):
    ''' Window showing the details (files, messages) of one backup run.

    @param parent: parent wx window
    @param run: the run object whose details are displayed
    '''
    gui.RunDetailsWindow.__init__(self, parent)
    log.info("Starting up a run details window")
    self.run = run
    self.db = DB()
    self.config = Config.get_config()
    self.paths = {}
    self.load_run_details()
    # Initial file listing is capped at 200 entries - presumably to keep
    # the window responsive for large runs; confirm against load_files.
    self.load_files(200)
    self.load_messages()
    icon = wx.Icon(os.path.join(const.PixmapDir, "storage.png"), wx.BITMAP_TYPE_ANY)
    self.SetIcon(icon)
    # Ensure the right page is showing
    self.nbDetails.SetSelection(0)
    self.Show()
def main():
    ''' Entry point of the "Gemas" pygame game: sets up the window and runs
        games until the user quits. (Python 2 source.) '''
    try:
        #
        # CONFIGURATION
        #
        # Database manager
        db = DB()
        # Load configuration values
        config = Config(db)
        # Spawn-probability tables for enemies and gems
        probabilidad_enemigos = db.get_probabilidad('enemigos')
        probabilidad_gemas = db.get_probabilidad('gemas')
        # Clock used to regulate frame timing
        reloj = pygame.time.Clock()
        #
        # WINDOW
        #
        # Create the window
        ventana = pygame.display.set_mode((config.ventana_ancho, config.ventana_alto))
        # Window title
        pygame.display.set_caption('Gemas')
        # Load the background (convert() to the SDL display format for efficiency)
        fondo = pygame.image.load(os.path.join(config.dir_img, 'fondo.jpg')).convert()
        # Keep starting new games until the user chooses to quit
        salir = False
        while not salir:
            #
            # SPRITES
            #
            # Dictionary of the sprites active at any moment
            sprites_activos = {}
            # Player
            jugador = Jugador(config)
            sprites_activos['jugador'] = jugador
            # Two initial enemies
            sprites_activos['enemigo'] = [Enemigo(config, 0), Enemigo(config, 1)]
            # Moment the last enemy was spawned
            ultimo_enemigo_respawn = pygame.time.get_ticks()
            # Gems: several types, each with its own spawn probability; the
            # choice is random but weighted by that probability.
            sprites_activos['gema'] = []
            for i in range(1, config.gema_max_activas + 1):
                tipo_gema = get_tipo(probabilidad_gemas)
                gema = Gema(config, tipo_gema, sprites_activos)
                sprites_activos['gema'].append(gema)
            # Moment a new gem must be spawned (0 = none scheduled)
            proximo_respawn_gema = 0
            # Scoreboard
            marcador = Marcador(config)
            sprites_activos['marcador'] = marcador
            # High score
            record = Record(config, db)
            sprites_activos['record'] = record
            # Game-over banner
            gameover = GameOver(config)
            #
            # EVENT LOOP - one iteration per frame
            #
            fin_partida = False
            while not fin_partida:
                # Milliseconds elapsed this frame; tick(framerate) also caps
                # the frame rate independently of the hardware.
                tiempo = reloj.tick(config.framerate)
                # Process the pending events
                for evento in pygame.event.get():
                    # Window close ends the program
                    if evento.type == QUIT:
                        fin_partida = True
                        salir = True
                    # ESC also ends the program
                    elif evento.type == KEYDOWN:
                        if evento.key == K_ESCAPE:
                            salir = True
                            fin_partida = True
                #
                # MOVEMENT AND SCORE
                #
                jugador.mover(tiempo, sprites_activos)
                marcador.render_puntos(jugador.puntos)
                for enemigo in sprites_activos['enemigo']:
                    enemigo.mover(tiempo, sprites_activos)
                #
                # SCREEN UPDATE
                #
                # Background at the window origin
                ventana.blit(fondo, (0, 0))
                # Redraw every active sprite
                for nombre in sprites_activos.keys():
                    # Lists of sprites are walked element by element
                    if isinstance(sprites_activos[nombre], list):
                        for elemento in sprites_activos[nombre]:
                            # A gem with no life left is dropped instead of drawn.
                            # NOTE(review): remove() while iterating the same
                            # list skips the next element - iterate a copy
                            # (sprites_activos[nombre][:]) instead. Confirm/fix.
                            if nombre == 'gema' and elemento.vida <= 0:
                                sprites_activos[nombre].remove(elemento)
                            else:
                                ventana.blit(elemento.imagen, elemento.rect)
                    else:
                        ventana.blit(sprites_activos[nombre].imagen, sprites_activos[nombre].rect)
                # Draw the finished scene
                pygame.display.flip()
                #
                # SPRITE STATE
                #
                # Is the player still alive?
                if not jugador.vivo:
                    # Persist the score
                    db.guarda_puntuacion(jugador.puntos)
                    # Short pause so the game-over banner does not pop up abruptly
                    pygame.time.delay(1000)
                    # Show the banner with a white border
                    ventana.blit(gameover.imagen, gameover.rect)
                    pygame.draw.rect(ventana, (255, 255, 255), gameover.rect.inflate(7, 5), 2)
                    # Refresh the screen
                    pygame.display.flip()
                    # and finish the game
                    fin_partida = True
                else:
                    # Player alive: spawn new gems/enemies as needed.
                    #
                    # Gem generation: while below the allowed maximum, a new
                    # gem appears every config.gema_respawn milliseconds.
                    if len(sprites_activos['gema']) < config.gema_max_activas:
                        # Schedule the spawn only once; recomputing it every
                        # frame would push it forever into the future.
                        if proximo_respawn_gema == 0:
                            # Spawn moment = now + the gem respawn delay
                            proximo_respawn_gema = pygame.time.get_ticks() + config.gema_respawn
                        # Has enough time passed to create the gem?
                        if proximo_respawn_gema <= pygame.time.get_ticks():
                            # Create the gem and add it to the active sprites
                            tipo_gema = get_tipo(probabilidad_gemas)
                            gema = Gema(config, tipo_gema, sprites_activos)
                            sprites_activos['gema'].append(gema)
                            # Clear the schedule for the next gem
                            proximo_respawn_gema = 0
                    # Enemy generation: a new enemy appears periodically; its
                    # type is random, weighted by each enemy's probability.
                    if (pygame.time.get_ticks() - ultimo_enemigo_respawn) / config.enemigo_respawn > 0:
                        tipo_enemigo = get_tipo(probabilidad_enemigos)
                        sprites_activos['enemigo'].append(Enemigo(config, tipo_enemigo))
                        # Note when the last enemy was spawned
                        ultimo_enemigo_respawn = pygame.time.get_ticks()
            #
            # END OF GAME - wait for RETURN (new game) or QUIT/ESC (exit)
            #
            if fin_partida:
                while not salir and fin_partida:
                    # Process the pending events
                    for evento in pygame.event.get():
                        # Window close ends the program
                        if evento.type == QUIT:
                            salir = True
                        # Key presses
                        elif evento.type == KEYDOWN:
                            # ESC ends the program
                            if evento.key == K_ESCAPE:
                                salir = True
                            # RETURN starts a new game
                            elif evento.key == K_RETURN:
                                fin_partida = False
        #
        # PROGRAM EXIT
        #
        # Close the database connection
        db.close()
        # Terminate
        sys.exit(0)
    except pygame.error, e:
        print '\n'
        print u'Error en Pygame: '
        print '\n\t', e, '\n'
def setUp(self):
    # Shared config/DB fixtures; bring the schema up to date and remember
    # the existing row ids so the test can identify rows it creates.
    self.config = Config.get_config()
    self.db = DB()
    self.db.check_upgrade()
    self.mark_db_ids()
    # Scratch folder layout: source files, the store, and a restore target.
    self.test_folder = tempfile.mkdtemp()
    self.files_folder = os.path.join(self.test_folder, "files")
    self.store_folder = os.path.join(self.test_folder, "store")
    self.restore_folder = os.path.join(self.test_folder, "restore")
    utils.makedirs(self.files_folder)
    utils.makedirs(self.store_folder)
    utils.makedirs(self.restore_folder)
    # Build the base set of files
    with open(os.path.join(self.files_folder, "base"), "w") as f:
        f.write("base")
    with open(os.path.join(self.files_folder, "incr"), "w") as f:
        f.write("0")
    # Credentials for the (disabled) remote store variants live in ~/.vault.
    config_file = os.path.expanduser("~/.vault")
    if not os.path.exists(config_file):
        raise Exception("Vault test configuration file (~/.vault) does not exist")
    self.store_config = ConfigParser.RawConfigParser()
    self.store_config.read(config_file)
    # FOLDER STORE
    self.store = FolderStore("teststore", "50MB", True, self.store_folder)
    # DROPBOX STORE
    # self.login = self.store_config.get("DropBox", "login")
    # self.password = self.store_config.get("DropBox", "password")
    # self.folder = self.store_config.get("DropBox", "folder")
    # self.app_key = self.store_config.get("DropBox", "app_key")
    # self.app_secret_key = self.store_config.get("DropBox", "app_secret_key")
    # self.store = DropBoxStore("teststore", 0, False, self.folder, self.login, self.password,
    #                           self.app_key, self.app_secret_key)
    # S3 STORE
    # self.key = self.store_config.get("Amazon", "aws_access_key_id")
    # self.secret_key = self.store_config.get("Amazon", "aws_secret_access_key")
    # self.bucket = self.store_config.get("Amazon", "bucket")
    # self.store = S3Store("teststore", 0, False, bucket=self.bucket, key=self.key, secret_key=self.secret_key)
    # Now record the existance of this store
    self.config.storage[self.store.name] = self.store
    # Build the backup object (dont save config)
    self.backup = Backup("testbackup")
    self.backup.include_folders = [self.files_folder]
    self.backup.store = self.store.name
    self.backup.notify_msg = False
    # Stash the real passphrase so tearDown can restore it; run encrypted.
    self.old_pass = self.config.data_passphrase
    self.config.data_passphrase = "goofy"
    self.backup.encrypt = True
    self.config.backups[self.backup.name] = self.backup
    # build an options object for use with the backup
    self.options = BlankClass()
    self.options.dry_run = False
    self.options.message = False
    self.options.email = False
    self.options.shutdown = False
    self.options.norecurse = False
    # How many cycles?
    self.cycles = 20
if v['location'] != None: print Util.hubeny_distance(v['location'], u['location']) if len(sys.argv) < 8: print '[usage]: python %s [training set] [test set] [params] [db user name] [db pass] [db name] [model file]' % sys.argv[ 0] exit() training = Users() training.load_file(sys.argv[1]) test = Users() test.load_file(sys.argv[2]) params = load_params(sys.argv[3]) db = DB(sys.argv[4], sys.argv[5], sys.argv[6]) tweets = Tweets(db) olim = OLIM(training, tweets, params) """ quadtree partitioning """ if os.path.exists(sys.argv[7]): f = open(sys.argv[7]) qtree = pickle.load(f) f.close() else: qtree = olim.geoPartitioning(params) f = open(sys.argv[7], 'w') pickle.dump(qtree, f) f.close() olim.make_population(qtree)
from lib.db import DB
from lib.DisjointSet import DisjointSet
from pprint import pprint
from lib.BitcoinBlockCrawler import BitcoinBlockCrawler
import json, csv
from migrations.createTables import createTables
import psycopg2

# Load configuration file and connect to database
with open('config.json') as json_file:
    config = json.load(json_file)
db = DB(config['DB'])
# Make sure the schema exists before anything queries it.
createTables(db)

# Retained exploratory snippets (darknet address -> id mapping experiments):
# queries1 = []
# addresses = []
#
# queries1 = """SELECT * FROM darknet_addresses;"""
# addresses = db.execute([queries1])
#
# queries2 = []
#
# query2 = """UPDATE addresses SET id = (SELECT id FROM address_matching where address={0});"""
# query2 = """SELECT id FROM address_mapping where address='{0}';"""
# for row in addresses:
#     print(query2.format(row[0]))
#     queries2.append(query2.format(row[0]))
# print(db.execute(queries2))
# db.execute(queries2, False)
# queries=[]
import os
import re

from lib.db import DB

root_path = os.path.realpath(os.path.dirname(__file__))
file_path = os.path.join(root_path, 'data', 'cnarea20160320-2.sql')

# Commas embedded in unquoted text fields are turned into '|' so the plain
# split(',') below does not break a column apart.  The pattern is applied
# three times because each re.sub pass only rewrites non-overlapping
# matches, so adjacent embedded commas need repeated passes.
_TEXT_COMMA = re.compile(r'([^,"\d]+),([^,"]+)')

# Hoisted out of the loop: the statement text never changes.
INSERT_SQL = (
    'INSERT INTO `china_area`(`id`, `parent_id`, `level`, `area_code`, `zip_code`, `city_code`, '
    '`name`, `short_name`, `merger_name`, `pinyin`, `longitude`, `latitude`) '
    'VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
)

# "with" guarantees the file is closed even if a DB call raises.
with open(file_path, 'r', encoding='utf8') as f:
    for line in f:
        if line:
            column = line.replace('\n', '')
            for _ in range(3):
                column = _TEXT_COMMA.sub('\\1|\\2', column)
            column = column.replace('"', '').split(',')
            if column:
                DB.connect().execute(INSERT_SQL, tuple(column))
            else:
                # NOTE: split(',') always yields at least one element, so this
                # branch is effectively unreachable; kept for parity.
                print('ignored:' + line)
exp.send(installcommand) exp.expect("Setting up com.bulbsecurity.tooltest (0.0.1-23) ...", timeout ) exp.send("tooltest\n") if (exp.expect(["Smartphone Pentest Framework Agent"], timeout)): agent = "yes" exp.send("exit") exp.close() fullpath2 = webserver + path + "/text2.txt" GETFILE = open(fullpath2, 'w') GETFILE.write("Apline Agent " + agent) GETFILE.close() table = "remote" db = DB(config=config) alpine = "alpine" db.query("INSERT INTO "+table+" (id,ip,exploit,vuln,agent) VALUES (DEFAULT,%s,%s,%s,%s)", (ipaddress,alpine,vulnerable,agent)) elif arsplit[1] == "evil" : print "Something something\n" webserver = config.get('WEBSERVER') sqlserver = config.get('MYSQLSERVER') ipaddress = config.get('IPADDRESS') localpath = arsplit[2] filename = arsplit[3] link = "http://" + ipaddress + localpath + filename fullpath = webserver + localpath command1 = "mkdir " + fullpath system(command1) sploitfile = webserver + localpath + filename
def test_truncate_table():
    """truncate_table must issue exactly one TRUNCATE ... CASCADE on the cursor."""
    # The previous "db = None" pre-assignment was dead code: the with-statement
    # rebinds db immediately, and the name remains usable after the block.
    with DB(DSN) as db:
        db.truncate_table('foobar')
    db.cur.execute.assert_called_once_with('TRUNCATE foobar CASCADE')
class BackupPanel(EditPanel, gui.BackupPanel):
    ''' Panel for viewing, editing, scheduling, running and deleting backup
        definitions. State (ViewState/NewState) controls whether the name is
        shown as a label or an editable text field. '''

    def __init__(self, parent):
        ''' Constructor: wire up handles, populate widgets, select the first
            backup and sync the schedule sub-panel. '''
        log.info("***BackupPanel.init")
        gui.BackupPanel.__init__(self, parent)
        self.btnAddFolder.SetBitmapLabel(wx.Bitmap(os.path.join(const.PixmapDir, "add.png")))
        self.db = DB()
        self.config = Config.get_config()
        self.state = ViewState
        self.update_data(False)
        self.nbBackup.SetSelection(0)
        self.clear()
        self.nbBackup.Layout()
        self.Fit()
        self.radSchedDailyWeekly.SetValue(True)
        # Pre-select and display the first backup, if one exists.
        if self.lstItems.GetCount() > 0:
            self.lstItems.SetSelection(0)
            self.onItemSelected(None)
        # self.onNotifyEmail(None)
        # Icon and label used by the parent window for this panel's tab.
        self.image = wx.Bitmap(os.path.join(const.PixmapDir, "backup.png"))
        self.title = _("Backups")
        self.onBackupSchedule(None)
        log.trace("Done BackupPanel.init")

    def update_data(self, set_selection=True):
        ''' (Re)populate every combo/list on the panel from config, then
            reload the backup list. '''
        # The next line should be
        #   for child in self.pnlScheduleTab.GetChildren():
        #       if child.GetName().find("cboTime") == 0:
        # but there is a bug in wxFormBuilder. It doesn't set the name attribute.
        # See http://sourceforge.net/tracker/?func=detail&aid=3187563&group_id=135521&atid=733136
        for name in dir(self):
            if name.find("cboTime") == 0:
                child = self.__getattribute__(name)
                child.Clear()
                child.AppendItems(const.HoursOfDay)
                child.SetSelection(0)
            if name.find("cboDay") == 0:
                child = self.__getattribute__(name)
                child.Clear()
                child.AppendItems(const.ShortDaysOfWeek)
                child.SetSelection(0)
            if name.find("cboMonthDay") == 0:
                child = self.__getattribute__(name)
                child.Clear()
                child.AppendItems([str(i) for i in xrange(1, 32)])
                child.SetSelection(0)
        self.txtFolders.Clear()
        self.lstExcludeTypes.Clear()
        self.lstExcludeTypes.AppendItems(self.config.file_types.keys())
        # Update the store combo in a smart fashion: keep the current
        # selection if possible.
        old_sel = self.cboStore.GetStringSelection()
        self.cboStore.Clear()
        self.cboStore.AppendItems(self.config.storage.keys())
        self.cboStore.SetStringSelection(old_sel)
        # Lastly - reload the backup list
        self.update_backup_list(set_selection)

    def update_backup_list(self, set_selection=True):
        ''' Refresh the backup list from config; a newly-added backup (one in
            config but not yet in the list) becomes the selection. '''
        sel = self.lstItems.GetStringSelection()
        # Look for new items
        backups = self.lstItems.GetItems()
        keys = self.config.backups.keys()
        keys.sort()
        for item in keys:
            if not item in backups:
                # new item becomes selected (hopefully the first)
                sel = item
                break
        self.lstItems.Clear()
        self.lstItems.AppendItems(keys)
        if set_selection:
            self.lstItems.SetStringSelection(sel)
            self.onItemSelected(None)

    ######################################################################
    #
    # EVENTS
    #
    ######################################################################

    def onHistory(self, event):
        # Open the history window for the selected backup, if any.
        name = self.lstItems.GetStringSelection()
        if len(name) > 0:
            self.history(name)

    def onRun(self, event):
        # Launch a run of the selected backup, if any.
        name = self.lstItems.GetStringSelection()
        if len(name) > 0:
            self.run_backup(name)

    def onAddFolder(self, event):
        # Let the user pick a folder and append it to the include list.
        dlog = wx.DirDialog(self, _("Select a folder to back up"), "/home")
        ret = dlog.ShowModal()
        if ret == wx.ID_OK:
            folders = self.text_to_list(self.txtFolders.GetValue())
            folders.append(dlog.GetPath())
            self.txtFolders.Clear()
            self.txtFolders.AppendText("\n".join(folders))

    def onBackupSchedule(self, event):
        # The advanced (cron) sub-panel is only visible in advanced mode.
        if self.radSchedAdvanced.GetValue():
            self.pnlAdvanced.Show()
        else:
            self.pnlAdvanced.Hide()

    ######################################################################
    #
    # Save and Load
    #
    ######################################################################

    def update_state(self):
        ''' Toggle name label/text-field visibility to match view/new state. '''
        if self.state == ViewState:
            self.lblName.Show(True)
            self.txtName.Show(False)
        if self.state == NewState:
            self.lblName.Show(False)
            self.txtName.Show(True)
        self.onBackupSchedule(None)
        self.Fit()
        self.Refresh()

    def clear(self):
        ''' Reset the form to an empty placeholder backup. '''
        b = Backup(EmptyName)
        self.show_backup(b)
        self.nbBackup.SetSelection(0)

    def delete(self, name):
        ''' Confirm and delete the backup *name*, optionally including its
            offsite data, then refresh the panel. '''
        # Lets get some statistics
        runs = self.db.runs(backupname=name)
        num_runs = len(runs)
        size = 0
        for run in runs:
            size += run.size
        if num_runs > 0:
            msg = _("Backup '{backup}' has {numruns} runs stored, "
                    "totalling {size} of remote data.\n"
                    "Are you sure you want to delete the backup definition?\n"
                    "(hint - its usually better to just deactivate the backup)").format(
                    backup=name, numruns=num_runs, size=utils.readable_form(size))
            mbox = OptionDialog(self, msg, _("Delete Backup Definition"),
                                _("Also delete all backup data stored remotely\nNote that this cannot be undone."))
            if mbox.ShowModal() != wx.ID_OK:
                return
            delete_offsite_data = mbox.chkOption.GetValue()
        else:
            msg = _("Backup '{backup}' has never run. Are you "
                    "sure you want to delete the backup definition?").format(backup=name)
            if dlg.OkCancel(self, msg, _("Confirm Delete")) != wx.ID_OK:
                return
            delete_offsite_data = False
        with ProgressDialog(self, _("Deleting"), _("Deleting backup %s%s.\nPlease wait...") % (name, " and all offsite data" if delete_offsite_data else "")):
            self.delete_backup(name, delete_offsite_data)
            import time
            time.sleep(3)
        self.clear()
        self.state = ViewState
        app.broadcast_update()

    def show(self, name):
        ''' Display the backup *name*; warn (without crashing) if it is corrupt. '''
        try:
            backup = self.config.backups[name]
            self.state = ViewState
            self.show_backup(backup)
        except Exception as e:
            # Missing backup!
            dlg.Warn(self, _("The backup '{backup}' seems to be corrupt. {error}").format(backup=name, error=str(e)))
            # self.update_backup_list()
            # self.state = ViewState
            # self.clear()

    def show_backup(self, b):
        ''' Load every widget on the form from backup object *b*. '''
        # General Information
        self.txtName.SetValue(b.name)
        self.lblName.SetLabel(b.name)
        self.chkActive.SetValue(b.active)
        # Folder Information
        self.txtFolders.Clear()
        self.txtFolders.AppendText("\n".join(b.include_folders))
        self.chkPackages.SetValue(b.include_packages)
        # Exclusions
        self.lstExcludeTypes.SetCheckedStrings(b.exclude_types)
        self.txtExcludePatterns.Clear()
        self.txtExcludePatterns.AppendText("\n".join(b.exclude_patterns))
        # Destination
        self.cboStore.SetStringSelection(b.store)
        self.chkEncrypt.SetValue(b.encrypt)
        self.chkVerify.SetValue(b.verify)
        # Schedule
        if b.sched_type == "custom":
            # Two raw cron lines: incremental, then full.
            self.radSchedAdvanced.SetValue(True)
            incr, full = b.sched_times.split("\n")
            self.txtCronIncr.SetValue(incr)
            self.txtCronFull.SetValue(full)
        else:
            # itime, dummy = incr.split("/")   # iday not used
            # ftime, fday = full.split("/")
            time, day = b.sched_times.split("/")
            if b.sched_type == "daily/weekly":
                self.radSchedDailyWeekly.SetValue(True)
                self.cboTime1.SetStringSelection(time)
                self.cboDay1.SetStringSelection(day)
            elif b.sched_type == "daily/monthly":
                self.radSchedDailyMonthly.SetValue(True)
                self.cboTime2.SetStringSelection(time)
                self.cboMonthDay2.SetStringSelection(day)
            elif b.sched_type == "hourly/weekly":
                self.radSchedHourlyWeekly.SetValue(True)
                self.cboTime3.SetStringSelection(time)
                self.cboDay3.SetStringSelection(day)
            elif b.sched_type == "none/daily":
                self.radSchedNoneDaily.SetValue(True)
                self.cboTime4.SetStringSelection(time)
            elif b.sched_type == "none/weekly":
                self.radSchedNoneWeekly.SetValue(True)
                self.cboDay5.SetStringSelection(day)
                self.cboTime5.SetStringSelection(time)
            else:
                raise Exception(_("This backup is corrupt. Invalid schedule type"))
        # Notifications
        self.chkNotifyMsg.SetValue(b.notify_msg)
        self.chkNotifyEmail.SetValue(b.notify_email)
        self.chkShutdown.SetValue(b.shutdown_after)
        self.update_state()

    def text_to_list(self, text):
        ''' Split multi-line text into a list of stripped, non-empty lines. '''
        list = [item.strip() for item in text.split("\n") if len(item.strip()) > 0]
        return list

    def get_time_str(self, cronitem):
        ''' Render a cron item's hour/minute as zero-padded "HH:MM",
            falling back to 19:00 components for non-numeric fields. '''
        hour = cronitem.hour().render()
        if not hour.isdigit():
            hour = "19"
        if len(hour) == 1:
            hour = '0' + hour
        min = cronitem.minute().render()
        if not min.isdigit():
            min = "00"
        if len(min) == 1:
            min = '0' + min
        time = "%s:%s" % (hour, min)
        return time

    def get_dow(self, cronitem):
        ''' Day-of-week of a cron item as an int (0 when non-numeric). '''
        dow = cronitem.dow().render()
        if not dow.isdigit():
            dow = "0"
        return int(dow)

    def get_dom(self, cronitem):
        ''' Day-of-month of a cron item as an int (0 when non-numeric). '''
        dom = cronitem.dom().render()
        if not dom.isdigit():
            dom = "0"
        return int(dom)

    def save(self):
        ''' Validate the form, build a Backup object from it, store it in
            config and refresh the crontab.
            @raise Exception: on any validation failure. '''
        # BUILD THE BACKUP
        if len(self.txtName.GetValue()) == 0:
            raise Exception(_("Backup name cannot be blank"))
        if self.chkEncrypt.GetValue() and not self.config.data_passphrase:
            raise Exception(_("You cannot select encryption when the passphrase is blank (see Configuration page)."))
        if self.txtName.GetValue() == EmptyName:
            raise Exception(_("You need to provide a proper backup name"))
        try:
            # Create the new backup object
            b = Backup(self.txtName.GetValue())
            # General Information
            b.active = self.chkActive.GetValue()
            # Folder Information
            b.include_folders = self.text_to_list(self.txtFolders.GetValue())
            b.include_packages = self.chkPackages.GetValue()
            # Exclusions
            b.exclude_types = list(self.lstExcludeTypes.GetCheckedStrings())  # returns a tuple, convert to array
            b.exclude_patterns = self.text_to_list(self.txtExcludePatterns.GetValue())
            # Destination
            b.store = self.cboStore.GetStringSelection()
            b.encrypt = self.chkEncrypt.GetValue()
            b.verify = self.chkVerify.GetValue()
            # Schedule
            if self.radSchedAdvanced.GetValue():
                b.sched_type = "custom"
                b.sched_times = "%s\n%s" % (self.txtCronIncr.GetValue(), self.txtCronFull.GetValue())
            else:
                if self.radSchedDailyWeekly.GetValue():
                    b.sched_type = "daily/weekly"
                    time = self.cboTime1.GetStringSelection()
                    day = self.cboDay1.GetStringSelection()
                elif self.radSchedDailyMonthly.GetValue():
                    b.sched_type = "daily/monthly"
                    time = self.cboTime2.GetStringSelection()
                    day = self.cboMonthDay2.GetStringSelection()
                elif self.radSchedHourlyWeekly.GetValue():
                    b.sched_type = "hourly/weekly"
                    time = self.cboTime3.GetStringSelection()
                    day = self.cboDay3.GetStringSelection()
                elif self.radSchedNoneDaily.GetValue():
                    b.sched_type = "none/daily"
                    time = self.cboTime4.GetStringSelection()
                    day = "*"
                elif self.radSchedNoneWeekly.GetValue():
                    b.sched_type = "none/weekly"
                    time = self.cboTime5.GetStringSelection()
                    day = self.cboDay5.GetStringSelection()
                else:
                    raise Exception(_("Corrupt backup"))
                b.sched_times = time + "/" + day
            # Notifications
            b.notify_msg = self.chkNotifyMsg.GetValue()
            b.notify_email = self.chkNotifyEmail.GetValue()
            b.shutdown_after = self.chkShutdown.GetValue()
            b.check()
        except Exception as e:
            raise e
        if self.state == ViewState:
            # Delete the old name (a rename replaces the previous entry)
            oldname = self.lstItems.GetStringSelection()
            try:
                del self.config.backups[oldname]
            except:
                pass
        self.config.backups[b.name] = b
        self.config.save()
        self.update_backup_list()
        # Attempt to save the crontab. If this fails, the backup was corrupt.
        # But it has been saved. So that is a problem
        update_crontab(self.config.backups)

    ######################################################################
    #
    # Misc Routines
    #
    ######################################################################

    def hour_min_from_str(self, str):
        ''' Split "HH:MM" into an (hour, minute) int pair. '''
        hour, min = str.split(":")
        return int(hour), int(min)

    def delete_backup(self, name, delete_offsite_data):
        ''' Delete the database runs (and optionally the offsite data) for
            *name*, then remove it from config and the crontab. '''
        backup = self.config.backups[name]
        # Read the runs
        dummy = self.db.runs(name)
        success = True
        try:
            if delete_offsite_data:
                wx.Yield()
                store = self.config.storage[backup.store].copy()
                store.delete_backup_data(name)
            wx.Yield()
            self.db.delete_backup(name)
        except:
            # Most likely this will happen with a corrupt backup object.
            # We dont want that corruption to stop the deletion.
            success = False
        # Now delete the configuration.
        wx.Yield()
        del self.config.backups[name]
        update_crontab(self.config.backups)
        self.config.save()
        self.update_backup_list()
        if not success:
            dlg.Warn(self, _("There were errors during the delete. You should check/delete the offsite store manually."), _("Error During Delete"))

    def history(self, default_name):
        # Open the file list window
        win = HistoryWindow(self, default_name)
        win.Show()

    def run_backup(self, backup_name):
        # Open the run window for this backup.
        win = RunBackupWindow(self, backup_name)
        win.Show()
class Meta:
    # ORM model configuration (inner Meta class - presumably peewee-style;
    # confirm): binds the model to the shared connection returned by
    # DB.getConnection().
    database = DB.getConnection()
def test_objs_to_csv(mock_data_obj):
    """DB._objs_to_csv serialises each object into one CSV row."""
    objs = [mock_data_obj, mock_data_obj]
    csv_buffer = DB._objs_to_csv(objs)
    expected = '1,2,3\n1,2,3\n'
    assert csv_buffer.read() == expected
def __init__(self, tmdb_api_key, db_name, db_username, db_password):
    # Configure the tmdb client library and open the database connection.
    # NOTE: tmdb.API_KEY is module-level state, so the key is shared by
    # every user of the tmdb module in this process.
    tmdb.API_KEY = tmdb_api_key
    # DB() is called as (username, password, name) - note the different
    # order from this constructor's (name, username, password) parameters.
    self.database = DB(db_username, db_password, db_name)
import ujson
from lib.db import DB

# Parse "/name value" style input parameters from the command line.
parameters = {'zoneid': None, 'output_type': 'plain'}
current_param = None
for param in sys.argv[1:]:
    if len(param) > 1 and param[0] == '/':
        # A "/xxx" switch selects which parameter the next token fills.
        current_param = param[1:].lower()
    elif current_param is not None:
        # Only accept values for known parameter names.
        if current_param in parameters:
            parameters[current_param] = param.strip()
        current_param = None

# Fetch connected clients for the requested zone; empty list when no zone given.
if parameters['zoneid'] is not None:
    cpDB = DB()
    response = cpDB.list_clients(parameters['zoneid'])
else:
    response = []

# output result as plain text or json
if parameters['output_type'] != 'json':
    # Column headings for the plain-text listing, keyed by response field.
    heading = {'sessionId': 'sessionid',
               'userName': '******',
               'ipAddress': 'ip_address',
               'macAddress': 'mac_address',
               'total_bytes': 'total_bytes',
               'idletime': 'idletime',
               'totaltime': 'totaltime',
               'acc_timeout': 'acc_session_timeout'
               }
class Monitor(object):
    """
    Allows us to actively monitor certain coins on exchanges & updates values
    to database.

    ``metadata`` maps exchange name -> iterable of (base_coin, quote_coin)
    market pairs.
    """

    def __init__(self, metadata):
        self.db = DB()
        self.metadata = metadata
        # exchanges dict has {exchange_name: exchange_object}
        self.exchanges_dict = {}
        # init all exchanges we need
        for exchange_name, markets in metadata.items():
            # init exchange & record
            print("Initializing exchange: " + str(exchange_name))
            exchange = init_exchange(exchange_name)
            self.exchanges_dict[exchange_name] = exchange
            # create tables for exchange
            # hard code what columns we want to record
            # NOTE(review): exchange_name is interpolated into the SQL; this
            # is only safe while exchange names come from trusted config.
            self.db.execute("CREATE TABLE IF NOT EXISTS " + exchange_name + "(datestamp TIMESTAMP, ask REAL, bid REAL, market VARCHAR(14), market_sym VARCHAR(14))")

    # stage changes to DB; commit occurs in update_data
    def update_data_exchanges(self, exch_name, exch_obj, exch_markets):
        """Grab a quote for each market of one exchange, retrying up to
        ``max_tries`` times per market. Returns [(exch_name, data), ...]."""
        temp_store = []
        # for each market
        for market in exch_markets:
            base_coin = market[0]
            quote_coin = market[1]
            # BUG FIX: curr_data used to be left unbound when every attempt
            # for the FIRST market failed (NameError), and left holding the
            # PREVIOUS market's data when a later market failed (silently
            # recording wrong data). Initialise and skip on total failure.
            curr_data = None
            curr_tries = 1
            while (curr_tries <= max_tries):
                try:
                    curr_data = exch_obj.grab_data(base_coin, quote_coin)
                    break
                except Exception as e:
                    curr_tries += 1
                    print("Failed to grab data for " + str(exch_name) + " " + str(market) + ". Error: " + str(e))
                    print(traceback.format_exc())
                    print("Attempt #: " + str(curr_tries))
            if curr_data is None:
                # Every attempt failed for this market; record nothing.
                continue
            temp_store.append((exch_name, curr_data))
        return temp_store

    def post_data(self, temp_store):
        """Insert all staged rows and commit once at the end."""
        now = datetime.now()
        print(len(temp_store))
        print(temp_store)
        for exch_name, curr_data in temp_store:
            # curr_data supplies (datestamp, ask, bid, market, market_sym).
            insert_query = "INSERT INTO " + exch_name + " values ('{0}', {1}, {2}, '{3}', '{4}')".format(*curr_data)
            self.db.execute(insert_query)
        self.db.commit()
        end = datetime.now()
        print("Updated DB in " + str((end-now).total_seconds()) + " s.")

    # update database for each exchange & market
    def update_data(self):
        """Fetch every exchange in parallel and persist the results.
        Returns the total number of markets processed."""
        total_markets = 0
        # for each exchange
        # create methods dict to run in parallel
        methods_arr = []
        for exch_name, exch_obj in self.exchanges_dict.items():
            curr_markets = self.metadata[exch_name]
            methods_arr.append([self.update_data_exchanges, exch_name, exch_obj, curr_markets])
            total_markets += len(curr_markets)
        # run all exchanges in parrallel
        temp_store = run_methods_parallel(methods_arr)
        # push data in temp store to database
        self.post_data(temp_store)
        return total_markets
from lib.ipfw import IPFW # parse input parameters parameters = {'sessionid': None, 'zoneid': None, 'output_type': 'plain'} current_param = None for param in sys.argv[1:]: if len(param) > 1 and param[0] == '/' and param[1:] in parameters: current_param = param[1:].lower() elif current_param is not None: parameters[current_param] = param.strip() current_param = None # disconnect client response = {'terminateCause': 'UNKNOWN'} if parameters['sessionid'] is not None and parameters['zoneid'] is not None: cp_db = DB() # remove client client_session_info = cp_db.del_client(parameters['zoneid'], parameters['sessionid']) if client_session_info is not None: cpIPFW = IPFW() cpIPFW.delete(parameters['zoneid'], client_session_info['ip_address']) client_session_info['terminateCause'] = 'User-Request' response = client_session_info # output result as plain text or json if parameters['output_type'] != 'json': for item in response: print '%20s %s' % (item, response[item]) else: print(ujson.dumps(response))
def test_vacuum_analyze_table():
    """vacuum_analyze_table must issue exactly one VACUUM ANALYZE statement."""
    # (Removed a dead `db = None` pre-assignment: the with-statement rebinds
    # `db` immediately, and any failure inside propagates out of the test.)
    with DB(DSN) as db:
        db.vacuum_analyze_table('foobar')
    # `db` stays bound after the with-block, so the mocked cursor call can
    # be asserted here.
    db.cur.execute.assert_called_once_with('VACUUM ANALYZE foobar')
from lib.ipfw import IPFW # parse input parameters parameters = {'username': '', 'ip_address': None, 'zoneid': None, 'authenticated_via': None, 'output_type': 'plain'} current_param = None for param in sys.argv[1:]: if len(param) > 1 and param[0] == '/': current_param = param[1:].lower() elif current_param is not None: if current_param in parameters: parameters[current_param] = param.strip() current_param = None # create new session if parameters['ip_address'] is not None and parameters['zoneid'] is not None: cpDB = DB() cpIPFW = IPFW() arp_entry = ARP().get_by_ipaddress(parameters['ip_address']) if arp_entry is not None: mac_address = arp_entry['mac'] else: mac_address = None response = cpDB.add_client(zoneid=parameters['zoneid'], authenticated_via=parameters['authenticated_via'], username=parameters['username'], ip_address=parameters['ip_address'], mac_address=mac_address ) # check if address is not already registered before adding it to the ipfw table if not cpIPFW.ip_or_net_in_table(table_number=parameters['zoneid'], address=parameters['ip_address']):
class CPBackgroundProcess(object):
    """ background process helper class """

    def __init__(self):
        # open syslog and notice startup
        syslog.openlog('captiveportal', logoption=syslog.LOG_DAEMON, facility=syslog.LOG_LOCAL4)
        syslog.syslog(syslog.LOG_NOTICE, 'starting captiveportal background process')
        # handles to ipfw, arp the config and the internal administration
        self.ipfw = IPFW()
        self.arp = ARP()
        self.cnf = Config()
        self.db = DB()
        # Zone configuration is read once at startup and cached.
        self._conf_zone_info = self.cnf.get_zones()

    def list_zone_ids(self):
        """ return zone numbers """
        return self._conf_zone_info.keys()

    def initialize_fixed(self):
        """ initialize fixed ip / hosts per zone """
        cpzones = self._conf_zone_info
        for zoneid in cpzones:
            for conf_section in ['allowedaddresses', 'allowedmacaddresses']:
                for address in cpzones[zoneid][conf_section]:
                    # A section name containing 'mac' holds MAC addresses;
                    # otherwise the entries are IP addresses.
                    if conf_section.find('mac') == -1:
                        sessions = self.db.sessions_per_address(
                            zoneid, ip_address=address)
                        ip_address = address
                        mac_address = None
                    else:
                        sessions = self.db.sessions_per_address(
                            zoneid, mac_address=address)
                        ip_address = None
                        mac_address = address
                    # Drop regular user sessions on this address; the
                    # administrative ---ip---/---mac--- markers are kept.
                    sessions_deleted = 0
                    for session in sessions:
                        if session['authenticated_via'] not in ('---ip---', '---mac---'):
                            sessions_deleted += 1
                            self.db.del_client(zoneid, session['sessionId'])
                    if sessions_deleted == len(sessions) or len(sessions) == 0:
                        # when there's no session active, add a new one
                        # (only administrative, the sync process will add it if neccesary)
                        if ip_address is not None:
                            self.db.add_client(zoneid, "---ip---", "", ip_address, "")
                        else:
                            self.db.add_client(zoneid, "---mac---", "", "", mac_address)
            # cleanup removed static sessions
            for dbclient in self.db.list_clients(zoneid):
                if dbclient['authenticated_via'] == '---ip---' \
                        and dbclient['ipAddress'] not in cpzones[zoneid]['allowedaddresses']:
                    self.ipfw.delete(zoneid, dbclient['ipAddress'])
                    self.db.del_client(zoneid, dbclient['sessionId'])
                elif dbclient['authenticated_via'] == '---mac---' \
                        and dbclient['macAddress'] not in cpzones[zoneid]['allowedmacaddresses']:
                    if dbclient['ipAddress'] != '':
                        self.ipfw.delete(zoneid, dbclient['ipAddress'])
                    self.db.del_client(zoneid, dbclient['sessionId'])

    def sync_zone(self, zoneid):
        """ Synchronize captiveportal zone.
            Handles timeouts and administrative changes to this zones sessions
        """
        if zoneid in self._conf_zone_info:
            # fetch data for this zone
            cpzone_info = self._conf_zone_info[zoneid]
            registered_addresses = self.ipfw.list_table(zoneid)
            registered_addr_accounting = self.ipfw.list_accounting_info()
            expected_clients = self.db.list_clients(zoneid)
            concurrent_users = self.db.find_concurrent_user_sessions(zoneid)

            # handle connected clients, timeouts, address changes, etc.
            for db_client in expected_clients:
                # fetch ip address (or network) from database
                cpnet = db_client['ipAddress'].strip()

                # there are different reasons why a session should be removed, check for all reasons and
                # use the same method for the actual removal
                drop_session_reason = None

                # session cleanups, only for users not for static hosts/ranges.
                if db_client['authenticated_via'] not in ('---ip---', '---mac---'):
                    # check if hardtimeout is set and overrun for this session
                    # (timestamps appear to be epoch seconds; timeouts are in
                    # minutes, hence the /60 - confirm against DB schema)
                    if 'hardtimeout' in cpzone_info and str(
                            cpzone_info['hardtimeout']).isdigit():
                        # hardtimeout should be set and we should have collected some session data from the client
                        if int(cpzone_info['hardtimeout']) > 0 and float(
                                db_client['startTime']) > 0:
                            if (time.time() - float(db_client['startTime'])) / 60 > int(
                                    cpzone_info['hardtimeout']):
                                drop_session_reason = "session %s hit hardtimeout" % db_client[
                                    'sessionId']

                    # check if idletimeout is set and overrun for this session
                    if 'idletimeout' in cpzone_info and str(
                            cpzone_info['idletimeout']).isdigit():
                        # idletimeout should be set and we should have collected some session data from the client
                        if int(cpzone_info['idletimeout']) > 0 and float(
                                db_client['last_accessed']) > 0:
                            if (time.time() - float(db_client['last_accessed'])
                                    ) / 60 > int(cpzone_info['idletimeout']):
                                drop_session_reason = "session %s hit idletimeout" % db_client[
                                    'sessionId']

                    # cleanup concurrent users
                    if 'concurrentlogins' in cpzone_info and int(
                            cpzone_info['concurrentlogins']) == 0:
                        if db_client['sessionId'] in concurrent_users:
                            drop_session_reason = "remove concurrent session %s" % db_client[
                                'sessionId']

                    # if mac address changes, drop session. it's not the same client
                    current_arp = self.arp.get_by_ipaddress(cpnet)
                    if current_arp is not None and current_arp[
                            'mac'] != db_client['macAddress']:
                        drop_session_reason = "mac address changed for session %s" % db_client[
                            'sessionId']

                    # session accounting
                    if db_client['acc_session_timeout'] is not None \
                            and time.time() - float(db_client['startTime']) > db_client['acc_session_timeout']:
                        drop_session_reason = "accounting limit reached for session %s" % db_client[
                            'sessionId']

                elif db_client['authenticated_via'] == '---mac---':
                    # detect mac changes
                    current_ip = self.arp.get_address_by_mac(
                        db_client['macAddress'])
                    if current_ip is not None:
                        if db_client['ipAddress'] != '':
                            # remove old ip
                            self.ipfw.delete(zoneid, db_client['ipAddress'])
                        self.db.update_client_ip(zoneid, db_client['sessionId'], current_ip)
                        self.ipfw.add_to_table(zoneid, current_ip)
                        self.ipfw.add_accounting(current_ip)

                # check session, if it should be active, validate its properties
                if drop_session_reason is None:
                    # registered client, but not active according to ipfw (after reboot)
                    if cpnet not in registered_addresses:
                        self.ipfw.add_to_table(zoneid, cpnet)
                    # is accounting rule still available? need to reapply after reload / reboot
                    if cpnet not in registered_addr_accounting:
                        self.ipfw.add_accounting(cpnet)
                else:
                    # remove session
                    syslog.syslog(syslog.LOG_NOTICE, drop_session_reason)
                    self.ipfw.delete(zoneid, cpnet)
                    self.db.del_client(zoneid, db_client['sessionId'])

            # if there are addresses/networks in the underlying ipfw table which are not in our administration,
            # remove them from ipfw.
            for registered_address in registered_addresses:
                address_active = False
                for db_client in expected_clients:
                    if registered_address == db_client['ipAddress']:
                        address_active = True
                        break
                if not address_active:
                    self.ipfw.delete(zoneid, registered_address)
class Verify():

    def __init__(self, backup_name, run_date):
        '''
        Verify a run to ensure it is valid
        '''
        self.config = Config.get_config()
        self.backup = self.config.backups[backup_name]
        # Work on a copy of the store so connect/disconnect does not disturb
        # the shared configuration object.
        self.store = self.config.storage[self.backup.store].copy()
        self.db = DB()
        # Find the run
        runs = self.db.runs(self.backup.name, run_date)
        if len(runs) == 0:
            raise Exception(_("Verify failed: Backup run does not exist"))
        self.vrun = runs[0]

    def run(self):
        # Check the store is reachable/usable at all.
        self.test_store()
        # Get config and packages
        self.fetch_config()
        # We only check the data if there is actually something stored there.
        if self.vrun.nfiles == 0 and self.vrun.nfolders == 0:
            return True
        self.prepare_input(self.vrun, self.backup, self.store)
        try:
            # Only check for tar data if there are files backed up
            # Otherwise the tar will simply return an error
            # Walking every member forces the entire stream to be read (and
            # decrypted), which is what actually exercises the stored data.
            tarinfo = self.tarfile.next()
            while tarinfo:
                tarinfo = self.tarfile.next()
        finally:
            self.close_input(self.backup)
        store_size, store_hash, = self.store_thread.get_hash()
        run_hash = self.vrun.hash
        run_size = self.vrun.size
        # The streamed data must match the size and hash recorded at backup time.
        if store_size == run_size and store_hash == run_hash:
            return True
        # print(store_size, store_hash, run_size, run_hash)
        raise Exception(_("Verify failed - Run data is corrupt"))

    def test_store(self):
        # Connect to a fresh copy of the store and run its self-test.
        store = self.config.storage[self.store.name].copy()
        store.connect()
        try:
            store.test()
        finally:
            store.disconnect()

    def fetch_config(self):
        # Ensure the run's stored configuration (and, if configured, its
        # package list) can actually be downloaded; each file is fetched to
        # a temp path and removed again.
        store = self.config.storage[self.store.name].copy()
        store.connect()
        try:
            encrypted = False
            config = os.path.join(self.vrun.folder, const.ConfigName)
            if not store.exists(config):
                # Fall back to the encrypted file name.
                encrypted = True
                config = config + const.EncryptionSuffix
                if not store.exists(config):
                    raise Exception(_("Configuration file missing. Bad run"))
            store.get(config, os.path.join(tempfile.gettempdir(), "__vault__tmp__"))
            os.remove(os.path.join(tempfile.gettempdir(), "__vault__tmp__"))
            if self.backup.include_packages:
                packages = os.path.join(self.vrun.folder, const.PackageFile)
                if encrypted:
                    packages = packages + const.EncryptionSuffix
                store.get(packages, os.path.join(tempfile.gettempdir(), "__vault__tmp__"))
                os.remove(os.path.join(tempfile.gettempdir(), "__vault__tmp__"))
        finally:
            store.disconnect()

    def prepare_input(self, run, backup, store):
        '''
        Open the tar file.
        Connect the output of the tar to either:
        a) the storage handler
        b) to encryption (openssl), THEN the storage handler
        '''
        log.trace("Setting up input processes")
        # Set up the encryptor (use TEE for now)
        self.crypt_proc = None
        if backup.encrypt:
            log.debug("Creating crypt objects")
            self.crypto = cryptor.DecryptStream(self.config.data_passphrase)
        else:
            # Pass-through buffer when the run is not encrypted.
            self.crypto = cryptor.Buffer()
        # Set up the storage handler
        log.debug("Starting storage thread")
        self.store_thread = StreamIn(self.crypto, store, run.folder)
        self.store_thread.start()
        log.debug("Connecting tar object")
        # Streaming gzip read directly from the (decrypted) store stream.
        self.tarfile = tarfile.open(mode="r|gz", fileobj=self.crypto, bufsize=const.BufferSize)
        log.trace("Completed input preparation")

    def close_input(self, backup):
        log.trace("Closing output managers")
        # If we are using an external save command, we do nothing here
        try:
            self.tarfile.close()
            self.crypto.close()
            # Now we are ready to wait for the storage.
            self.store_thread.join()
            if self.store_thread.error:
                log.error("Closing store. Got error", str(self.store_thread.error))
                # self.db.save_message("Error saving backup: %s" % str(self.store_thread.error))
                raise self.store_thread.error
        finally:
            pass
        log.debug("All input closed")
              'authenticated_via': None,
              'output_type': 'plain',
              'sessionid': ''
              }
current_param = None
for param in sys.argv[1:]:
    if len(param) > 1 and param[0] == '/':
        current_param = param[1:].lower()
    elif current_param is not None:
        # Only accept values for known parameter names.
        if current_param in parameters:
            parameters[current_param] = param.strip()
        current_param = None

# create new session
if parameters['ip_address'] is not None and parameters['zoneid'] is not None:
    cpDB = DB()
    cpIPFW = IPFW()
    # Resolve the client's MAC address from the ARP table (may be absent).
    arp_entry = ARP().get_by_ipaddress(parameters['ip_address'])
    if arp_entry is not None:
        mac_address = arp_entry['mac']
    else:
        mac_address = None
    # A caller-supplied sessionid is passed through here (this variant
    # differs from the sibling script that omits it).
    response = cpDB.add_client(
        zoneid=parameters['zoneid'],
        authenticated_via=parameters['authenticated_via'],
        username=parameters['username'],
        ip_address=parameters['ip_address'],
        mac_address=mac_address,
        sessionid=parameters['sessionid'])
    # check if address is not already registered before adding it to the ipfw table
def setUpClass(cls):
    # Shared fixtures for the whole test class: one DB connection and the
    # Excel workbook holding the test-case data.
    # NOTE(review): unittest requires setUpClass to be a classmethod -
    # confirm the @classmethod decorator is present just above this excerpt.
    cls.db = DB()
    cls.excel = Excel("加油卡完整用例.xls")
def unBanUser(self, user):
    """Lift the ban on *user* in this context; delegates to the DB layer."""
    db = DB()
    return db.unbanUser(user, self)
sploitfile = webserver + localpath + filename command8 = "touch " + sploitfile system(command8) command9 = "chmod 777 " + sploitfile system(command9) results = webserver + localpath + "/results" command8 = "touch " + results system(command8) command9 = "chmod 777 " + results system(command9) sploitfiletext = "<?php\necho \"You Got Phished!\";\n$agent = $_SERVER['HTTP_USER_AGENT'];\n$page = " + x + ";\n$thing = $page . \" \" . $agent;\n$file = results;\n$current = file_get_contents($file);\n$current .= $thing . \"\\n\";\nfile_put_contents($file, $current);\n?>" SPLOITFILE = open(sploitfile, 'w') SPLOITFILE.write(sploitfiletext) SPLOITFILE.close() modem = 1 db = DB(config=config) db.query("SELECT path from modems where id=%s", (modem,)) path2 = db.fetchone()[0].replace('"', '') print path2 db.query("SELECT controlkey from modems where id=%s", (modem,)) key2 = db.fetchone()[0] print key2 db.query("SELECT type from modems where id=%s", (modem,)) modemtype2 = db.fetchone()[0] control = webserver + path2 + "/getfunc" sleep(5) with open(control, 'w') as f: msg = "This is a cool page: " command2 = key2 + " " + "SEND" + " " + x + " " + msg + link
def muteUser(self, user, time):
    """Mute *user* in this context for *time*; delegates to the DB layer."""
    db = DB()
    return db.muteUser(user, self, time)
class ConfigPanel(gui.ConfigPanel):
    '''
    Configuration panel: manages File Type definitions, mail (SMTP)
    settings and the master (encryption) password.
    '''

    def __init__(self, parent):
        '''
        Constructor
        '''
        log.info("***ConfigPanel.init")
        gui.ConfigPanel.__init__(self, parent)
        self.config = Config.get_config()
        self.db = DB()
        self.state = ViewState
        self.update_filetype_list()
        self.clear_filetype()
        self.image = wx.Bitmap(os.path.join(const.PixmapDir, "configure.png"))
        self.title = _("Configuration")
        if self.lstFileTypes.GetCount() > 0:
            self.lstFileTypes.SetSelection(0)
            self.onFileType(None)
        self.show_mail()
        self.txtMailServer.SetFocus()
        self.nb_config.SetSelection(0)
        self.show_security()
        # Both password fields start out masked.
        self.pwd_hidden = True
        self.mail_hidden = True
        log.trace("Done ConfigPanel.init")

    def update_data(self):
        # Nothing to refresh for this panel.
        pass

    def update_filetype_list(self):
        # Rebuild the list control from the configured file types.
        self.lstFileTypes.Clear()
        self.lstFileTypes.AppendItems(self.config.file_types.keys())

    ######################################################################3
    #
    #        FILE TYPE EVENTS
    #
    ######################################################################3

    def onSaveTypes(self, event):
        self.save_filetype()
        self.state = ViewState
        self.update_state()
        self.lstFileTypes.SetStringSelection(self.txtName.GetValue())

    def onFileType(self, event):
        # Get the name to be showed
        name = self.lstFileTypes.GetStringSelection()
        if len(name) == 0:
            return
        # Load it
        try:
            list = self.config.file_types[name]
            self.state = ViewState
            self.show_filetype(name, list)
        except Exception:
            # Missing backup!
            dlg.Warn(self, _("That File Type seems to be missing or corrupt."))
            self.update_filetype_list()
            self.state = ViewState
            self.clear_filetype()
            return

    def onDelete(self, event):
        # Get the name to be showed
        name = self.lstFileTypes.GetStringSelection()
        if len(name) == 0:
            return
        if dlg.OkCancel(self, _("Delete File Type definition %s and all its data! \nAre you sure?") % name) == wx.ID_OK:
            self.delete_filetype(name)
            self.clear_filetype()
            self.state = ViewState

    def onNew(self, event):
        log.info("New!")
        self.state = NewState
        self.clear_filetype()
        self.txtName.SetFocus()

    def onName(self, event):
        self.update_state()

    def onSSL(self, event):
        # Switch the port field between the usual SMTPS / plain SMTP ports.
        if self.chkMailSSL.GetValue():
            self.txtMailPort.SetValue("465")
        else:
            self.txtMailPort.SetValue("25")

    ######################################################################3
    #
    #        EMAIL EVENTS
    #
    ######################################################################3

    def onHideMailPassword(self, event):
        # Toggle masking of the mail password field.
        if self.mail_hidden:
            self.txtMailPassword.SetWindowStyle(wx.NORMAL)
            self.mail_hidden = False
            self.btnHideMailPassword.SetLabel("Hide")
        else:
            self.txtMailPassword.SetWindowStyle(wx.TE_PASSWORD)
            self.mail_hidden = True
            self.btnHideMailPassword.SetLabel("Show")

    def onMailAuth(self, event):
        # Login/password fields only make sense when authentication is on.
        auth = self.chkMailAuth.GetValue()
        self.txtMailLogin.Enable(auth)
        self.txtMailPassword.Enable(auth)

    def onMailSave(self, event):
        # Copy the mail settings from the widgets into config and persist.
        self.config.mail_server = self.txtMailServer.GetValue()
        self.config.mail_port = self.txtMailPort.GetValue()
        self.config.mail_ssl = self.chkMailSSL.GetValue()
        self.config.mail_auth = self.chkMailAuth.GetValue()
        self.config.mail_login = self.txtMailLogin.GetValue()
        self.config.mail_password = self.txtMailPassword.GetValue()
        self.config.mail_from = self.txtMailFrom.GetValue()
        self.config.mail_to = self.txtMailTo.GetValue()
        self.config.save()

    def onMailTest(self, event):
        # Send a test message using the (possibly unsaved) widget values.
        try:
            if not self.txtMailServer.GetValue() \
                    or not self.txtMailFrom.GetValue() \
                    or not self.txtMailTo.GetValue():
                raise Exception(_("Mail server, from address and to address are required."))
            with ProgressDialog(self, _("Sending"), _("Sending a test email.\nPlease wait...")):
                import time
                time.sleep(1)
                log.debug("Doing send")
                sendemail.sendemail2(self.txtMailServer.GetValue(),
                                     int(self.txtMailPort.GetValue()),
                                     self.chkMailSSL.GetValue(),
                                     self.txtMailFrom.GetValue(),
                                     self.txtMailTo.GetValue(),
                                     self.chkMailAuth.GetValue(),
                                     self.txtMailLogin.GetValue(),
                                     self.txtMailPassword.GetValue(),
                                     _('The Vault Backup System - Test Message'),
                                     _("This is a test message from The Vault Backup System.\n"
                                       "If you have received this, then email is correctly configured."))
            dlg.Info(self, _("Mail was sent successfully. Please check it arrived."))
        except Exception as e:
            dlg.Warn(self, str(e))

    def show_mail(self):
        # Populate the mail widgets from the saved configuration.
        self.txtMailServer.SetValue(self.config.mail_server)
        self.txtMailPort.SetValue(str(self.config.mail_port))
        self.chkMailSSL.SetValue(self.config.mail_ssl)
        self.chkMailAuth.SetValue(self.config.mail_auth)
        self.txtMailLogin.SetValue(self.config.mail_login)
        self.txtMailPassword.SetValue(self.config.mail_password)
        self.txtMailFrom.SetValue(self.config.mail_from)
        self.txtMailTo.SetValue(self.config.mail_to)
        self.onMailAuth(None)

    ######################################################################3
    #
    #        Security EVENTS
    #
    ######################################################################3

    def onHidePassword(self, event):
        # Toggle masking of the master password field.
        if self.pwd_hidden:
            self.txtMasterPassword.SetWindowStyle(wx.NORMAL)
            self.pwd_hidden = False
            self.btnHidePassword.SetLabel("Hide")
        else:
            self.txtMasterPassword.SetWindowStyle(wx.TE_PASSWORD)
            self.pwd_hidden = True
            self.btnHidePassword.SetLabel("Show")

    def show_security(self):
        if not self.config.data_passphrase:
            self.txtMasterPassword.SetValue("")
        else:
            self.txtMasterPassword.SetValue(self.config.data_passphrase)
        self.onMasterPasswordChar(None)

    def onMasterPasswordChar(self, event):
        """Recalculate entropy any time the password changes."""
        pwd = self.txtMasterPassword.GetValue()
        # Clamp the entropy estimate to the gauge's 0..100 range.
        e = int(cryptor.entropy(pwd))
        if e < 0:
            e = 0
        if e > 100:
            e = 100
        self.strength.SetValue(e)
        if event:
            event.Skip()

    def onSavePassword(self, event):
        pwd = self.txtMasterPassword.GetValue()
        if pwd != self.config.data_passphrase:
            # Password has changed. Do we have any stored backups?
            # If so, they should be deleted.
            runs = self.db.runs()
            num_runs = len(runs)
            if num_runs > 0:
                size = 0
                for run in runs:
                    size += run.size
                # Check with the user.
                msg = _("You current have {numruns} backup runs stored, " \
                        "totalling {size} of remote data.\n" \
                        "Changing the Master Password means old encrypted backups cannot be used.\n" \
                        "Note that they can be kept for disaster recovery if needed,\n" \
                        "but we suggest you simply start fresh.").format(\
                            numruns=num_runs, size=utils.readable_form(size))
                mbox = OptionDialog(self, msg, _("Delete Backup Runs"),
                                    _("Also delete all encrypted backup data stored remotely."),
                                    default=True)
                if mbox.ShowModal() != wx.ID_OK:
                    return
                delete_offsite_data = mbox.chkOption.GetValue()
                # TODO skip if no runs
                # We keep track of all errors
                errors = ""
                with ProgressDialog(self, _("Deleting"), _("Deleting old encrypted backup data.\nPlease wait...")):
                    for backup in self.config.backups.itervalues():
                        # If its encrypted
                        if backup.encrypt:
                            # If the option set - delete all offline data at the store
                            if delete_offsite_data:
                                try:
                                    # Get the list of unique stores used by runs of this backup
                                    runs = self.db.runs(backup.name)
                                    stores = set([r.store for r in runs])
                                    # Get the store and delete all data.
                                    for storename in stores:
                                        store = self.config.storage[storename].copy()
                                        store.delete_backup_data(backup.name)
                                except Exception as e:
                                    errors += "\nDelete offline data for %s failed: %s" % (backup.name, str(e))
                            # Now delete the database records of the run and backup
                            try:
                                self.db.delete_backup(backup.name)
                            except Exception as e:
                                # BUG FIX: the format string was missing its second
                                # %s, so this line raised TypeError ("not all
                                # arguments converted") instead of recording the error.
                                errors += "\nDelete local backup information for %s failed: %s" % (backup.name, str(e))
                if len(errors) > 0:
                    dlg.Error(self, errors)
        if not pwd:
            self.config.data_passphrase = None
            app.show_message('Password cleared')
        else:
            self.config.data_passphrase = pwd
            app.show_message('Password set')
        self.config.save()
        # Now delete all the backups and offsite data.

    ######################################################################
    #
    #        Save and Load
    #
    ######################################################################

    def update_state(self):
        # ViewState shows the read-only label; NewState shows the editable name.
        if self.state == ViewState:
            self.lblName.Show(True)
            self.txtName.Show(False)
        if self.state == NewState:
            self.lblName.Show(False)
            self.txtName.Show(True)
        #self.pnlDetails.Fit()
        self.pnlDetails.Refresh()

    def clear_filetype(self):
        self.show_filetype("<name>", [])

    def show_filetype(self, name, list):
        try:
            # General Information
            self.txtName.SetValue(name)
            self.lblName.SetLabel(name)
            # TODO!
            self.txtExtensions.Clear()
            list.sort()
            self.txtExtensions.AppendText("\n".join(list))
            self.update_state()
        except Exception as e:
            log.error("Error showing File Type:", str(e))

    def save_filetype(self):
        # BUILD THE Storage
        if len(self.txtName.GetValue()) == 0:
            dlg.Warn(self, _("File Type name cannot be blank"))
            return
        list = self.txtExtensions.GetValue().split("\n")
        try:
            # Create the new file_type object
            name = self.txtName.GetValue()
            # We already have list from above
            # ensure the list is clean: strip whitespace, drop leading dots,
            # skip empties and duplicates.
            cleanlist = []
            for item in list:
                item = item.strip()
                while len(item) > 0 and item[0] == ".":
                    item = item[1:]
                if len(item) == 0:
                    continue
                if item not in cleanlist:
                    cleanlist.append(item)
            cleanlist.sort()
        except Exception as e:
            dlg.Warn(self, str(e))
            return
        if self.state == ViewState:
            # Delete the old name
            oldname = self.lstFileTypes.GetStringSelection()
            try:
                del self.config.file_types[oldname]
            except:
                pass
        self.config.file_types[name] = cleanlist
        self.config.save()
        self.update_filetype_list()
        self.show_filetype(name, cleanlist)

    ######################################################################3
    #
    #        Misc Routines
    #
    ######################################################################3

    def delete_filetype(self, name):
        del self.config.file_types[name]
        self.config.save()
        self.update_filetype_list()
def unMuteUser(self, user):
    """Remove the mute on *user* in this context; delegates to the DB layer."""
    db = DB()
    return db.unmuteUser(user, self)
catcommand2 = "mv hold1 " + fullpath5 system(catcommand) if line == "": pass else: print line + "\n" if select.select([sys.stdin, ], [], [], 0.5)[0]: foo = sys.stdin().readline().strip() if foo == "exit": break elif comm.lower == "sms": command = agentkey + " " + "PORT" + " " + port1 + " " + foo db = DB(config=config) db.query("SELECT path from modems where id=" + modem) path2 = db.fetchone()[0] db.query("SELECT controlkey from modems where id=" + modem) key2 = db.fetchone()[0] number2 = agentnumber db.query("SELECT type from modems where id=" + modem) modemtype2 = db.fetchone()[0] if modemtype2 == "usb": usb = serial.serialposix(port='/dev/ttyUSB2', baudrate=115200, bytesize=8, parity='N', stopbits=1) usb.write("ATZ\r\n") sleep(1) line = read_modem(usb)
def refreshInfo(self):
    """Reload this conversation's info from the database into self.db.

    NOTE(review): the attribute is named ``db`` but holds conversation
    info, not a connection - confirm against the rest of the class.
    """
    conversation_info = DB().getConversationInfo(peer=self.id)
    self.db = conversation_info
class LongRunTestCase(unittest.TestCase):
    """Soak test: a full backup followed by many incremental backup/restore
    cycles, checking the restored tree and the DB bookkeeping each cycle."""

    def setUp(self):
        self.config = Config.get_config()
        self.db = DB()
        self.db.check_upgrade()
        # Remember the current max ids so tearDown only deletes rows we create.
        self.mark_db_ids()

        # Scratch area: source files, backup store and restore target.
        self.test_folder = tempfile.mkdtemp()
        self.files_folder = os.path.join(self.test_folder, "files")
        self.store_folder = os.path.join(self.test_folder, "store")
        self.restore_folder = os.path.join(self.test_folder, "restore")
        utils.makedirs(self.files_folder)
        utils.makedirs(self.store_folder)
        utils.makedirs(self.restore_folder)

        # Build the base set of files
        with open(os.path.join(self.files_folder, "base"), "w") as f:
            f.write("base")
        with open(os.path.join(self.files_folder, "incr"), "w") as f:
            f.write("0")

        config_file = os.path.expanduser("~/.vault")
        if not os.path.exists(config_file):
            raise Exception("Vault test configuration file (~/.vault) does not exist")
        self.store_config = ConfigParser.RawConfigParser()
        self.store_config.read(config_file)

        # FOLDER STORE
        self.store = FolderStore("teststore", "50MB", True, self.store_folder)

        # Alternative store configurations, kept for manual runs:
        # DROPBOX STORE
        # self.login = self.store_config.get("DropBox", "login")
        # self.password = self.store_config.get("DropBox", "password")
        # self.folder = self.store_config.get("DropBox", "folder")
        # self.app_key = self.store_config.get("DropBox", "app_key")
        # self.app_secret_key = self.store_config.get("DropBox", "app_secret_key")
        # self.store = DropBoxStore("teststore", 0, False, self.folder, self.login, self.password,
        #                           self.app_key, self.app_secret_key)

        # S3 STORE
        # self.key = self.store_config.get("Amazon", "aws_access_key_id")
        # self.secret_key = self.store_config.get("Amazon", "aws_secret_access_key")
        # self.bucket = self.store_config.get("Amazon", "bucket")
        # self.store = S3Store("teststore", 0, False, bucket=self.bucket, key=self.key, secret_key=self.secret_key)

        # Now record the existance of this store
        self.config.storage[self.store.name] = self.store

        # Build the backup object (dont save config)
        self.backup = Backup("testbackup")
        self.backup.include_folders = [self.files_folder]
        self.backup.store = self.store.name
        self.backup.notify_msg = False
        # Force a known passphrase for the encrypted test backup;
        # restored in tearDown.
        self.old_pass = self.config.data_passphrase
        self.config.data_passphrase = "goofy"
        self.backup.encrypt = True
        self.config.backups[self.backup.name] = self.backup

        # build an options object for use with the backup
        self.options = BlankClass()
        self.options.dry_run = False
        self.options.message = False
        self.options.email = False
        self.options.shutdown = False
        self.options.norecurse = False

        # How many cycles?
        self.cycles = 20

    def tearDown(self):
        self.config.data_passphrase = self.old_pass
        # Remove all DB records created during this test
        self.clean_db()
        shutil.rmtree(self.test_folder)
        self.assertFalse(os.path.isdir(self.test_folder))

    def testLongRun(self):
        # Run a full backup
        b = Run(self.backup.name, const.FullBackup, self.options)
        b.run()
        for cycle in xrange(self.cycles):
            print(str(cycle) + "\r")
            time.sleep(1)
            # Change some files
            with open(os.path.join(self.files_folder, "incr"), "w") as f:
                f.write(os.urandom(100))
            with open(os.path.join(self.files_folder, str(cycle)), "w") as f:
                f.write(os.urandom(100))
            # Run an incr backup
            b = Run(self.backup.name, const.IncrBackup, self.options)
            b.run()
            # Attempt to restore every file
            r = Restore(self.restore_folder, [self.files_folder], datetime.now(), self.options)
            r.run()
            # Fault-injection experiments, kept for manual debugging:
            # Lets break it
            # os.remove(os.path.join(self.restore_folder, self.files_folder[1:], "1"))
            # with open(os.path.join(self.files_folder, "incr"), "w") as f:
            #     f.write("-1")
            # with open(os.path.join(self.restore_folder, self.files_folder[1:], "8"), "w") as f:
            #     f.write("-1")

            # Check that the restored folder and original folder are identical
            left = unicode(self.files_folder)
            right = unicode(os.path.join(self.restore_folder, self.files_folder[1:]))
            d = utils.dircmp(left, right)
            self.assertEqual(d.left_only, set())
            self.assertEqual(d.right_only, set())
            self.assertEqual(d.diff_files, set())
            self.assertTrue(len(d.same_files) > 0)
            # Check that all files are in the DB
            for folder, _, local_files in os.walk(self.files_folder):
                for file in local_files:
                    # BUG FIX: arguments were swapped (os.path.join(file, folder)),
                    # producing paths that could never match the DB records.
                    path = os.path.join(folder, file)
                    # This will raise an exception if it does not exist
                    self.db.select_path(path, build=False)

    ############################################################################
    #
    #        Utility Routines
    #
    ############################################################################

    def mark_db_ids(self):
        # Record current max ids of every table we touch (0 when empty).
        self.max_fs_id = self.db.query("select max(fs_id) from fs", ())[0][0]
        if self.max_fs_id is None:
            self.max_fs_id = 0
        self.max_version_id = self.db.query("select max(version_id) from versions", ())[0][0]
        if self.max_version_id is None:
            self.max_version_id = 0
        self.max_run_id = self.db.query("select max(run_id) from runs", ())[0][0]
        if self.max_run_id is None:
            self.max_run_id = 0
        self.max_message_id = self.db.query("select max(message_id) from messages", ())[0][0]
        if self.max_message_id is None:
            self.max_message_id = 0

    def clean_db(self):
        # Delete only the rows created since mark_db_ids() was taken.
        self.db.execute("delete from messages where message_id > ?", (self.max_message_id,))
        self.db.execute("delete from versions where version_id > ?", (self.max_version_id,))
        self.db.execute("delete from fs where fs_id > ?", (self.max_fs_id,))
        self.db.execute("delete from runs where run_id > ?", (self.max_run_id,))
def add(self, type): return DB().addGroup(peer=self.id, type=type)
import ujson from lib.db import DB # parse input parameters parameters = {'zoneid': None, 'output_type': 'plain'} current_param = None for param in sys.argv[1:]: if len(param) > 1 and param[0] == '/': current_param = param[1:].lower() elif current_param is not None: if current_param in parameters: parameters[current_param] = param.strip() current_param = None if parameters['zoneid'] is not None: cpDB = DB() response = cpDB.list_clients(parameters['zoneid']) else: response = [] # output result as plain text or json if parameters['output_type'] != 'json': heading = {'sessionId': 'sessionid', 'userName': '******', 'ipAddress': 'ip_address', 'macAddress': 'mac_address', 'total_bytes': 'total_bytes', 'idletime': 'idletime', 'totaltime': 'totaltime' } print '%(sessionId)-30s %(userName)-20s %(ipAddress)-20s %(macAddress)-20s '\
class CPBackgroundProcess(object):
    """ background process helper class

    Holds handles to ipfw, arp, the zone configuration and the session
    database, and keeps the ipfw state in sync with the administration.
    """

    def __init__(self):
        # open syslog and notice startup
        syslog.openlog('captiveportal', logoption=syslog.LOG_DAEMON)
        syslog.syslog(syslog.LOG_NOTICE, 'starting captiveportal background process')
        # handles to ipfw, arp the config and the internal administration
        self.ipfw = IPFW()
        self.arp = ARP()
        self.cnf = Config()
        self.db = DB()
        self._conf_zone_info = self.cnf.get_zones()

    def list_zone_ids(self):
        """ return zone numbers """
        return self._conf_zone_info.keys()

    def initialize_fixed(self):
        """ initialize fixed ip / hosts per zone

        For every configured allowed ip/mac address, make sure exactly one
        administrative session exists, and drop administrative sessions whose
        address was removed from the configuration.
        """
        cpzones = self._conf_zone_info
        for zoneid in cpzones:
            for conf_section in ['allowedaddresses', 'allowedmacaddresses']:
                for address in cpzones[zoneid][conf_section]:
                    # IDIOM FIX: was conf_section.find('mac') == -1
                    if 'mac' not in conf_section:
                        sessions = self.db.sessions_per_address(zoneid, ip_address=address)
                        ip_address = address
                        mac_address = None
                    else:
                        sessions = self.db.sessions_per_address(zoneid, mac_address=address)
                        ip_address = None
                        mac_address = address
                    # drop user-authenticated sessions on this address; the
                    # address is now administratively allowed instead
                    sessions_deleted = 0
                    for session in sessions:
                        if session['authenticated_via'] not in ('---ip---', '---mac---'):
                            sessions_deleted += 1
                            self.db.del_client(zoneid, session['sessionId'])
                    if sessions_deleted == len(sessions) or len(sessions) == 0:
                        # when there's no session active, add a new one
                        # (only administrative, the sync process will add it if neccesary)
                        if ip_address is not None:
                            self.db.add_client(zoneid, "---ip---", "", ip_address, "")
                        else:
                            self.db.add_client(zoneid, "---mac---", "", "", mac_address)
            # cleanup removed static sessions
            for dbclient in self.db.list_clients(zoneid):
                if dbclient['authenticated_via'] == '---ip---' \
                        and dbclient['ipAddress'] not in cpzones[zoneid]['allowedaddresses']:
                    self.ipfw.delete(zoneid, dbclient['ipAddress'])
                    self.db.del_client(zoneid, dbclient['sessionId'])
                elif dbclient['authenticated_via'] == '---mac---' \
                        and dbclient['macAddress'] not in cpzones[zoneid]['allowedmacaddresses']:
                    if dbclient['ipAddress'] != '':
                        self.ipfw.delete(zoneid, dbclient['ipAddress'])
                    self.db.del_client(zoneid, dbclient['sessionId'])

    def sync_zone(self, zoneid):
        """ Synchronize captiveportal zone.
            Handles timeouts and administrative changes to this zones sessions
        """
        if zoneid in self._conf_zone_info:
            # fetch data for this zone
            cpzone_info = self._conf_zone_info[zoneid]
            registered_addresses = self.ipfw.list_table(zoneid)
            registered_addr_accounting = self.ipfw.list_accounting_info()
            expected_clients = self.db.list_clients(zoneid)
            concurrent_users = self.db.find_concurrent_user_sessions(zoneid)

            # handle connected clients, timeouts, address changes, etc.
            for db_client in expected_clients:
                # fetch ip address (or network) from database
                cpnet = db_client['ipAddress'].strip()

                # there are different reasons why a session should be removed, check for all reasons and
                # use the same method for the actual removal
                drop_session_reason = None

                # session cleanups, only for users not for static hosts/ranges.
                if db_client['authenticated_via'] not in ('---ip---', '---mac---'):
                    # check if hardtimeout is set and overrun for this session
                    if 'hardtimeout' in cpzone_info and str(cpzone_info['hardtimeout']).isdigit():
                        # hardtimeout should be set and we should have collected some session data from the client
                        if int(cpzone_info['hardtimeout']) > 0 and float(db_client['startTime']) > 0:
                            if (time.time() - float(db_client['startTime'])) / 60 > int(cpzone_info['hardtimeout']):
                                drop_session_reason = "session %s hit hardtimeout" % db_client['sessionId']

                    # check if idletimeout is set and overrun for this session
                    if 'idletimeout' in cpzone_info and str(cpzone_info['idletimeout']).isdigit():
                        # idletimeout should be set and we should have collected some session data from the client
                        if int(cpzone_info['idletimeout']) > 0 and float(db_client['last_accessed']) > 0:
                            if (time.time() - float(db_client['last_accessed'])) / 60 > int(cpzone_info['idletimeout']):
                                drop_session_reason = "session %s hit idletimeout" % db_client['sessionId']

                    # cleanup concurrent users
                    if 'concurrentlogins' in cpzone_info and int(cpzone_info['concurrentlogins']) == 0:
                        if db_client['sessionId'] in concurrent_users:
                            drop_session_reason = "remove concurrent session %s" % db_client['sessionId']

                    # if mac address changes, drop session. it's not the same client
                    current_arp = self.arp.get_by_ipaddress(cpnet)
                    if current_arp is not None and current_arp['mac'] != db_client['macAddress']:
                        drop_session_reason = "mac address changed for session %s" % db_client['sessionId']
                elif db_client['authenticated_via'] == '---mac---':
                    # detect mac changes
                    current_ip = self.arp.get_address_by_mac(db_client['macAddress'])
                    # BUGFIX(idiom): was `current_ip != None`; use identity comparison per PEP 8
                    if current_ip is not None:
                        if db_client['ipAddress'] != '':
                            # remove old ip
                            self.ipfw.delete(zoneid, db_client['ipAddress'])
                        self.db.update_client_ip(zoneid, db_client['sessionId'], current_ip)
                        self.ipfw.add_to_table(zoneid, current_ip)
                        self.ipfw.add_accounting(current_ip)

                # check session, if it should be active, validate its properties
                if drop_session_reason is None:
                    # registered client, but not active according to ipfw (after reboot)
                    if cpnet not in registered_addresses:
                        self.ipfw.add_to_table(zoneid, cpnet)
                    # is accounting rule still available? need to reapply after reload / reboot
                    if cpnet not in registered_addr_accounting:
                        self.ipfw.add_accounting(cpnet)
                else:
                    # remove session
                    syslog.syslog(syslog.LOG_NOTICE, drop_session_reason)
                    self.ipfw.delete(zoneid, cpnet)
                    self.db.del_client(zoneid, db_client['sessionId'])

            # if there are addresses/networks in the underlying ipfw table which are not in our administration,
            # remove them from ipfw.
            for registered_address in registered_addresses:
                address_active = False
                for db_client in expected_clients:
                    if registered_address == db_client['ipAddress']:
                        address_active = True
                        break
                if not address_active:
                    self.ipfw.delete(zoneid, registered_address)
def main():
    """Read the scrapper config, connect to the DB and Wordpress, then run the
    configured scrapper for every website section, storing and publishing any
    newly scraped items."""
    scrapperFactory = ScrapperFactory()
    cfg = Config("conf/scrapper.conf")
    # IDIOM FIX: was `if (cfg.readConfig() == False):`
    if not cfg.readConfig():
        print( "[Main - readConfig] [Error reading config file]" )
        return
    if not cfg.createLog():
        print( "[Main - createLog] [Error creating log files]" )
        return
    cfg.log.info( "[ Main - readConfig ] [ Config file read sucessfully ]" )
    cfg.log.info( "[ Main - createLog ] [ Log files created successfully ]" )
    cfg.log.info( "[ Scrapper ] - [ This is Major Tom to Ground Control! ]" )
    cfg.log.info( "[ Scrapper ] - [ Here are my orders ]" )

    # Log the full crawl plan before starting.
    for website in cfg.websitesConf:
        cfg.log.info( "[ Scrapper ] - [ Website -> \"{}\" ]".format( website ) )
        for section in cfg.websitesConf[website]:
            cfg.log.info( "[ Scrapper ] - [ \tSection -> \"{}\" ]".format( section ) )
            cfg.log.info( "[ Scrapper ] - [ \t\turl -> \"{}\" ]".format( cfg.websitesConf[website][section]["url"] ) )
            cfg.log.info( "[ Scrapper ] - [ \t\tslug -> \"{}\" ]".format( cfg.websitesConf[website][section]["slug"] ) )

    db = DB(cfg)
    wpinfo = { "website" : cfg.wphost, "user" : cfg.wpuser, "pass" : cfg.wppass }
    if db.hasError():
        cfg.log.info( "[ Scrapper ] - [ Conection to database has failed ]".format( website ) )
        cfg.log.error( "[ Scrapper ] - [ Conection to database has failed ]".format( website ) )
        return

    for website in cfg.websitesConf:
        cfg.log.info( "[ Scrapper ] - [ Looking for \"{}\" sections ]".format( website ) )
        for section in cfg.websitesConf[website]:
            # table name and type are the same as section name
            cfg.log.info( "[ Scrapper ] - [ Accesing to \"{}\" table ]".format( section ) )
            db.createTableIfNotExist( section )
            cfg.log.info( "[ Scrapper ] - [ Calling \"{}\" scrapper ]".format( section ) )
            #factory -> ( type, db, wpinfo, table, url, slug, log )
            scrapperInstance = scrapperFactory.factory( section, db, wpinfo, section,
                                                        cfg.websitesConf[website][section]["url"],
                                                        cfg.websitesConf[website][section]["slug"],
                                                        cfg.log )
            cfg.log.info( "[ Scrapper ] - [ \"{}\" scrapper begins ]".format( section ) )
            scrapperInstance.scrape()
            cfg.log.info( "[ Scrapper ] - [ \"{}\" scrapper has finished ]".format( section ) )
            scrappedItems = scrapperInstance.numberOfItems()
            cfg.log.info( "[ Scrapper ] - [ \"{}\" scrapper has scrapped {} new items ]".format( section, scrappedItems ) )
            if scrappedItems:
                cfg.log.info( "[ Scrapper \"{}\" ] - [ storing items into db ]".format( section ) )
                scrapperInstance.addItemsToMysql()
                cfg.log.info( "[ Scrapper \"{}\" ] - [ writting articles into Wordpress ]".format( section ) )
                scrapperInstance.addItemsToWordpress()

    cfg.log.info( "[ Scrapper ] - [ Bye ]" )
    # BUGFIX: removed a stray duplicate `scrapperInstance.addItemsToWordpress()`
    # that ran after the loops finished, re-publishing the last section's items.
import sys import logging import time from lib.geocoder import Geocoder from lib.db import DB logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', filename='./log/location_geocoder.log', filemode='w') conf_file = 'data/geocoder.conf' geocoder = Geocoder(logging, conf_file) db = DB(logging) while True: users = db.get_geocoding_target_users(100) if len(users) == 0: logging.info("All users in DB are geocoded.") time.sleep(10*60) continue for user in users: if user['location_property'] == '': db.update_user_by_geocoded_info(user['id'], -1) logging.info("Cannot geocoded %s", user['id']) time.sleep(10) continue locations = geocoder.get(user['location_property'])
POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------------- update (or add) client/session restrictions """ import sys import ujson from lib.db import DB parameters = {'zoneid': '', 'sessionid': None, 'session_timeout': None, 'output_type': 'plain'} current_param = None for param in sys.argv[1:]: if len(param) > 1 and param[0] == '/' and param[1:] in parameters: current_param = param[1:].lower() elif current_param is not None: parameters[current_param] = param.strip() current_param = None response = dict() if parameters['zoneid'] is not None and parameters['sessionid'] is not None: db = DB() response['response'] = db.update_session_restrictions(parameters['zoneid'], parameters['sessionid'], parameters['session_timeout']) # output result as plain text or json if parameters['output_type'] != 'json': for item in response: print '%20s %s' % (item, response[item]) else: print(ujson.dumps(response))
class Tester():
    """Self-test driver: builds a temporary file tree, runs full/incremental
    backup and restore cycles against two stores, verifies the results, and
    cleans up all files, config entries and DB rows it created."""

    def __init__(self, test_folder, options):
        self.test_folder = os.path.join(test_folder, "tester")
        self.options = options
        self.config = Config.get_config()
        self.store_folder = os.path.join(self.test_folder, "stores")
        self.files_folder = os.path.join(self.test_folder, "files")
        self.restore_folder = os.path.join(self.test_folder, "restore")
        self.db = DB()
        # Snapshot the current max ids so cleanup() deletes only our rows.
        self.max_fs_id = self._max_id("fs", "fs_id")
        self.max_version_id = self._max_id("versions", "version_id")
        self.max_run_id = self._max_id("runs", "run_id")
        self.max_message_id = self._max_id("messages", "message_id")
        log.debug("MAX IDs", self.max_fs_id, self.max_version_id, self.max_run_id, self.max_message_id)
        # Two distinct random payloads so content changes are detectable.
        self.teststring1 = os.urandom(204800)
        self.teststring2 = os.urandom(204800)

    def _max_id(self, table, column):
        """Return the current maximum of *column* in *table* (0 when empty)."""
        value = self.db.query("select max(%s) from %s" % (column, table), ())[0][0]
        return 0 if value is None else value

    def run(self):
        """Run all tests, then optionally notify / email / shut down per options."""
        try:
            self.simpleCycleTest()
            self.restoreTest()
        except Exception as e:
            # Was a bare `except: pass`; at least record why the run failed.
            log.error("Test run failed: %s" % str(e))
        if self.options.message:
            try:
                from lib.dlg import Notify
                Notify(const.AppTitle, "Test run is complete")
            except Exception:
                log.debug("Unable to notify. No-one logged in?")
        if self.options.email:
            self.send_email(True)
        if self.options.shutdown:
            os.system("shutdown -P +2")

    def simpleCycleTest(self):
        """Build the test config and run options.cycles full+incremental cycles."""
        try:
            # build the test files/folders
            self.make_folders()
            # fill the files folder with the initial set of files
            self.fill_files()
            # Build a backup and store
            self.build_config()
            for i in xrange(int(self.options.cycles)):
                log.info("Cycle", i)
                # Run a full and one incremental
                self.run_cycle_test()
        except Exception as e:
            log.error("Test run failed: %s" % str(e))
        finally:
            self.cleanup()
        log.info("********Success")

    def restoreTest(self):
        """Placeholder restore test: only builds the folder tree and files."""
        try:
            # build the test files/folders
            self.make_folders()
            # fill the files folder with the initial set of files
            self.fill_files()
        except Exception as e:
            # Was a bare `except: pass`; log the failure instead of hiding it.
            log.error("Restore test failed: %s" % str(e))
        log.info("********Success")

    def send_email(self, result, error_message=None):
        '''
        Send a message to the appropriate users.
        If result is False (failure) then error message will contain the reason.

        @param result:
        @param error_message:
        '''
        log.debug("send_email: ", result, error_message)
        if result:
            message_text = "Test to folder %s completed." % (self.test_folder)
            subject = "Test Completed"
        else:
            message_text = "Test FAILED\n\nERROR: %s" % (error_message)
            subject = "Test FAILED"
        log.debug("Starting mail send")
        try:
            sendemail.sendemail(subject, message_text)
        except Exception as e:
            self.db.save_message("Unable to email results: %s" % str(e))
        log.trace("send_email completed")

    def run_cycle_test(self):
        """One full cycle: full backups on both stores, restore+verify three
        files, change them, incremental backups, restore+verify again."""
        options = BlankClass()
        options.dry_run = False
        options.message = False
        options.email = False
        options.shutdown = False
        options.norecurse = False

        # Run a full backup
        b = Run("testbackup1", const.FullBackup, options)
        b.run()
        # Run a full backup
        b = Run("testbackup2", const.FullBackup, options)
        b.run()

        # Now restore two files, one that will be on each store.
        restore_file1 = os.path.join(self.files_folder, "dir1", "f2.mp3")
        dest_file1 = os.path.join(self.restore_folder, restore_file1[1:])
        restore_file2 = os.path.join(self.files_folder, "dir2", "f3.exe")
        dest_file2 = os.path.join(self.restore_folder, restore_file2[1:])
        restore_file3 = os.path.join(self.files_folder, "dir3", "f4.txt")
        dest_file3 = os.path.join(self.restore_folder, restore_file3[1:])
        r = Restore(self.restore_folder, [restore_file1, restore_file2, restore_file3],
                    datetime.now(), options)
        r.run()
        for path in [dest_file1, dest_file2, dest_file3]:
            if not os.path.exists(path):
                raise Exception("File %s was not restored" % path)
            with open(path) as f:
                if f.read() != self.teststring1:
                    raise Exception("Restored file contents incorrect %s" % path)
            os.remove(path)

        # Make sure the store is the right size
        for name in self.config.storage:
            store = self.config.storage[name].copy()
            size, used, avail = store.current_usage()
            log.debug("Store", store.name, "size", size, "used", used, "avail", avail)
            if store.auto_manage and used > size:
                raise Exception("Store %s has grown too large" % store.name)

        ######################PART 2
        # wait a little so changed files get a newer timestamp
        time.sleep(1.1)
        for path in [restore_file1, restore_file2, restore_file3]:
            # Make a change
            with open(path, "w") as f:
                f.write(self.teststring2)
        # wait a little
        time.sleep(1.1)

        # Run an incremental backup
        b = Run("testbackup1", const.IncrBackup, options)
        b.run()
        # Run an incremental backup
        b = Run("testbackup2", const.IncrBackup, options)
        b.run()
        time.sleep(1.1)

        r = Restore(self.restore_folder, [restore_file1, restore_file2, restore_file3],
                    datetime.now(), options)
        r.run()
        for path in [dest_file1, dest_file2, dest_file3]:
            if not os.path.exists(path):
                # BUGFIX: format string had two %s placeholders with one
                # argument, which raised TypeError instead of this Exception.
                raise Exception("File %s was not restored after INCR" % path)
            with open(path) as f:
                if f.read() != self.teststring2:
                    raise Exception("Restored file contents incorrect after INCR %s" % path)

        # Make sure the store is the right size
        for name in self.config.storage:
            store = self.config.storage[name].copy()
            size, used, avail = store.current_usage()
            log.debug("Store", store.name, "size", size, "used", used)
            if store.auto_manage and used > size:
                raise Exception("Store %s has grown too large" % store.name)

        time.sleep(1.1)
        # change it back
        for path in [restore_file1, restore_file2, restore_file3]:
            with open(path, "w") as f:
                f.write(self.teststring1)

    def build_config(self):
        """Create two stores and two backups (one per store) in the config."""
        log.trace("build_config")
        if self.options.store:
            store1 = self.config.storage[self.options.store].copy()
            store2 = store1
        else:
            # Make the store about 3x the options size
            s, dummy, dummy = utils.from_readable_form(self.options.size)
            store_size = utils.readable_form(s * 3)
            store1 = FolderStore("teststore1", store_size, True,
                                 os.path.join(self.store_folder, "teststore1"))
            store2 = FolderStore("teststore2", store_size, True,
                                 os.path.join(self.store_folder, "teststore2"))
            self.config.storage[store1.name] = store1
            self.config.storage[store2.name] = store2

        backup1 = Backup("testbackup1")
        backup1.include_folders = [self.files_folder]
        backup1.include_packages = True
        backup1.exclude_types = ["Music"]
        backup1.exclude_patterns = []
        backup1.store = store1.name
        backup1.notify_msg = False
        self.config.backups[backup1.name] = backup1

        backup2 = Backup("testbackup2")
        backup2.include_folders = [self.files_folder]
        backup2.include_packages = True
        backup2.exclude_patterns = []
        # BUGFIX: was assigned to backup1.exclude_types (copy-paste error),
        # leaving backup2 without exclusions and clobbering backup1's.
        backup2.exclude_types = ["Videos", "Programs"]
        backup2.store = store2.name
        backup2.notify_msg = False
        self.config.backups[backup2.name] = backup2

    def fill_files(self, remaining=None, root=None):
        """Populate the files folder with a breadth-first tree of dirs/files
        until roughly options.size bytes have been written."""
        log.trace("fill_files")
        if not remaining:
            # First time in...
            remaining, dummy, dummy = utils.from_readable_form(self.options.size)
            root = self.files_folder
        # `folders` renamed from `list`, which shadowed the builtin.
        folders = [root]
        done = False
        while not done:
            newlist = []
            for folder in folders:
                for dname in ["dir1", "dir2", "dir3"]:
                    path = os.path.join(folder, dname)
                    utils.makedirs(path)
                    newlist.append(path)
                for fname in ["f1.avi", "f2.mp3", "f3.exe", "f4.txt"]:
                    path = os.path.join(folder, fname)
                    with open(path, "w") as f:
                        f.write(self.teststring1)
                    remaining -= len(self.teststring1)
                    if remaining < 0:
                        done = True
                        break
            folders = newlist
        return

    def make_folders(self):
        """Create the test, files and restore folders if missing."""
        log.trace("make_folders")
        if not os.path.isdir(self.test_folder):
            os.makedirs(self.test_folder)
        if not os.path.isdir(self.files_folder):
            os.makedirs(self.files_folder)
        if not os.path.isdir(self.restore_folder):
            os.makedirs(self.restore_folder)

    def cleanup(self):
        """Remove all DB rows, store data, config entries and files created
        by this tester."""
        log.trace("Cleanup")
        self.db.execute("delete from messages where message_id > ?", (self.max_message_id,))
        self.db.execute("delete from versions where version_id > ?", (self.max_version_id,))
        self.db.execute("delete from fs where fs_id > ?", (self.max_fs_id,))
        self.db.execute("delete from runs where run_id > ?", (self.max_run_id,))
        if self.options.store:
            # BUGFIX: was `stores = self.options.store`, which iterated the
            # store NAME character by character below.
            stores = [self.options.store]
        else:
            stores = ["teststore1", "teststore2"]
        for name in stores:
            log.info("Cleaning up", name)
            store = self.config.storage[name].copy()
            store.connect()
            store.delete_store_data()
            store.disconnect()
        if not self.options.store:
            del self.config.storage["teststore1"]
            del self.config.storage["teststore2"]
        del self.config.backups["testbackup1"]
        del self.config.backups["testbackup2"]
        shutil.rmtree(self.test_folder)
class BaseCrawler(metaclass=ABCMeta):
    """
    Abstract base class for brand/news crawlers.

    If you want to use posts_soup and soup (post_soup) separately,
    change ``self.soup = ...`` to ``self.posts_soup = ...`` in set_posts_soup;
    after that, posts_soup can be used independently inside get_posts.
    """

    def __init__(
        self,
        table: NamedTuple,
        brand_name: str,
        main_url: str,
        base_page_url: Optional[str] = "",
        model_date_format: str = "%Y-%m-%d",
        encoding: Optional[str] = None,
        custom_config: Config = None,
    ):
        self.brand_name = brand_name
        # Fall back to the default Config when none was injected.
        if custom_config is None:
            self.config = Config()
        else:
            self.config = custom_config
        self.init_logger()
        self.base_page_url = base_page_url
        # Track the category page / post currently being processed.
        self.curr_ctgr_url = None
        self.curr_post_url = None
        self.table = table
        # Test runs write to the `test` schema, production to `public`.
        if self.config.is_test is True:
            self.table_name = 'test.' + self.table.__name__
        else:
            self.table_name = 'public.' + self.table.__name__
        self.conn = DB(table=table, table_name=self.table_name)
        self.encoding = encoding
        self.session = RetrySession(encoding=encoding)
        self.main_url = main_url
        self.model_date_format = model_date_format
        # Parsed documents; populated by set_soup / set_posts_soup.
        self.soup = None
        self.posts_soup = None
        self.metas = {}
        if not self.config.is_valid():
            raise ValueError(f"Invalid Config: \n {self.config.__repr__()}")

    def __getstate__(self):
        # Loggers are not picklable; store the logger's name instead.
        d = self.__dict__.copy()
        if "logger" in d:
            d["logger"] = d["logger"].name
        return d

    def __setstate__(self, d):
        # Re-attach the logger by name on unpickle.
        if "logger" in d:
            d["logger"] = getLogger(d["logger"])
        self.__dict__.update(d)

    # must add add_query as 'where brand_name' for unique together news_id, brand_name
    def get_exist_ids(self):
        """Return the set of news_ids already stored for this brand."""
        rows = self.conn.select(
            fields=["news_id"],
            table=self.table_name,
            add_query=f" WHERE media_name='{self.brand_name}'",
        )
        return set(map(lambda r: int(r[0]), rows))

    # ======================== FLOW =============================
    def go(self):
        """Entry point: log the start and run the page-parse loop."""
        self.logger.info(
            f"{self.brand_name} Crawler Start at {datetime.now()}")
        self.page_parse()

    def page_parse(self) -> None:
        """Iterate pages, parse each post, and stop once a post's date is
        older than the configured end_date; then save everything collected."""
        datas = []
        stop_crawling = False
        for page in self.gen_pages():
            if stop_crawling:
                break
            self.curr_ctgr_url = page
            self.set_posts_soup(page)
            posts = self.get_posts()
            self.logger.debug(
                f"Num of Crawled Posts: {len(posts)} in Page: {page} ")
            for url in posts:
                self.curr_post_url = url
                try:
                    data: NamedTuple = self.detail_parse()
                except Exception as e:
                    # Skip posts that fail to parse; keep crawling the rest.
                    self.logger.error(
                        f"Error During Detail Parse \nURL: {url} \nDetail: {e}"
                    )
                    continue
                # Stop (without saving this item) when the crawled item's
                # date is past the limit (end) date.
                if self.get_date() < self.config.end_date:
                    stop_crawling = True
                    break
                datas.append(tuple(data))
        self.save(datas=datas)

    @retry(8, 3)
    def set_soup(self, url: str) -> None:
        """Fetch *url* and parse it into self.soup."""
        try:
            res = self.session.get(url)
            self.soup = BeautifulSoup(res, "html.parser")
        except TypeError:
            # Some responses are response objects rather than str/bytes;
            # fall back to their .text attribute.
            self.soup = BeautifulSoup(res.text, "html.parser")

    @retry(5, 3)
    def set_posts_soup(self, url: str) -> None:
        """Parse a listing page; currently shares self.soup (see class docstring)."""
        self.set_soup(url)

    def set_metas(self) -> None:
        """Collect <meta> tags from self.soup into self.metas, keyed by
        property / name / http-equiv (first one present)."""
        metas = {}
        for m in self.soup.find_all("meta"):
            if not isinstance(m, Tag):
                continue
            k = m.get("property", m.get("name", m.get('http-equiv', None)))
            v = m.get("content", None)
            if k is not None and v is not None:
                metas[k] = v
        self.metas = metas

    @staticmethod
    def get_id(url: str) -> str:
        """Return the last run of digits in *url* (assumed to be the post id)."""
        return re.findall("\d+", url)[-1]

    def data_cleaning(self, datas: List[Tuple]) -> List[Tuple]:
        """Drop rows whose id (tuple index 1) already exists in the DB, and
        de-duplicate rows within this batch."""
        result = []
        id_idx = 1
        unique_ids = set(map(lambda d: d[id_idx], datas))
        exist_ids = self.get_exist_ids()
        new_ids = unique_ids.difference(exist_ids)
        for d in datas:
            new_id = d[id_idx]
            if new_id in new_ids:
                result.append(d)
                # Remove so later duplicates in the same batch are skipped.
                new_ids.remove(new_id)
        return result

    def save(self, datas: List[Tuple]):
        """Clean the collected rows and insert them (final step of go())."""
        self.logger.info(
            f"{self.brand_name} Crawler Finished at {datetime.now()}")
        self.logger.debug(f"Length Of Datas Before Cleaning: {len(datas)}")
        clean_datas = self.data_cleaning(datas)
        self.logger.debug(
            f"Length Of Datas After Cleaning: {len(clean_datas)}")
        if self.config.is_test is True:
            # Test mode also dumps the rows to a pickle for inspection and
            # guards against accidentally writing to the public schema.
            self.save_datas_as_pickle(clean_datas)
            assert self.table_name == f'test.{self.table.__name__}'
        self.conn.insert_magazine(clean_datas)
        self.logger.debug(f"Insert To Data to DB \n {self.conn.__dict__}")

    def detail_parse(self):
        """Fetch the current post URL, refresh soup/metas, and build a table row."""
        post_url = self.curr_post_url
        self.set_soup(post_url)
        self.set_metas()
        return self.table(
            self.brand_name,
            int(self.get_id(post_url)),
            self.get_date(),
            self.get_title(),
            self.get_content(),
            self.curr_post_url,
            self.get_keywords(),
            self.get_post_type(),
        )

    def gen_pages(self) -> Generator:
        """Yield listing-page URLs base_page_url + "1", "2", ... indefinitely;
        page_parse stops the iteration via its date limit."""
        page = 0
        while True:
            page += 1
            yield self.base_page_url + str(page)

    @abstractmethod
    def get_date(self) -> date:
        pass

    @abstractmethod
    def get_posts(self) -> List[str]:
        pass

    @abstractmethod
    def get_title(self) -> str:
        pass

    @abstractmethod
    def get_content(self) -> str:
        pass

    @abstractmethod
    def get_post_type(self) -> Optional[str]:
        pass

    @abstractmethod
    def get_keywords(self) -> str:
        pass

    # ======================== FLOW END =============================

    # ======================== Utils[Optional] ======================
    def save_datas_as_pickle(self, datas):
        """Pickle the crawled rows to TESTFILE_SAVE_PATH/<brand_name>.pkl."""
        pkl_save_path = f"{self.config.TESTFILE_SAVE_PATH}/"
        Path(pkl_save_path).mkdir(parents=True, exist_ok=True)
        with open(f"{pkl_save_path}/{self.brand_name}.pkl", "wb") as f:
            pickle.dump(datas, f)

    @staticmethod
    def clean_date_txt(txt):
        """Normalize date text before parsing (lowercase only, by default)."""
        return txt.lower()

    @staticmethod
    def get_clean_txt(txt):
        """Collapse all whitespace out of *txt*."""
        return "".join(txt.split())

    @check_return(date)
    def extract_time(self, text: str) -> Optional[date]:
        """Find the first date-like string in *text* and return it as a date,
        or None when nothing parseable is found."""
        clean_txt = self.clean_date_txt(text)
        date_time = next(datefinder.find_dates(clean_txt), None)
        if date_time is None:
            return None
        return date_time.date()

    def furbish_link(self, link: str, prefix=None) -> str:
        """Normalize a scraped link: add scheme to protocol-relative links,
        apply an optional prefix, and resolve relative paths against main_url."""
        if link.startswith("//"):
            link = "http:" + link
        if prefix is not None and isinstance(prefix, str):
            link = prefix + link
        if "http" not in link or link.startswith("/"):
            link = urljoin(self.main_url, link)
        return link

    def get_links(self, attrs) -> Dict[str, str]:
        # attrs: Dictionary or has values() method
        # Extract and normalize any link-bearing attributes from a tag.
        suspectors = ["href", "src", "ec-data-src"]
        links = defaultdict(str)
        for s in suspectors:
            link = attrs.get(s, None)
            if link is not None and isinstance(link, str):
                links[s] = self.furbish_link(link)
        return links

    @staticmethod
    def attrs_to_text(attrs) -> str:
        # attrs: Dictionary or has values() method
        # Flatten all attribute values (strings or lists) into one string.
        all_hints = []
        for v in attrs.values():
            if isinstance(v, list):
                all_hints.append(" ".join(v))
            else:
                all_hints.append(v)
        return " ".join(all_hints)

    def init_logger(self):
        """Create the per-brand file logger; requires config and brand_name."""
        if self.config is None or self.brand_name is None:
            raise SyntaxError(
                "Must be Called After Ininitalize !Config and brand_name!")
        l_path = self.config.LOG_SAVE_PATH
        if not os.path.exists(l_path):
            os.makedirs(l_path, exist_ok=True)
        self.logger = named_logger(f"{opjoin(l_path, self.brand_name)}.log",
                                   "crawl_logger")
class Workload(QtGui.QMainWindow):
    """Main application window: task list, quick-add input, tray icon,
    global shortcuts and import/export menu actions."""

    def __init__(self,app,onlyDBAccess=False):
        '''main window init'''
        QtGui.QMainWindow.__init__(self)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.db = DB(self)
        self.settings=Settings(self)
        self.onlyDBAccess=onlyDBAccess
        if onlyDBAccess:
            # Headless mode: caller only needs self.db / self.settings.
            return
        #GUI setting
        guiSettings(self)
        connectSignals(self)
        changeStyle(self)
        self.taskOpened = False
        self.app = app
        loadContexts(self)
        self.currentContext = self.settings.getInitContext()
        selectCurrentContext(self)
        self.loadTasksList(init=True)
        self.tray=Trayicon(self)
        self.timer=TaskReminder(self)
        self.shortcuts=ShortcutsHandler(self,self.settings['keyMainWindowToggle'])
        finalizeInit(self)
        self.translate()
        self.adjustHeight(init=True)

    def resizeEvent(self,e):
        # Re-apply the rounded-corner mask whenever the window size changes.
        path=QtGui.QPainterPath()
        rect=e.size()
        path.addRoundedRect(-1,-1,rect.width()+1,rect.height()+1,7,7)
        region=QtGui.QRegion(path.toFillPolygon().toPolygon())
        self.setMask(region)

    def taskListFocusIn(self,e):
        # Tab-focusing the list selects its first item so keyboard nav works.
        if e.reason()==QtCore.Qt.FocusReason.TabFocusReason:
            try:
                item=self.ui.taskList.itemAt(0)
                self.ui.taskList.setCurrentItem(self.ui.taskList.itemAt(0))
            except:
                pass

    def toggle(self):
        # Show/hide the window (tray icon and global shortcut hook).
        if self.isVisible():
            self.hide()
        else:
            self.show()

    def dropTask(self,e):
        # Delegate drag&drop handling to the Task module.
        Task.dropTask(self, e)

    def resizeColumns(self):
        # Fixed narrow priority/icon columns; column 0 (priority) stays hidden.
        self.ui.taskList.setColumnWidth(0, 20)
        self.ui.taskList.setColumnWidth(1, 20)
        self.ui.taskList.hideColumn(0)

    def setMarker(self,tasks):
        """Show a clock icon on the given task ids and clear it on all others."""
        icon=QtGui.QIcon(':res/status/clock.png')
        items=self.ui.taskList.findItems("",QtCore.Qt.MatchContains|QtCore.Qt.MatchRecursive)
        for i in items:
            removeicon=QtGui.QIcon()
            i.setIcon(2,removeicon)
        for i in tasks:
            for j in items:
                # column 0 / role 32 holds the task's DB id
                if j.data(0,32)==i:
                    j.setIcon(2,icon)

    def drawRow(self,painter,myopt,index):
        # Right-align row decorations, then let the tree widget paint the row.
        myopt.decorationPosition=QtGui.QStyleOptionViewItem.Right
        myopt.decorationAlignment=QtCore.Qt.AlignCenter
        QtGui.QTreeWidget.drawRow(self.ui.taskList,painter,myopt,index)

    # TASKS RELATED ACTIONS
    def addTask(self):
        """Create a task from the input box.

        A "N:" prefix or ":N" suffix (N < 6) sets the priority; text longer
        than 50 chars becomes the description with a truncated name.
        """
        t = self.ui.taskInput.text().strip()
        if t =="":
            return False
        self.ui.taskInput.clear()
        priority = 0
        try:
            if t[1] == ":":
                priority = int(t[0])
                if priority<6:
                    t = t[2:]
                else:
                    priority = 0
            elif t[-2] == ":":
                priority = int(t[-1])
                if priority<6:
                    t = t[:-2]
                else:
                    priority = 0
        except:
            # too-short input or non-numeric prefix: keep priority 0
            pass
        #TODO: create new function to handle input (regexp etc)
        if len(t)>50:
            taskname=t[:50]
            taskDescription=t
        else:
            taskname=t
            taskDescription=""
        if self.checkIfExist(taskname) is not True:
            duedate=self.defaultDueDate()
            taskid = self.db.addTask(taskname,priority, taskDescription, duedate, self.currentContext)
            self.createTaskItem(taskname, taskid, priority)
            self.adjustHeight()
            self.ui.statusbar.showMessage(QtGui.QApplication.translate("ui","New task created."),3300)
        else:
            self.ui.taskInput.setText(taskname)
            self.taskAlreadyExistMsg()

    def defaultDueDate(self):
        """Return the configured default due-date timestamp, or None if disabled."""
        if self.settings["defaultDueDateOn"]:
            dueValue=int(self.settings["defaultDueDateValue"])
            if self.settings["defaultDueDateUnit"]=="0":
                td=datetime.timedelta(hours=dueValue)
            else:
                td=datetime.timedelta(days=dueValue)
            return timestamp(datetime.datetime.now()+td)
        else:
            return None

    def createTaskItem(self, t, taskid=None, priority=0):
        """Insert one task row into the tree and keep it sorted by priority."""
        item = QtGui.QTreeWidgetItem([str(priority),"", t])
        item.setData(0, 32, taskid)  # stash the DB id in role 32
        item.setSizeHint(1, QtCore.QSize(0, 22))
        self.ui.taskList.addTopLevelItem(item)
        self.setPriorityColor(item, priority)
        self.ui.taskList.sortItems(0,QtCore.Qt.AscendingOrder)

    def checkIfExist(self,t):
        # True when a task with exactly this name is already in column 2.
        if len(self.ui.taskList.findItems(t,QtCore.Qt.MatchFlags(QtCore.Qt.MatchExactly),2))>0:
            return True

    def taskAlreadyExistMsg(self,parent=None):
        # Warn about a duplicate task name, optionally parented to another widget.
        text=QtGui.QApplication.translate("ui","Task with same name already exist, choose another")
        windowtitle=QtGui.QApplication.translate("ui","Task name already exists")
        msgWindow=QtGui.QMessageBox()
        if parent is not None:
            self=parent
        msgWindow.information(self, windowtitle, text, buttons=QtGui.QMessageBox.Ok )

    def loadTasksList(self, archived=False,init=False):
        # Rebuild the list from the DB rows of the current context.
        self.ui.taskList.clear()
        for i in self.db.getTasks(self.currentContext):
            self.createTaskItem(i[1], i[0],i[2])  # row tuple: (id, name, priority)

    def deleteSelectedTasks(self, force=False):
        """Delete the selected tasks, asking for confirmation unless *force*."""
        selectedItems = self.ui.taskList.selectedItems()
        if len(selectedItems)>0:
            tasks = []
            for item in selectedItems:
                tasks.append(item)
            windowtitle=QtGui.QApplication.translate("ui","Delete task")
            text=QtGui.QApplication.translate("ui","Do you really want to delete selected task(s) ?")
            if force:
                self.deleteTasks(tasks)
                # NOTE(review): the two assignments below are dead code —
                # duplicates of the ones above; left unchanged.
                windowtitle=QtGui.QApplication.translate("ui","Delete task")
                text=QtGui.QApplication.translate("ui","Do you really want to delete selected task(s) ?")
            elif self.questionPopup(windowtitle,text):
                self.deleteTasks(tasks)
            self.adjustHeight(downSize=True)

    def deleteTasks(self, tasks):
        # Remove each task from the DB and from the tree widget.
        for item in tasks:
            self.db.deleteTask(item.data(0, 32))
            index = self.ui.taskList.indexOfTopLevelItem(item)
            self.ui.taskList.takeTopLevelItem(index)
        self.ui.statusbar.showMessage(QtGui.QApplication.translate("ui","Task removed."),3300)

    def setTaskPriority(self,priority):
        # Apply the new priority to every selected task, then resort.
        selectedItems = self.ui.taskList.selectedItems()
        for item in selectedItems:
            self.db.setTaskPriority(item.data(0, 32),priority)
            self.setPriorityColor(item, priority)
            item.setText(0,str(priority))
        self.ui.taskList.sortItems(0,QtCore.Qt.AscendingOrder)
        self.ui.statusbar.showMessage(QtGui.QApplication.translate("ui","Priority updated."),3300)

    def setPriorityColor(self,item,priority):
        # Priority icons live at :res/status/<priority>.png
        icon=QtGui.QIcon(':res/status/'+str(priority)+'.png')
        item.setIcon(1,icon)

    def openTask(self,taskname=None):
        # Open the editor for the single selected task (one editor at a time).
        if not self.taskOpened:
            item = self.getSelectedItem()
            if item:
                Task(self,item.data(0, 32))

    def getSelectedItem(self):
        """Return the selected item when exactly one is selected, else False."""
        selectedItems = self.ui.taskList.selectedItems()
        if len(selectedItems) == 1:
            item = self.ui.taskList.selectedItems()[0]
            return item
        else:
            return False

    # SHORTCUTS AND KEYBOARD EVENTS RELATED ACTIONS
    def getKeysOnList(self, e):
        # Key codes: 16777223 = Delete, 49-53 = '1'-'5', 78 = 'N'.
        if e.key() == 16777223: # delete
            if (QtCore.Qt.ShiftModifier & e.modifiers()):
                self.deleteSelectedTasks(True)
        elif e.key()>48 and e.key()<54:
            self.setTaskPriority(e.key()-48)
        elif e.key()==78:
            self.ui.taskInput.setFocus()
        else:
            QtGui.QTreeWidget.keyPressEvent(self.ui.taskList,e)

    def getKeysOnInput(self, e):
        # Key codes: 16777220/16777221 = Return / keypad Enter.
        if e.key()==16777221 or e.key()==16777220: # enter/return
            if (QtCore.Qt.AltModifier & e.modifiers()):
                self.createTask()
            else:
                self.addTask()
        else:
            QtGui.QLineEdit.keyPressEvent(self.ui.taskInput,e)
            input=self.ui.taskInput.text()
            if len(input)>50:
                # Long input: open the full editor with a sanitized name and
                # the raw text as description.
                taskname=input[:50].strip()
                taskname=taskname.replace("\r\n","\n")
                taskname=taskname.replace("\n"," ")
                taskname=taskname.replace("\t"," ")
                taskname=taskname.replace(" ","")
                description=input
                Task(self,taskid=0,taskname=taskname,description=description)
                self.ui.taskInput.clear()

    #ADDITIONAL FUNTIONS
    def questionPopup(self, title, msg):
        """Ok/Cancel question dialog; True on Ok."""
        window=QtGui.QMessageBox()
        window.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        resp = window.question(self, title, msg, buttons=QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel)
        if resp == QtGui.QMessageBox.Ok:
            return True
        else:
            return False

    #WINDOWS MOVEMENT
    def mouseMoveEvent(self, e):
        # Frameless window: drag anywhere with the left button to move it.
        if e.buttons() & QtCore.Qt.LeftButton:
            try:
                # first move event of a drag: remember the grab offset
                self.posx
                self.posy
            except:
                self.posx=e.x()
                self.posy=e.y()
            y=e.globalY()-self.posy
            x=e.globalX()-self.posx
            self.move(x,y)
            #e.accept()

    def mouseReleaseEvent(self, e):
        # End of a drag: forget the grab offset.
        try:
            del(self.posx)
            del(self.posy)
        except:
            pass

    def adjustHeight(self,downSize=False,init=False):
        """Grow (or, with downSize, shrink) the window to fit the task list."""
        tasks=self.db.getTasks(self.currentContext)
        if len(tasks)>0:
            item=self.ui.taskList.topLevelItem(0)
            qmodel=self.ui.taskList.indexFromItem(item)
            taskheight=self.ui.taskList.rowHeight(qmodel)
        else:
            taskheight=22
        if init:
            # before the window is shown, use the designed default sizes
            winheight=320
            listheight=252
        else:
            winheight=self.height()
            listheight=self.ui.taskList.height()
        desiredHeight=taskheight*len(tasks)+winheight-listheight+6
        if ( desiredHeight>self.height() or downSize ) and desiredHeight<QtGui.QApplication.desktop().height():
            self.resize(self.width(),desiredHeight)

    def closeEvent(self, e=None):
        # Closing just hides to tray; the event is swallowed.
        self.hide()
        if e:
            e.ignore()

    ###### MENU FUNCTIONS
    def importTasklist(self):
        # Pick a .w file and import its tasks.
        fname=QtGui.QFileDialog.getOpenFileName(self, QtGui.QApplication.translate("ui","Open"), "", QtGui.QApplication.translate("ui","Workload Import File (*.w)"))
        if fname[0]:
            filename=fname[0]
            from lib import importexport
            importexport.importTasks(self,filename,self.settings["dateFormat"])

    def exportTaskList(self):
        # Pick a destination .w file and export the current context's tasks.
        fname=QtGui.QFileDialog.getSaveFileName(self,QtGui.QApplication.translate("ui","Save"), "", QtGui.QApplication.translate("ui","Workload Export File (*.w)"))
        if fname[0]:
            includeArchive=self.questionPopup(QtGui.QApplication.translate("ui","Exporting tasks"), QtGui.QApplication.translate("ui","Do you want to include completed tasks?"))
            tasks=self.db.exportTasks(self.currentContext, includeArchive)
            from lib import importexport
            filename=fname[0]
            importexport.export(tasks, filename,self.settings["dateFormat"])

    def about(self):
        # Show about.html content in a message box.
        f=open("about.html")
        text=f.read()
        f.close()
        QtGui.QMessageBox.information(self, QtGui.QApplication.translate("ui","About"), text, buttons=QtGui.QMessageBox.Ok )

    def exit(self,exitcode=0):
        """Quit the app, optionally confirming first (askOnExit setting)."""
        exit_=False
        if self.settings["askOnExit"]:
            if self.questionPopup(QtGui.QApplication.translate("ui","Exit"), QtGui.QApplication.translate("ui","Are you sure?")):
                exit_=True
        else:
            exit_=True
        if exit_==True:
            self.settings.setCurrentContextAsLast()
            self.shortcuts.terminate()
            self.app.exit(exitcode)

    def createTask(self):
        # Open an empty task editor.
        Task(self,taskid=0)

    def completeTasks(self):
        # Mark selected tasks done and drop them from the visible list.
        if self.ui.taskList.hasFocus():
            tasks=self.ui.taskList.selectedItems()
            for i in tasks:
                self.db.completeTask(i.data(0,32))
                index = self.ui.taskList.indexOfTopLevelItem(i)
                self.ui.taskList.takeTopLevelItem(index)
            self.ui.statusbar.showMessage(QtGui.QApplication.translate("ui","Task completed."),3300)

    def showHistory(self):
        # Open the completed-tasks archive window.
        ArchiveWindow(self)

    def hoyKeyError(self):
        # Shown when the global-shortcut registration fails.
        QtGui.QMessageBox.critical(self,QtGui.QApplication.translate("ui","Error"),QtGui.QApplication.translate("ui","Unable to register global shortcut"))

    def changeEvent(self, e):
        # On a live language switch, retranslate the UI and reload contexts.
        if e.type()==QtCore.QEvent.LanguageChange:
            self.ui.retranslateUi(self)
            self.ui.statusbar.showMessage(QtGui.QApplication.translate("ui","Hello! Ready to work ;-)"),3600)
            loadContexts(self)
            selectCurrentContext(self)
        QtGui.QMainWindow.changeEvent(self,e)

    def translate(self,lang=None):
        """Install the Qt translator for *lang* ("auto" = system locale,
        "en" = remove any installed translator)."""
        if lang is None:
            lang= self.settings["lang"]
        translate=True
        if lang=="auto":
            locale = QtCore.QLocale.system().name()
        elif lang=="en":
            translate=False
            try:
                self.app.removeTranslator(self.qtTranslator)
            except:
                # no translator was installed yet
                pass
        else:
            locale=lang
        if translate:
            self.qtTranslator = QtCore.QTranslator()
            self.qtTranslator.load("i18n"+os.sep+"workload_"+locale+".qm")
            self.app.installTranslator(self.qtTranslator)

    def showMsg(self,msg):
        # Generic informational popup.
        msgWindow=QtGui.QMessageBox()
        msgWindow.information(self, "Workload - Information", msg, buttons=QtGui.QMessageBox.Ok )
from flask import Flask, request, Response, jsonify from lib.etc import key_from, gen_settings, gen_configurations from lib.db import DB from lib.modelWrapper import ModelWrapper import random import json import os from flask_cors import CORS, cross_origin app = Flask(__name__) CORS(app, resources={r"/api/*": {"origins": "*"}}) db = DB() modelWrapper = ModelWrapper(db) @app.route('/api/entry/<string:environment>/<int:num_sensors>/<int:len_sensors>/<int:fov>', methods=['GET']) def getEntry(environment, num_sensors, len_sensors, fov): settings = gen_settings(num_sensors, len_sensors, fov) key = key_from(environment, settings) entry = db.get_entry(key) if entry and entry["solved"]: resp = Response(json.dumps({"status": "model found", "entry": entry})) return resp if entry and not entry["solved"]: resp = Response(json.dumps({"status": "found but not solved"})) return resp
def poll_pull_requests(api):
    """Poll ready PRs, tally votes against the meritocracy, and post
    accepted/rejected/pending statuses, merging or closing as decided.

    Side effects: maintains server/voters.json and server/meritocracy.json,
    a meritocracy_mentioned DB table, and may exec startup.sh to restart.
    """
    __log.info("looking for PRs")
    db = DB.get_instance()
    try:
        db.query("""
            CREATE TABLE IF NOT EXISTS meritocracy_mentioned (
                id INTEGER PRIMARY KEY,
                commit_hash VARCHAR(40)
            )
        """)
    except:
        # NOTE(review): bare except — any failure (not just "table exists")
        # is logged and ignored.
        __log.exception("Failed to create meritocracy mentioned DB table")

    # get voting window
    voting_window = gh.voting.get_initial_voting_window()

    # get all ready prs (disregarding of the voting window)
    prs = gh.prs.get_ready_prs(api, settings.URN, 0)

    # This sets up a voting record, with each user having a count of votes
    # that they have cast.
    try:
        # 'x' mode: create the file only if it does not exist yet
        fp = open('server/voters.json', 'x')
        fp.close()
    except:
        # file already exists, which is what we want
        pass

    with open('server/voters.json', 'r+') as fp:
        total_votes = {}
        fs = fp.read()
        if fs:
            total_votes = json.loads(fs)

        # Meritocracy = top historical contributors ∪ top voters.
        top_contributors = sorted(gh.repos.get_contributors(api, settings.URN),
                                  key=lambda user: user["total"], reverse=True)
        top_contributors = [item["author"]["login"].lower() for item in top_contributors]
        contributors = set(top_contributors)  # store it while it's still a complete list
        top_contributors = top_contributors[:settings.MERITOCRACY_TOP_CONTRIBUTORS]
        top_contributors = set(top_contributors)
        top_voters = sorted(total_votes, key=total_votes.get, reverse=True)
        top_voters = set([user.lower() for user in top_voters[:settings.MERITOCRACY_TOP_VOTERS]])
        meritocracy = top_voters | top_contributors
        __log.info("generated meritocracy: " + str(meritocracy))
        with open('server/meritocracy.json', 'w') as mfp:
            json.dump(list(meritocracy), mfp)

        needs_update = False
        for pr in prs:
            pr_num = pr["number"]
            __log.info("processing PR #%d", pr_num)

            # gather all current votes
            votes, meritocracy_satisfied = gh.voting.get_votes(api, settings.URN, pr, meritocracy)

            # is our PR approved or rejected?
            vote_total, variance = gh.voting.get_vote_sum(api, votes, contributors)
            threshold = gh.voting.get_approval_threshold(api, settings.URN)
            is_approved = vote_total >= threshold and meritocracy_satisfied

            # the PR is mitigated or the threshold is not reached ?
            if variance >= threshold or not is_approved:
                # contested/unapproved PRs get a longer voting window
                voting_window = gh.voting.get_extended_voting_window(api, settings.URN)
                if vote_total >= threshold / 2:
                    # check if we need to mention the meritocracy
                    try:
                        commit = pr["head"]["sha"]
                        # only mention the meritocracy once per commit hash
                        if not db.query("SELECT * FROM meritocracy_mentioned WHERE commit_hash=?",
                                        (commit,)):
                            db.query("INSERT INTO meritocracy_mentioned (commit_hash) VALUES (?)",
                                     (commit,))
                            gh.comments.leave_meritocracy_comment(api, settings.URN,
                                                                  pr["number"], meritocracy)
                    except:
                        __log.exception("Failed to process meritocracy mention")

            # is our PR in voting window?
            in_window = gh.prs.is_pr_in_voting_window(api, pr, voting_window)

            if is_approved:
                __log.info("PR %d status: will be approved", pr_num)
                gh.prs.post_accepted_status(
                    api, settings.URN, pr, voting_window, votes, vote_total,
                    threshold, meritocracy_satisfied)
                if in_window:
                    __log.info("PR %d approved for merging!", pr_num)
                    try:
                        sha = gh.prs.merge_pr(api, settings.URN, pr, votes, vote_total,
                                              threshold, meritocracy_satisfied)
                    # some error, like suddenly there's a merge conflict, or some
                    # new commits were introduced between finding this ready pr and
                    # merging it
                    except gh.exceptions.CouldntMerge:
                        __log.info("couldn't merge PR %d for some reason, skipping", pr_num)
                        gh.issues.label_issue(api, settings.URN, pr_num, ["can't merge"])
                        continue
                    gh.comments.leave_accept_comment(
                        api, settings.URN, pr_num, sha, votes, vote_total,
                        threshold, meritocracy_satisfied)
                    gh.issues.label_issue(api, settings.URN, pr_num, ["accepted"])
                    # chaosbot rewards merge owners with a follow
                    pr_owner = pr["user"]["login"]
                    gh.users.follow_user(api, pr_owner)
                    needs_update = True
            else:
                __log.info("PR %d status: will be rejected", pr_num)
                if in_window:
                    gh.prs.post_rejected_status(
                        api, settings.URN, pr, voting_window, votes, vote_total,
                        threshold, meritocracy_satisfied)
                    __log.info("PR %d rejected, closing", pr_num)
                    gh.comments.leave_reject_comment(
                        api, settings.URN, pr_num, votes, vote_total, threshold,
                        meritocracy_satisfied)
                    gh.issues.label_issue(api, settings.URN, pr_num, ["rejected"])
                    gh.prs.close_pr(api, settings.URN, pr)
                elif vote_total < 0:
                    gh.prs.post_rejected_status(
                        api, settings.URN, pr, voting_window, votes, vote_total,
                        threshold, meritocracy_satisfied)
                else:
                    gh.prs.post_pending_status(
                        api, settings.URN, pr, voting_window, votes, vote_total,
                        threshold, meritocracy_satisfied)

            # credit every voter on this PR in the running tally
            for user in votes:
                if user in total_votes:
                    total_votes[user] += 1
                else:
                    total_votes[user] = 1

        if fs:
            # prepare for overwriting
            fp.seek(0)
            fp.truncate()
        json.dump(total_votes, fp)
        # flush all buffers because we might restart, which could cause a crash
        # NOTE(review): os.fsync expects a file descriptor (int); confirm that
        # passing the file object works here — fp.fileno() may be intended.
        os.fsync(fp)

    # we approved a PR, restart
    if needs_update:
        __log.info("updating code and requirements and restarting self")
        startup_path = join(THIS_DIR, "..", "startup.sh")
        # before we exec, we need to flush i/o buffers so we don't lose logs or voters
        sys.stdout.flush()
        sys.stderr.flush()
        os.execl(startup_path, startup_path)

    __log.info("Waiting %d seconds until next scheduled PR polling event",
               settings.PULL_REQUEST_POLLING_INTERVAL_SECONDS)
# parse input parameters parameters = {"sessionid": None, "zoneid": None, "output_type": "plain"} current_param = None for param in sys.argv[1:]: if len(param) > 1 and param[0] == "/": current_param = param[1:].lower() elif current_param is not None: if current_param in parameters: parameters[current_param] = param.strip() current_param = None # disconnect client response = {"terminateCause": "UNKNOWN"} if parameters["sessionid"] is not None and parameters["zoneid"] is not None: cp_db = DB() # remove client client_session_info = cp_db.del_client(parameters["zoneid"], parameters["sessionid"]) if client_session_info is not None: cpIPFW = IPFW() cpIPFW.delete_from_table(parameters["zoneid"], client_session_info["ip_address"]) client_session_info["terminateCause"] = "User-Request" response = client_session_info # output result as plain text or json if parameters["output_type"] != "json": for item in response: print "%20s %s" % (item, response[item]) else: print (ujson.dumps(response))
def editType(self, type): return DB().editTypeGroup(self, type)
def setUpClass(cls): cls.db = DB(db_config)
def activateSilenceMode(self, text): DB().editModeGroup(self, 1) DB().addTextInSilenceMode(self, text)
def deactivate(self): return DB().editStatusGroup(self, 0)
def setUpClass(cls) -> None: cls.data_list = excel_to_list('test_user_data.xlsx', 'TestUserReg') cls.db = DB()
def __init__(self, obj): self.id = obj['message']['peer_id'] self.db = DB().getConversationInfo(peer=self.id) if self.db: self.db = self.db[0]
def __init__(self, dsn: str): self.database = DB(dsn, is_async=True) self.notice_queue = Queue() self.oid = None self.executor = None self.port = None