def incomplete():
    """Build the XBMC directory listing for the incomplete-downloads folder.

    Items still in the SABnzbd queue are listed with an "Active - " title
    prefix; items matched against history are listed normally.  Rows with no
    history id trigger a cleanup of the leftover folder.
    """
    log("incomplete:")
    # Rows appear to be [nzbname, nzo_id] pairs, nzo_id None when SABnzbd no
    # longer tracks the item -- assumed from the indexing below; confirm
    # against nzbname_lists().
    active_nzbname_list, nzbname_list = nzbname_lists()
    nzoid_history_list = [x[1] for x in nzbname_list if x[1] is not None]
    for row in active_nzbname_list:
        url = "&nzoid=" + str(row[1]) + "&nzbname=" + utils.quote_plus(row[0]) +\
            "&nzoidhistory_list=" + utils.quote_plus(';'.join(nzoid_history_list)) +\
            "&folder=" + utils.quote_plus(row[0])
        info = nfo.ReadNfoLabels(utils.join(INCOMPLETE_FOLDER, row[0]))
        info.info_labels['title'] = "Active - " + info.info_labels['title']
        add_posts(info.info_labels, url, MODE_INCOMPLETE_LIST, info.thumbnail, info.fanart)
    for row in nzbname_list:
        if row[1]:
            url = "&nzoidhistory=" + str(row[1]) + "&nzbname=" + utils.quote_plus(row[0]) +\
                "&nzoidhistory_list=" + utils.quote_plus(';'.join(nzoid_history_list)) +\
                "&folder=" + utils.quote_plus(row[0])
            info = nfo.ReadNfoLabels(utils.join(INCOMPLETE_FOLDER, row[0]))
            add_posts(info.info_labels, url, MODE_INCOMPLETE_LIST, info.thumbnail, info.fanart)
        else:
            # Clean out a failed SABnzbd folder removal
            utils.dir_exists(utils.join(INCOMPLETE_FOLDER, row[0]), None)
    xbmcplugin.setContent(HANDLE, 'movies')
    xbmcplugin.endOfDirectory(HANDLE, succeeded=True, cacheToDisc=True)
    return
def init(dirs, archived=True):
    """Initialize the database from cleaned data files.

    Ensures the log directory exists, opens the database connection,
    optionally archives (dump + drop) the previous content, then loads
    datasets, students and paths.

    :param dirs: mapping with keys "to" (clean dir), "ref" (reference dir)
                 and "old" (archive dir)
    :param archived: when True, dump and drop the archive directory content
                     before inserting
    :return: ``(status, message)``; ``(False, msg)`` when the clean
             directory is empty, ``(True, "")`` otherwise
    """
    if not dir_exists("logs"):
        os.makedirs("logs")
    # Called for its side effect (establishing the DB connection); the
    # returned handle was never used.
    connect()
    clean_dir = dirs["to"]
    ref_dir = dirs["ref"]
    archived_dir = dirs["old"]
    if dir_empty(clean_dir):
        # Typo fix: was "No files found found in ..."
        msg = "No files found in {}. Aborting.".format(clean_dir)
        return False, msg
    if archived is True:
        # BUG FIX: the original tested/created `archived` (a boolean flag)
        # instead of the archive directory path.
        if not dir_exists(archived_dir):
            os.makedirs(archived_dir)
        dump_and_drop(archived_dir)
    insert_datasets(clean_dir, True)
    insert_students(clean_dir, ref_dir)
    insert_path(ref_dir)
    return True, ""
def scan_library(self, my_status=None, ignorecache=False):
    """Scan the configured media directory and map local files to shows.

    Returns a dict of the form ``{show_id: {episode_number: full_path}}``.
    Per-filename results are cached through the data handler; pass
    ``ignorecache=True`` to re-parse every filename.
    """
    # Check if operation is supported by the API
    if not self.mediainfo.get("can_play"):
        raise utils.EngineError("Operation not supported by current site or mediatype.")
    if not self.config["searchdir"]:
        raise utils.EngineError("Media directory is not set.")
    if not utils.dir_exists(self.config["searchdir"]):
        raise utils.EngineError("The set media directory doesn't exist.")
    t = time.time()
    library = {}
    library_cache = self.data_handler.library_cache_get()
    if not my_status:
        my_status = self.mediainfo["status_start"]
    self.msg.info(self.name, "Scanning local library...")
    tracker_list = self._get_tracker_list(my_status)
    # Do a full listing of the media directory
    for fullpath, filename in utils.regex_find_videos("mkv|mp4|avi", self.config["searchdir"]):
        show_id = None
        if not ignorecache and filename in library_cache.keys():
            # If the filename was already seen before
            # use the cached information, if there's no information (None)
            # then it means it doesn't correspond to any show in the list
            # and can be safely skipped.
            if library_cache[filename]:
                show_id = library_cache[filename][0]
                show_ep = library_cache[filename][1]
            else:
                continue
        else:
            # If the filename has not been seen, extract
            # the information from the filename and do a fuzzy search
            # on the user's list. Cache the information.
            # If it fails, cache it as None.
            aie = extras.AnimeInfoExtractor.AnimeInfoExtractor(filename)
            (show_title, show_ep) = (aie.getName(), aie.getEpisode())
            if show_title:
                show = utils.guess_show(show_title, tracker_list)
                if show:
                    show_id = show["id"]
                    library_cache[filename] = (show["id"], show_ep)
                else:
                    library_cache[filename] = None
            else:
                library_cache[filename] = None
        # After we got our information, add it to our library
        if show_id:
            if show_id not in library.keys():
                library[show_id] = {}
            library[show_id][show_ep] = fullpath
    self.msg.debug(self.name, "Time: %s" % (time.time() - t))
    # Persist both the library mapping and the filename cache.
    self.data_handler.library_save(library)
    self.data_handler.library_cache_save(library_cache)
    return library
def create_virtualenv():
    """Set up the project virtualenv when it is not already in place."""
    venv_root = cget("virtualenv_dir")
    venv_bin = pjoin(venv_root, "bin")
    # Consider the env present only when bin/ and its activate script exist.
    present = dir_exists(venv_bin) and exists(pjoin(venv_bin, "activate"))
    if present:
        return
    show(yellow("Setting up new Virtualenv in: %s"), venv_root)
    with settings(hide("stdout", "running")):
        run("virtualenv --distribute %s" % venv_root)
def create_virtualenv():
    """Set up the project virtualenv (as the configured user) if missing."""
    owner = cget("user")
    venv_root = cget("virtualenv_dir")
    venv_bin = pjoin(venv_root, "bin")
    # Consider the env present only when bin/ and its activate script exist.
    present = dir_exists(venv_bin) and exists(pjoin(venv_bin, "activate"))
    if present:
        return
    show(yellow("Setting up new Virtualenv in: %s"), venv_root)
    with settings(hide("stdout", "running"), sudo_prefix=SUDO_PREFIX):
        sudo("virtualenv --distribute %s" % venv_root, user=owner)
def prepare_dirs():
    """Create any missing directories from ``dirs_to_make`` and link RaspiHD.

    All missing directories are created with a single remote ``mkdir`` call,
    then ``/media/RaspiHD`` is (re)symlinked into ``/home/pi``.
    """
    # Collect only the directories that do not exist yet.
    missing = [d for d in dirs_to_make if not dir_exists(d)]
    if missing:
        # The original wrapped each name in a redundant `'%s' % dir_`
        # formatting step; the entries are path strings already.
        run("mkdir {0}".format(" ".join(missing)))
    with cd('/home/pi'):
        run('ln -sf /media/RaspiHD/ .')
def deploy_ssh(remove_banner=True):
    """Prepare SSH on the remote side: hush the login banner, push our key."""
    with settings(warn_only=True):
        # Remove annoying Debian banner
        if remove_banner:
            run('touch $HOME/.hushlogin')
        # Add our public keys
        if dir_exists('~/.ssh'):
            return
        for host in env.hosts:
            local('ssh-copy-id %s' % host)
def incomplete():
    """Render the incomplete-downloads folder as an XBMC listing."""
    log("incomplete:")
    active_nzbname_list = []
    m_nzbname_list = []
    # Split incomplete folders: those SABnzbd still queues are "active",
    # the rest will be matched against SABnzbd history.
    for folder in utils.listdir_dirs(INCOMPLETE_FOLDER):
        sab_nzo_id = SABNZBD.nzo_id(folder)
        if not sab_nzo_id:
            entry = [folder, None]
            m_nzbname_list.append(entry)
            log("incomplete: m_nzbname_list.append: %s" % entry)
        else:
            entry = [folder, sab_nzo_id]
            active_nzbname_list.append(entry)
            log("incomplete: active_nzbname_list: %s" % entry)
    nzbname_list = SABNZBD.nzo_id_history_list(m_nzbname_list)
    nzoid_history_list = [x[1] for x in nzbname_list if x[1] is not None]
    # The joined/quoted history list is identical for every row; build once.
    history_param = utils.quote_plus(';'.join(nzoid_history_list))
    for name, nzo_id in active_nzbname_list:
        url = ("&nzoid=" + str(nzo_id)
               + "&nzbname=" + utils.quote_plus(name)
               + "&nzoidhistory_list=" + history_param
               + "&folder=" + utils.quote_plus(name))
        info = nfo.ReadNfoLabels(utils.join(INCOMPLETE_FOLDER, name))
        info.info_labels['title'] = "Active - " + info.info_labels['title']
        add_posts(info.info_labels, url, MODE_INCOMPLETE_LIST, info.thumbnail, info.fanart)
    for row in nzbname_list:
        if row[1]:
            url = ("&nzoidhistory=" + str(row[1])
                   + "&nzbname=" + utils.quote_plus(row[0])
                   + "&nzoidhistory_list=" + history_param
                   + "&folder=" + utils.quote_plus(row[0]))
            info = nfo.ReadNfoLabels(utils.join(INCOMPLETE_FOLDER, row[0]))
            add_posts(info.info_labels, url, MODE_INCOMPLETE_LIST, info.thumbnail, info.fanart)
        else:
            # Clean out a failed SABnzbd folder removal
            utils.dir_exists(utils.join(INCOMPLETE_FOLDER, row[0]), None)
    xbmcplugin.setContent(HANDLE, 'movies')
    xbmcplugin.endOfDirectory(HANDLE, succeeded=True, cacheToDisc=True)
def check_and_assign_defaults(self):
    """ Check initialization parameters or assign defaults.

    Fills in placeholder values for missing optional attributes and aborts
    the process when the output directory does not exist.
    """
    if not self.outfilename:
        self.outfilename = 'Not given'
    if not dir_exists(self.outputdir):
        msg = "*** ERROR: outputdir {} does not exist."
        print(msg.format(self.outputdir))
        # BUG FIX: this is an error path, but the original called
        # sys.exit(0), reporting success to the calling shell.
        sys.exit(1)
    if not self.user:
        self.user = '******'
    if not self.memo:
        self.memo = 'Testing'
def check_and_assign_defaults(self):
    """ Check initialization parameters or assign defaults.

    Fills in placeholder values for missing optional attributes and aborts
    the process when the output directory does not exist.
    """
    if not self.infilename:
        self.infilename = 'Not given'
    if not dir_exists(self.outputdir):
        msg = "*** ERROR: outputdir {} does not exist."
        print(msg.format(self.outputdir))
        # BUG FIX: this is an error path, but the original called
        # sys.exit(0), reporting success to the calling shell.
        sys.exit(1)
    if not self.user:
        self.user = '******'
    if not self.memo:
        self.memo = 'No comments'
def play_episode(self, show, playep=0):
    """
    Does a local search in the hard disk (in the folder specified by the
    config file) for the specified episode (**playep**) for the
    specified **show**.

    If no **playep** is specified, the next episode of the show will be
    played.

    Returns the ``endep`` value reported by ``_search_video`` for the
    played file.  Raises ``utils.EngineError`` when playback is
    unsupported, the media directory is unset/missing, the episode number
    is invalid or out of range, the player binary is missing, or the
    episode file is not found.
    """
    # Check if operation is supported by the API
    if not self.mediainfo.get('can_play'):
        raise utils.EngineError(
            'Operation not supported by current site or mediatype.')
    if not self.config['searchdir']:
        raise utils.EngineError('Media directory is not set.')
    if not utils.dir_exists(self.config['searchdir']):
        raise utils.EngineError('The set media directory doesn\'t exist.')
    try:
        playep = int(playep)
    except ValueError:
        raise utils.EngineError('Episode must be numeric.')
    if show:
        # Default to the next unwatched episode when none was requested.
        # (The original also set a `playing_next` flag here; it was never
        # read, so it has been removed.)
        if not playep:
            playep = show['my_progress'] + 1
        if show['total'] and playep > show['total']:
            raise utils.EngineError('Episode beyond limits.')
        self.msg.info(self.name, "Searching for %s %s..." % (show['title'], playep))
        titles = self.data_handler.get_show_titles(show)
        filename, endep = self._search_video(titles, playep)
        if filename:
            self.msg.info(self.name, 'Found. Starting player...')
            arg_list = shlex.split(self.config['player'])
            arg_list.append(filename)
            try:
                # Detach the player process and silence its output.
                with open(os.devnull, 'wb') as DEVNULL:
                    subprocess.Popen(arg_list, stdout=DEVNULL, stderr=DEVNULL)
            except OSError:
                raise utils.EngineError(
                    'Player not found, check your config.json')
            return endep
        else:
            raise utils.EngineError('Episode file not found.')
def play_episode(self, show, playep=0):
    """
    Does a local search in the hard disk (in the folder specified by the
    config file) for the specified episode (**playep**) for the
    specified **show**.

    If no **playep** is specified, the next episode of the show will be
    played.

    Returns the episode number that was played.  Raises
    ``utils.EngineError`` when playback is unsupported, the media directory
    is unset/missing, the episode number is invalid or out of range, the
    player binary is missing, or the episode file is not found.
    """
    # Check if operation is supported by the API
    if not self.mediainfo.get('can_play'):
        raise utils.EngineError('Operation not supported by API.')
    if not self.config['searchdir']:
        raise utils.EngineError('Media directory is not set.')
    if not utils.dir_exists(self.config['searchdir']):
        raise utils.EngineError('The set media directory doesn\'t exist.')
    try:
        playep = int(playep)
    except ValueError:
        raise utils.EngineError('Episode must be numeric.')
    if show:
        # Default to the next unwatched episode when none was requested.
        # (The original also set a `playing_next` flag here; it was never
        # read, so it has been removed.)
        if not playep:
            playep = show['my_progress'] + 1
        if show['total'] and playep > show['total']:
            raise utils.EngineError('Episode beyond limits.')
        self.msg.info(self.name, "Searching for %s %s..." % (show['title'], playep))
        titles = self.get_show_titles(show)
        filename = self._search_video(titles, playep)
        if filename:
            self.msg.info(self.name, 'Found. Starting player...')
            # Suspend the tracker around playback; presumably so our own
            # viewing is not double-counted -- confirm against tracker impl.
            if self.tracker:
                self.tracker.disable()
            arg_list = shlex.split(self.config['player'])
            arg_list.append(filename)
            try:
                # Blocks until the player exits.
                subprocess.call(arg_list)
            except OSError:
                raise utils.EngineError('Player not found, check your config.json')
            if self.tracker:
                self.tracker.enable()
            return playep
        else:
            raise utils.EngineError('Episode file not found.')
def play_episode(self, show, playep=0):
    """
    Does a local search in the hard disk (in the folder specified by the
    config file) for the specified episode (**playep**) for the
    specified **show**.

    If no **playep** is specified, the next episode of the show will be
    played.

    Returns the ``endep`` value reported by ``_search_video`` for the
    played file.  Raises ``utils.EngineError`` when playback is
    unsupported, the media directory is unset/missing, the episode number
    is invalid or out of range, the player binary is missing, or the
    episode file is not found.
    """
    # Check if operation is supported by the API
    if not self.mediainfo.get("can_play"):
        raise utils.EngineError("Operation not supported by current site or mediatype.")
    if not self.config["searchdir"]:
        raise utils.EngineError("Media directory is not set.")
    if not utils.dir_exists(self.config["searchdir"]):
        raise utils.EngineError("The set media directory doesn't exist.")
    try:
        playep = int(playep)
    except ValueError:
        raise utils.EngineError("Episode must be numeric.")
    if show:
        # Default to the next unwatched episode when none was requested.
        # (The original also set a `playing_next` flag here; it was never
        # read, so it has been removed.)
        if not playep:
            playep = show["my_progress"] + 1
        if show["total"] and playep > show["total"]:
            raise utils.EngineError("Episode beyond limits.")
        self.msg.info(self.name, "Searching for %s %s..." % (show["title"], playep))
        titles = self.data_handler.get_show_titles(show)
        filename, endep = self._search_video(titles, playep)
        if filename:
            self.msg.info(self.name, "Found. Starting player...")
            arg_list = shlex.split(self.config["player"])
            arg_list.append(filename)
            try:
                # Detach the player process and silence its output.
                with open(os.devnull, "wb") as DEVNULL:
                    subprocess.Popen(arg_list, stdout=DEVNULL, stderr=DEVNULL)
            except OSError:
                raise utils.EngineError("Player not found, check your config.json")
            return endep
        else:
            raise utils.EngineError("Episode file not found.")
def __deleteCache(self):
    """Delete every directory found directly under the cache prefix."""
    prefix = self.getCachePrefix()
    if not utils.dir_exists(prefix):
        raise CacheException("Cache prefix %s doesn't exist." % prefix)
    # NOTE(review): shutil.rmtree is the stdlib way to do this, but the
    # project's recursive_delete helper is kept to preserve behavior.
    for entry in os.listdir(prefix):
        full_path = "%s%s%s" % (prefix, os.sep, entry)
        if not os.path.isdir(full_path):
            # Anything that is not a directory is unexpected here.
            print("Eh? \"%s\" isn't a directory. That's odd..." % full_path)
            continue
        print("Recursively deleting \"%s\"" % full_path)
        utils.recursive_delete(full_path)
def is_nzb_home(params):
    """Ensure the nzb described by *params* is present in the incomplete folder.

    If the download folder does not exist yet, the nzb is pushed to SABnzbd
    (by URL, local path or file upload), the function polls until the folder
    appears, then moves the job to the top of the queue.

    Returns a ``(success, sab_nzo_id)`` tuple; success is False when the add
    failed or the user cancelled the wait dialog.
    """
    log("is_nzb_home: params: %s" % params)
    get = params.get
    nzb = utils.unquote_plus(get("nzb"))
    nzbname = m_nzb.Nzbname(utils.unquote_plus(get("nzbname"))).final_name
    folder = utils.join(INCOMPLETE_FOLDER, nzbname)
    iscanceled = False
    # NOTE(review): `type` shadows the builtin; rename when convenient.
    type = get('type', 'addurl')
    sab_nzo_id = SABNZBD.nzo_id(nzbname, nzb)
    log("is_nzb_home: folder: %s sab_nzo_id: %s" %(folder, sab_nzo_id))
    if not utils.dir_exists(folder, sab_nzo_id):
        progressDialog = xbmcgui.DialogProgress()
        progressDialog.create('Pneumatic', 'Sending request to SABnzbd')
        category = get_category()
        if type == 'addurl':
            type, nzb = nzb_cache(type, nzb, nzbname)
        # SABnzbd and URI should be latin-1 encoded
        if type == 'addurl':
            response = SABNZBD.addurl(nzb.encode('latin-1'), nzbname, category=category)
        elif type == 'add_local':
            response = SABNZBD.add_local(nzb.encode('latin-1'), category=category)
        elif type == 'add_file':
            response = SABNZBD.add_file(nzb.encode('latin-1'), category=category)
        log("is_nzb_home: type: %s response: %s" %(type, response))
        if "ok" in response:
            progressDialog.update(0, 'Request to SABnzbd succeeded', 'waiting for nzb download')
            seconds = 0
            #SABnzbd uses nzb url as name until it has downloaded the nzb file
            sab_nzo_id_init = SABNZBD.nzo_id(nzbname, nzb)
            log("is_nzb_home: sab_nzo_id_init: %s" % sab_nzo_id_init)
            # Poll once per second until the job id and folder both exist.
            while not (sab_nzo_id and utils.exists(folder)):
                sab_nzo_id = SABNZBD.nzo_id(nzbname)
                label = str(seconds) + " seconds"
                log("is_nzb_home: waiting for nzb: sab_nzo_id: %s for: %s" % (sab_nzo_id, label))
                progressDialog.update(0, 'Request to SABnzbd succeeded', 'waiting for nzb download', label)
                if progressDialog.iscanceled():
                    progressDialog.close()
                    log("is_nzb_home: waiting for nzb: canceled")
                    # Fix for hang when playing .strm
                    time.sleep(1)
                    xbmc.Player().stop()
                    if sab_nzo_id is None and sab_nzo_id_init is not None:
                        sab_nzo_id = sab_nzo_id_init
                    #Trying to delete both the queue and history
                    if sab_nzo_id is not None:
                        pause = SABNZBD.pause_queue(id=sab_nzo_id)
                        log("is_nzb_home: pause: sab_nzo_id: %s msg: %s" % (sab_nzo_id, pause))
                        time.sleep(3)
                        delete_msg = SABNZBD.delete_queue('',sab_nzo_id)
                        log("is_nzb_home: delete_queue: sab_nzo_id: %s nzbname: %s msg: %s" % (sab_nzo_id, nzbname, delete_msg))
                        if not "ok" in delete_msg:
                            delete_msg = SABNZBD.delete_history('',sab_nzo_id)
                            log("is_nzb_home: delete_history: sab_nzo_id: %s nzbname: %s msg: %s" % (sab_nzo_id, nzbname, delete_msg))
                    else:
                        log("is_nzb_home: failed removing %s from the queue" % nzbname)
                    iscanceled = True
                    break
                time.sleep(1)
                seconds += 1
            if not iscanceled:
                # Move the job to the top of the SABnzbd queue.
                switch = SABNZBD.switch(0, '', sab_nzo_id)
                log("is_nzb_home: switch: sab_nzo_id: %s msg: %s" % (sab_nzo_id, switch))
                if not "ok" in switch:
                    progressDialog.update(0, 'Failed to prioritize the nzb!')
                    time.sleep(1)
                # Dont add meta data for local nzb's
                if type == 'addurl':
                    t = Thread(target=save_nfo, args=(folder,))
                    t.start()
                progressDialog.close()
                return True, sab_nzo_id
            else:
                progressDialog.close()
                return False, sab_nzo_id
        else:
            progressDialog.close()
            log("is_nzb_home: failed adding nzb to SAB")
            # Fix for hang when playing .strm
            xbmc.Player().stop()
            utils.notification("Request to SABnzbd failed!")
            return False, sab_nzo_id
    else:
        switch = SABNZBD.switch(0,'' , sab_nzo_id)
        log("is_nzb_home: switch: sab_nzo_id: %s msg: %s" % (sab_nzo_id, switch))
        if not "ok" in switch:
            utils.notification("Failed to prioritize the nzb!")
        # TODO make sure there is also a NZB in the queue
        return True, sab_nzo_id
def _on_view_click(self):
    """Open the download folder, or report that nothing was downloaded yet."""
    if utils.dir_exists(utils.DL_PATH):
        utils.open_file(utils.DL_PATH)
    else:
        self._event(utils.LABEL_STATUS, 'You haven\'t downloaded anything yet.')
def is_nzb_home(params):
    """Ensure the nzb described by *params* is present in the incomplete folder.

    If the download folder does not exist yet, the nzb is pushed to SABnzbd
    (by URL or file upload), the function polls until the folder appears
    (re-asking the user for the right folder every 10 seconds via
    ``find_incomplete``), then moves the job to the top of the queue.

    Returns a ``(success, sab_nzo_id)`` tuple; success is False when the add
    failed or the user cancelled the wait dialog.
    """
    log("is_nzb_home: params: %s" % params)
    get = params.get
    nzb = utils.unquote_plus(get("nzb"))
    nzbname = m_nzb.Nzbname(utils.unquote_plus(get("nzbname"))).final_name
    # os.path.join(nzbname, '') appends a trailing separator to the name.
    folder = utils.join(INCOMPLETE_FOLDER, os.path.join(nzbname, ''))
    iscanceled = False
    # NOTE(review): `type` shadows the builtin; rename when convenient.
    type = get('type', 'addurl')
    sab_nzo_id = sabnzbd.nzo_id(nzbname, nzb)
    log("is_nzb_home: folder: %s sab_nzo_id: %s" %(folder, sab_nzo_id))
    # Fall back to the history id when the job is no longer in the queue.
    if sab_nzo_id is None:
        nzo_id = sabnzbd.nzo_id_history(nzbname)
    else:
        nzo_id = sab_nzo_id
    log("is_nzb_home: nzo_id: %s" % nzo_id)
    if not utils.dir_exists(folder, nzo_id):
        progressDialog = xbmcgui.DialogProgress()
        progressDialog.create('Pneumatic', 'Sending request to SABnzbd')
        category = get_category()
        # correct wrong type mode
        if nzb.startswith('http'):
            type = "addurl"
            log("is_nzb_home: type changed to addurl")
        elif type == 'addurl':
            type = 'add_file'
            log("is_nzb_home: type changed to add_file")
        if type == 'addurl':
            type, nzb = nzb_cache(type, nzb, nzbname)
        # SABnzbd and URI should be latin-1 encoded
        if type == 'addurl':
            response = sabnzbd.addurl(nzb.encode('latin-1'), nzbname, category=category)
        # add_local will not work on remote shares, thus add_file
        elif type == 'add_file' or type == 'add_local':
            response = sabnzbd.add_file(nzb.encode('latin-1'), category=category)
        log("is_nzb_home: type: %s response: %s" %(type, response))
        if "ok" in response:
            progressDialog.update(0, 'Request to SABnzbd succeeded', 'waiting for nzb download')
            seconds = 0
            timer = 0
            #SABnzbd uses nzb url as name until it has downloaded the nzb file
            sab_nzo_id_init = sabnzbd.nzo_id(nzbname, nzb)
            log("is_nzb_home: sab_nzo_id_init: %s" % sab_nzo_id_init)
            while not (sab_nzo_id and utils.exists_incomplete(folder)):
                # Ask user what incomplete dir is right every 10s
                if timer > 9:
                    timer = 0
                    folder, nzbname = find_incomplete(folder, nzbname)
                sab_nzo_id = sabnzbd.nzo_id(nzbname)
                label = str(seconds) + " seconds"
                log("is_nzb_home: waiting for nzb: sab_nzo_id: %s for: %s" % (sab_nzo_id, label))
                progressDialog.update(0, 'Request to SABnzbd succeeded', 'waiting for nzb download', label)
                if progressDialog.iscanceled():
                    progressDialog.close()
                    log("is_nzb_home: waiting for nzb: canceled")
                    # Fix for hang when playing .strm
                    time.sleep(1)
                    xbmc.Player().stop()
                    if sab_nzo_id is None and sab_nzo_id_init is not None:
                        sab_nzo_id = sab_nzo_id_init
                    #Trying to delete both the queue and history
                    if sab_nzo_id is not None:
                        pause = sabnzbd.nzo_pause(sab_nzo_id)
                        #log("rassi pause")
                        log("is_nzb_home: pause: sab_nzo_id: %s msg: %s" % (sab_nzo_id, pause))
                        time.sleep(3)
                        #log("rassi before delete")
                        delete_msg = sabnzbd.nzo_delete_files(sab_nzo_id)
                        log("is_nzb_home: delete_queue: sab_nzo_id: %s nzbname: %s msg: %s" % (sab_nzo_id, nzbname, delete_msg))
                        if not "ok" in delete_msg:
                            delete_msg = sabnzbd.nzo_delete_history_files(sab_nzo_id)
                            log("is_nzb_home: delete_history: sab_nzo_id: %s nzbname: %s msg: %s" % (sab_nzo_id, nzbname, delete_msg))
                        #log("rassi after delete")
                    else:
                        log("is_nzb_home: failed removing %s from the queue" % nzbname)
                    iscanceled = True
                    break
                time.sleep(1)
                seconds += 1
                timer += 1
            if not iscanceled:
                # Move the job to the top of the SABnzbd queue.
                switch = sabnzbd.nzo_switch(sab_nzo_id, 0).replace('\n', '')
                log("is_nzb_home: switch: sab_nzo_id: %s msg: %s" % (sab_nzo_id, switch))
                if not "0" in switch:
                    progressDialog.update(0, 'Failed to prioritize the nzb!')
                    time.sleep(1)
                # Dont add meta data for local nzb's
                if type == 'addurl':
                    t = Thread(target=save_nfo, args=(folder,))
                    t.start()
                progressDialog.close()
                return True, sab_nzo_id
            else:
                progressDialog.close()
                return False, sab_nzo_id
        else:
            progressDialog.close()
            log("is_nzb_home: failed adding nzb to SAB")
            # Fix for hang when playing .strm
            xbmc.Player().stop()
            utils.notification("Request to SABnzbd failed!")
            return False, sab_nzo_id
    else:
        switch = sabnzbd.nzo_switch(sab_nzo_id, 0).replace('\n', '')
        log("is_nzb_home: switch: sab_nzo_id: %s msg: %s" % (sab_nzo_id, switch))
        if not "0" in switch:
            utils.notification("Failed to prioritize the nzb!")
        # TODO make sure there is also a NZB in the queue
        return True, sab_nzo_id
def cmd(args):
    '''
    Main function in shell to launch process:
    if --download: take the config settings:
        execute [download, clean, init, insert, stats]
    if no arguments: execute [init, insert, stats]

    Dispatches on the parsed CLI options (steps/step, stats/stat,
    tables/table, download) and returns a ``(status, msg)`` tuple.
    '''
    ### STEPS/STEP: init and populate db
    logger.info(args)
    status = True
    msg = ""
    ## STEPS is True: create the steps: [init, insert]
    if args.steps is True:
        logger.info("STEPS option is activated: execute only the steps")
        ## STEPS is True and download is False: create the steps:
        ## [init, insert] from files/clean
        if args.download is not False:
            logger.info("with --download")
            logger.info("create_steps(): download, clean, init, insert")
            status, msg = create_steps()
            return status, msg
        else:
            logger.info("create_steps(): init, insert")
            logger.info(
                "create_steps(): init from files in {}, insert from files in {}"
                .format(args.dirs["ref"], args.dirs["to"]))
            logger.info("remove download and clean")
            # BUG FIX: copy STEPS -- the original removed entries from the
            # module-level list, corrupting it for later calls.
            steps = list(STEPS)
            for del_s in ("download", "clean"):
                steps.remove(del_s)
            for step in steps:
                status, msg = create_step(step)
                if status is False:
                    logger.critical("STEP {}() is {}. {}".format(step, status, msg))
                    break
            return status, msg
    # STEP: one or more comma-separated step names among
    # 'download,clean,init,insert'
    elif args.step is not None:
        logger.info("STEP option is activated: execute only the steps mentioned")
        for step in args.step.split(","):
            # Log the individual step, not the whole comma-separated option.
            logger.info("STEP {}()".format(step.strip()))
            status, msg = create_step(step.strip())
            if status is False:
                logger.critical("STEP {}() is {}. {}".format(step, status, msg))
                break
        return status, msg
    ### STATS : generate stats
    elif args.stats in ["create", "delete", "update"]:
        logger.info("STATS option is activated: only {} all stats".format(args.stats))
        logger.info("STATS stats({},{})".format(args.stats, args.student))
        if args.stats in ("create", "delete"):
            status, msg = stats(args.stats, args.student)
        else:
            # update == delete then re-create
            stats("delete", args.student)
            status, msg = stats("create", args.student)
        return status, msg
    # STAT: one or more comma-separated stat names among
    # 'activity, tasks, skills, progression'
    elif args.stat is not None:
        # Log args.stat (the chosen stats), not args.stats (None here).
        logger.info("STAT option is activated. Only create specified stats {}".format(args.stat))
        for stat_name in args.stat.split(","):
            logger.info("STAT {}({})".format(stat_name.strip(), args.student))
            stat(stat_name, action="create", student=args.student)
        return status, msg
    ### TABLES: generate TABLES for STATS create delete or update
    elif args.tables in ["create", "delete", "update"]:
        logger.info("TABLES option is activated. {} all the tables".format(args.tables))
        status, msg = tables(action=args.tables, student=args.student)
        return status, msg
    # TABLE: one or more comma-separated table names ('day,chapter,lesson,...')
    elif args.table is not None:
        logger.info("TABLE option is activated. Create only the table: {}".format(args.table))
        for tablename in args.table.split(","):
            logger.info("TABLE({},{}) option is activated".format(tablename.strip(), args.student))
            status, msg = table(tablename, action="create", student=args.student, required_table=None)
        return status, msg
    ### COMPLETE SCRIPT
    elif args.download is not False:
        logger.info("Complete execution with download: download, clean, init, insert, stats")
        create_steps()
        status, msg = stats("create", args.student)
        # BUG FIX: the original returned the undefined name `satus`,
        # raising NameError on this path.
        return status, msg
    else:
        # No arguments: derive the step list from settings.json.
        # BUG FIX: copy STEPS so the module-level list is not mutated.
        steps = list(STEPS)
        logger.info("No arguments in cmd: falling back to settings.json")
        if config["FILES_SERVER"]["activate"] is False:
            # raw directory provided by user
            raw_data_dir = config["FILES_SERVER"]["dir"]
            # if raw directory provided by user doesn't exist
            if not dir_exists(raw_data_dir) or dir_empty(raw_data_dir):
                # fall back to default CLEAN_DIR
                if not dir_exists(CLEAN_DIR) or dir_empty(CLEAN_DIR):
                    # trying to get default RAW_DIR
                    if not dir_exists(RAW_DIR) or dir_empty(RAW_DIR):
                        msg = "No download option activated and no data found in both provided dir: {} and RAW dir: {}".format(
                            raw_data_dir, RAW_DIR)
                        logger.critical(msg)
                        return False, msg
                    else:
                        steps.remove("download")
                else:
                    steps.remove("download")
                    steps.remove("clean")
            else:
                # BUG FIX: the original assigned `RAW_DIR = raw_data_dir`
                # here, which made RAW_DIR function-local and turned the
                # earlier dir_exists(RAW_DIR) read into an UnboundLocalError.
                # The assignment was never read afterwards, so it is dropped.
                steps.remove("download")
        if not dir_exists(CLEAN_DIR):
            os.makedirs(CLEAN_DIR)
        logger.info("Execute: {}".format(", ".join(steps)))
        create_steps(steps)
        status, msg = stats("create", args.student)
        return status, msg
from logging.handlers import RotatingFileHandler import argparse from settings import ROOT_DIR, RAW_DIR, CLEAN_DIR, ARCHIVED_DIR, LOG_DIR, REFERENCES_DIR, DIRS from settings import STEPS, STATS from settings import config from utils import timeit, dir_empty, dir_exists from db import create_step, create_steps # from steps import create_step # from steps import create_steps from db import stats, stat from db import tables, table if not dir_exists(LOG_DIR): os.makedirs(LOG_DIR) ### PARSE CMD ARGS parser = argparse.ArgumentParser() parser.add_argument("--from", help="Source folder", action="store_true", default=RAW_DIR) parser.add_argument("--to", help="Target dir", action="store_true", default=CLEAN_DIR) parser.add_argument("--old", help="Archived folder", action="store_true",
def scan_library(self, my_status=None, ignorecache=False):
    """Scan the configured media directory and map local files to shows.

    Returns a dict of the form ``{show_id: {episode_number: full_path}}``.
    Per-filename results are cached through the data handler; pass
    ``ignorecache=True`` to re-parse every filename.
    """
    # Check if operation is supported by the API
    if not self.mediainfo.get('can_play'):
        raise utils.EngineError(
            'Operation not supported by current site or mediatype.')
    if not self.config['searchdir']:
        raise utils.EngineError('Media directory is not set.')
    if not utils.dir_exists(self.config['searchdir']):
        raise utils.EngineError('The set media directory doesn\'t exist.')
    t = time.time()
    library = {}
    library_cache = self.data_handler.library_cache_get()
    if not my_status:
        my_status = self.mediainfo['status_start']
    self.msg.info(self.name, "Scanning local library...")
    tracker_list = self._get_tracker_list(my_status)
    # Do a full listing of the media directory
    for fullpath, filename in utils.regex_find_videos(
            'mkv|mp4|avi', self.config['searchdir']):
        show_id = None
        if not ignorecache and filename in library_cache.keys():
            # If the filename was already seen before
            # use the cached information, if there's no information (None)
            # then it means it doesn't correspond to any show in the list
            # and can be safely skipped.
            if library_cache[filename]:
                show_id = library_cache[filename][0]
                show_ep = library_cache[filename][1]
            else:
                continue
        else:
            # If the filename has not been seen, extract
            # the information from the filename and do a fuzzy search
            # on the user's list. Cache the information.
            # If it fails, cache it as None.
            aie = extras.AnimeInfoExtractor.AnimeInfoExtractor(filename)
            (show_title, show_ep) = (aie.getName(), aie.getEpisode())
            if show_title:
                show = utils.guess_show(show_title, tracker_list)
                if show:
                    show_id = show['id']
                    library_cache[filename] = (show['id'], show_ep)
                else:
                    library_cache[filename] = None
            else:
                library_cache[filename] = None
        # After we got our information, add it to our library
        if show_id:
            if show_id not in library.keys():
                library[show_id] = {}
            library[show_id][show_ep] = fullpath
    self.msg.debug(self.name, "Time: %s" % (time.time() - t))
    # Persist both the library mapping and the filename cache.
    self.data_handler.library_save(library)
    self.data_handler.library_cache_save(library_cache)
    return library
def run_print_commands():  # noqa pylint: disable=too-many-locals,too-many-statements
    """ Print commands to file.

    Builds the pentest working directory layout, generates metasploit
    resource files, and appends one shell command (with an explanatory
    comment where useful) per tool to ``commands.txt`` for the operator to
    run by hand.
    """
    pentest_dir_name, domain_name, ip_file_name, use_proxy = get_pentest_info()  # pylint: disable=unused-variable
    pentest_path = os.path.join(config.BASE_PATH, pentest_dir_name)
    resource_path = os.path.join(pentest_path, 'rc_files')
    command_file = os.path.join(pentest_path, 'commands.txt')
    create_command_file(command_file, pentest_dir_name, use_proxy)
    # This allows the user to put the pentest_dir_name as something like
    # company/external and another as company/internal. All commands will
    # work the same except for the metasploit workspace and the nessus scan
    # name will be company_external and company_internal.
    if '/' in pentest_dir_name:
        workspace_name = pentest_dir_name.replace('/', '_')
    else:
        workspace_name = pentest_dir_name

    def write_command(command):
        # Append one runnable command line to commands.txt.
        write_command_to_file(command, command_file)

    def write_comment(comment):
        # Append one human-readable comment line to commands.txt.
        write_comment_to_file(comment, command_file)

    def pj(script):
        """ Shorthand script for path.join """
        return os.path.join(config.SCRIPTS_PATH, script)

    def pyscript(script, in_file, out_dir=None, aha=False, use_proxy=False, line_fix=False):  # pylint: disable=too-many-arguments
        """ Shorthand function to return python <script> in out """
        command = "python {script} {in_file}{out_dir}{proxy}{aha}".format(
            script=pj(script),
            in_file=in_file,
            out_dir=" " + out_dir if out_dir else "",
            proxy=" --proxy" if use_proxy else "",
            aha=" | tee /dev/tty | aha -b {}> {}".format(
                "--line-fix " if line_fix else "",
                out_dir if out_dir else ""
            ) if aha else "",
        )
        return command

    # Create directories
    dir_exists(resource_path, True)
    # Extract IPs from cidr/dashed to file:
    orig_ip_path = os.path.join(pentest_path, ip_file_name)
    extract_command = pyscript("ip_extract.py", orig_ip_path, pentest_path)
    write_command(extract_command)
    ips_text = os.path.join(pentest_path, '_ips.txt')
    # nessus scan
    write_comment('Make sure nessus is accessible via {} (configurable through config file)'.format(config.NESSUS_URL))
    nessus_command = pyscript('nessus_scan.py', ips_text, workspace_name)
    write_command(nessus_command)
    # nmap
    nmap_out = os.path.join(pentest_path, "nmap")
    nmap_xml = os.path.join(pentest_path, "nmap.xml")
    nmap_html = os.path.join(pentest_path, "nmap.html")
    nmap_command = "nmap -sS -sU -v --script banner -sV --version-light -Pn -p U:631,161,123,138,137,1434,445,135,67,53,139,500,68,520,1900,4500,514,49152,162,69,5353,111,49154,1701,998,996,997,999,3283,49153,1812,136,2222,2049,3278,5060,1025,1433,3456,80,20031,1026,7,1646,1645,593,518,2048,31337,515,T:[1-65535] -oA {} -iL {} --min-hostgroup 128 --defeat-rst-ratelimit | tee /dev/tty | aha -b > {}".format(nmap_out, ips_text, nmap_html)
    write_command(nmap_command)
    # csv file
    csv_path = os.path.join(pentest_path, 'ports.csv')
    csv_command = pyscript('nmap_to_csv.py', nmap_xml, pentest_path)
    write_comment('Creates a ports.csv file with just the open ports.')
    write_command(csv_command)
    # webservers list
    webserver_path = os.path.join(pentest_path, "webservers.txt")
    webserver_command = pyscript("create_webserver_list.py", csv_path, pentest_path)
    write_comment('Creates a webservers.txt file with a list of every URL found.')
    write_command(webserver_command)
    # webserver screenshots
    screenshot_path = os.path.join(pentest_path, "website_screenshots")
    screenshot_command = pyscript("website_screenshot.py", webserver_path, screenshot_path, use_proxy=use_proxy)
    write_comment('Uses phantomjs to open each URL found and takes a picture of it and saves it in the website_screenshots folder.')
    write_command(screenshot_command)
    # metasploit workspace and import nmap
    workspace_import_path = os.path.join(resource_path, "db_import.rc")
    with open(workspace_import_path, "w") as f:
        f.write("workspace -a {}\n".format(workspace_name))
        f.write("db_import {}\n".format(nmap_xml))
        f.write("hosts")
    metasploit_workspace_command = 'msfconsole -r {}'.format(workspace_import_path)
    write_comment('Creates a metasploit workspace and imports the hosts found using the nmap.xml file.')
    write_command(metasploit_workspace_command)
    # dnsrecon
    write_comment('This will only run with python < 3 right now. So make sure the virtualenv is deactivated prior to running this command.')
    dnsrecon_html = os.path.join(pentest_path, "dnsrecon_{}_.html".format(domain_name))
    dnsrecon_command = 'dnsrecon -d {0} -D /usr/share/wordlists/dnsmap.txt | tee /dev/tty | aha -b > {1}'.format(domain_name, dnsrecon_html)
    write_command(dnsrecon_command)
    # rawr
    rawr_command = "rawr.py {} --rd --dns -orx --downgrade --spider -d {}".format(nmap_xml, pentest_path)
    write_command(rawr_command)
    # yasuo
    yasuo_html = os.path.join(pentest_path, "yasuo.html")
    if use_proxy:
        yasuo_command = "proxychains yasuo.rb -s /opt/yasuo/signatures.yaml -f {} -t 10 | tee /dev/tty | aha -b > {}".format(nmap_xml, yasuo_html)
    else:
        yasuo_command = "yasuo.rb -s /opt/yasuo/signatures.yaml -f {} -t 10 | tee /dev/tty | aha -b > {}".format(nmap_xml, yasuo_html)
    write_command(yasuo_command)
    # multi_enum4linux
    enum4linux_html = os.path.join(pentest_path, "enum4linux.html")
    enum4linux_command = pyscript("multi_enum4linux.py", csv_path, enum4linux_html, aha=True, use_proxy=use_proxy, line_fix=True)
    write_comment('Runs the enum4linux command on each IP.')
    write_command(enum4linux_command)
    # multi_wpscan
    wpscan_html = os.path.join(pentest_path, "wpscan")
    wpscan_update_command = "wpscan --update"
    wpscan_command = pyscript(script="multi_wpscan.py", in_file=webserver_path, out_dir=wpscan_html)
    write_command(wpscan_update_command)
    write_comment('Runs the wpscan command for every URL found.')
    write_command(wpscan_command)
    # multi_nikto
    nikto_dir_path = os.path.join(pentest_path, "nikto")
    nikto_command = pyscript("multi_nikto.py", csv_path, nikto_dir_path, use_proxy=use_proxy)
    write_comment('Runs nikto for every URL found.')
    write_command(nikto_command)
    # endpointmapper
    endpoint_resource_file = os.path.join(resource_path, "endpoint_mapper.rc")
    with open(endpoint_resource_file, "w") as f:
        f.write("use auxiliary/scanner/dcerpc/endpoint_mapper\n")
        f.write("set RHOSTS file:{}\n".format(ips_text))
        f.write("set THREADS 10\n")
        f.write("show options\n")
        f.write("exploit")
    endpoint_command = "resource {}".format(endpoint_resource_file)
    write_comment('Run this from within metasploit')
    write_command(endpoint_command)
    # multi_testssl
    testssl_path = os.path.join(pentest_path, "testssl")
    testssl_command = pyscript("multi_testssl.py", webserver_path, testssl_path)
    write_comment('Runs testssl for every URL found.')
    write_command(testssl_command)
    # smtp_relay
    write_comment("Run this from the metasploit console.")
    smtp_resource_path = os.path.join(resource_path, "smtp_relay.rc")
    with open(smtp_resource_path, "w") as f:
        f.write("use auxiliary/scanner/smtp/smtp_relay\n")
        f.write("services -p 25 -R\n")
        f.write("show options\n")
        f.write("exploit")
    smtp_relay_command = "resource {}".format(smtp_resource_path)
    write_command(smtp_relay_command)
    # multi_whatweb
    whatweb_path = os.path.join(pentest_path, "whatweb")
    whatweb_command = pyscript("multi_whatweb.py", webserver_path, whatweb_path)
    write_comment('Runs whatweb for every URL found.')
    write_command(whatweb_command)
    # zap_attack
    zap_path = os.path.join(pentest_path, "zap")
    zap_command = pyscript("zap_attack.py", webserver_path, zap_path)
    write_comment('Make sure ZAP is up and running first.')
    write_command(zap_command)
    # burp requests
    burp_command = pyscript("burp_requests.py", webserver_path)
    write_comment("Make sure burp is running and is listening on port {}".format(config.BURP_PORT))
    write_command(burp_command)
    # open_websites
    open_websites_comments = "Opens each website in the default browser"
    open_websites_command = pyscript("open_websites.py", webserver_path)
    write_comment(open_websites_comments)
    write_command(open_websites_command)
    # pikebrute
    ike_path = os.path.join(pentest_path, "ike")
    ike_command = pyscript("pikebrute.py", csv_path, ike_path)
    write_command(ike_command)