def __init__(self, curVersion):
    """Store the local version string, aborting the process if it is
    not numeric.

    :param curVersion: version of the locally installed release;
        must parse as a float (e.g. '1.2') because check() compares
        versions with float().
    """
    self.localVersion = curVersion
    # Validate eagerly so a bad version fails here rather than later in
    # check().  Narrowed from a bare except, which would also swallow
    # SystemExit / KeyboardInterrupt.
    try:
        float(self.localVersion)
    except (ValueError, TypeError):
        logger.error('Could not determine local version.')
        sys.exit()
def extract(self):
    """Unpack the downloaded gzipped tarball (self.localPath) into
    ./tmp under the current working directory.

    Exits the process on extraction failure.  The archive handle is
    now always closed (the original leaked it when extractall raised).
    """
    # NOTE(review): the archive comes from a network download;
    # extractall() does not guard against path-traversal member names.
    # Flagging rather than changing behavior here.
    archive = tarfile.open(self.localPath, 'r:gz')  # renamed: 'file' shadowed the builtin
    logger.debug('Archive opened')
    try:
        # TarError covers malformed archives; EnvironmentError covers
        # IOError/OSError from the filesystem.  A bare except would
        # also swallow SystemExit / KeyboardInterrupt.
        archive.extractall(os.path.join(os.getcwd(), 'tmp'))
    except (tarfile.TarError, EnvironmentError):
        logger.error('Extract error.')
        sys.exit()
    finally:
        archive.close()
def start(self):
    """Run the search over self.filteredList with a progress bar.

    Returns 0 immediately when there are no files to search; otherwise
    dispatches to the content or name search, finishes the progress
    bar, and prints the results.
    """
    # Idiomatic emptiness test (was: len(self.filteredList) == 0).
    if not self.filteredList:
        logger.error("No files in range.")
        return 0
    self.pbar = ProgressBar(widgets=self.mainWidgets,
                            maxval=len(self.filteredList)).start()
    # self.type selects the search mode; anything containing 'content'
    # searches file contents, everything else searches file names.
    if 'content' in self.type:
        self.startContent()
    else:
        self.startName()
    self.pbar.finish()
    self.printResults()
def download(self):
    """Download the release tarball for the configured repo key into
    ./tmp, recording its path in self.localPath.

    Exits the process on an unknown key or a failed download.
    """
    if self.key == 'master':
        url = RELO_UPDATE_URL_MASTER
    elif self.key == 'develop':
        url = RELO_UPDATE_URL_DEVELOP
    else:
        # Bug fix: the original left 'url' unbound here and crashed
        # with a NameError on the next line; fail explicitly instead.
        logger.error('Invalid Repo-Key')
        sys.exit()
    logger.info('Download URL - %s' % url)
    self.localPath = os.path.join(os.getcwd(), 'tmp',
                                  'relo-%s.tar.gz' % self.remoteVersion)
    try:
        curl(url, self.localPath)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate; curl()'s exact exception types are not
        # visible from here.
        logger.error('Download error.')
        sys.exit()
def check(self):
    """Fetch the remote version for the configured repo key and compare
    it with the local version.

    Returns True when the remote version is newer, False otherwise.
    Exits the process on an unknown key or an unparsable remote
    version.
    """
    if self.key == 'master':
        url = RELO_MASTER_VERSION_URL
    elif self.key == 'develop':
        url = RELO_DEVELOP_VERSION_URL
    else:
        # Bug fix: the original left 'url' unbound for any other key
        # and crashed with a NameError; fail explicitly instead.
        logger.error('Invalid Repo-Key')
        sys.exit()
    remote = urllib.urlopen(url)
    try:
        self.remoteVersion = remote.read()
    finally:
        remote.close()  # the original never closed the connection
    # Narrowed from a bare except, which would also swallow
    # SystemExit / KeyboardInterrupt.
    try:
        float(self.remoteVersion)
    except (ValueError, TypeError):
        logger.error('Could not determine remote version.')
        sys.exit()
    logger.info('Local Version - ' + self.localVersion)
    logger.info('Remote Version - ' + self.remoteVersion)
    if float(self.remoteVersion) > float(self.localVersion):
        logger.log("Found new version: " + self.remoteVersion)
        return True
    else:
        logger.head("Already Up-To-Date")
        return False
def main(): """ parses the arguments and starts the application """ parser = argparse.ArgumentParser(description='Recursive Document Content Search in Python') parser.add_argument('-v', '--version', action='version', version=('%(prog)s ' + __version__)) log_group = parser.add_mutually_exclusive_group() log_group.add_argument('-i', '--info', action='store_true', help='prints extra information') log_group.add_argument('-d', '--debug', action='store_true', help='prints debug information') reloParsers = parser.add_subparsers(help='sub-command help') ##### Config Arguments ##### update = reloParsers.add_parser('update', help='update help') update.set_defaults(which='update') update.add_argument('key', help="master/develop") config = reloParsers.add_parser('config', help='config help') #config.set_defaults(which='config') configParsers = config.add_subparsers() list = configParsers.add_parser('list', help="lists all available config fields") list.set_defaults(which='config.list') read = configParsers.add_parser('read', help="read a specific option from the config") read.set_defaults(which='config.read') read.add_argument('key', help="config key") write = configParsers.add_parser('write', help="write a specific value to the config") write.set_defaults(which='config.write') write.add_argument('key', help="config key") write.add_argument('value', help="new value") ##### Local Argumnets ##### ## INDEX index = reloParsers.add_parser('index', help='index help') index.set_defaults(which='index') index.add_argument('-s', '--hidden', '--secret', action='store_true', help='search hidden files') index.add_argument('directory', action='store', default='./', help='select directory') index_type_group = index.add_mutually_exclusive_group() index_type_group.add_argument('-m', '--meta', action='store_true', help='search match in fileNames') index_type_group.add_argument('-c', '--content', action='store_true', help='search match in content') ## STATS stats = 
reloParsers.add_parser('stats', help='analyze help') stats.set_defaults(which='stats') stats.add_argument('module', action='store', help='module to use') stats.add_argument('-s', '--hidden', '--secret', action='store_true', help='take hidden files into account') stats.add_argument('-d', '--directory', action='store', default='./', dest='directory', help='select directory') ## SEARCH search = reloParsers.add_parser('search', help='search help') search.set_defaults(which='search') search.add_argument('search_key', action='store', help='keyword to search for') search.add_argument('-s', '--hidden', '--secret', action='store_true', help='search hidden files') search.add_argument('--filelog', action='store_true', help='log is written to file - always in debug mode') search.add_argument('-r', '--recursive', action='store_true', help='search recursively') search.add_argument('-f', '--forceSearch', action='store_true', help='force a real file system search') doctype_group = search.add_mutually_exclusive_group() doctype_group.add_argument('-a', '--all', action='store_true', help='search all files (even non supported with standard plugin)') doctype_group.add_argument('--doctype', action='store', help='specify doctypes you want to use in your search') search_type_group = search.add_mutually_exclusive_group() search_type_group.add_argument('-n', '--name', action='store_true', help='search match in fileNames (regex allowed) - (default)') search_type_group.add_argument('-c', '--content', action='store_true', help='search match in content (regex allowed)') log_group = search.add_mutually_exclusive_group() log_group.add_argument('--info', action='store_true', help='enable info mode') log_group.add_argument('--debug', '--verbose', action='store_true', help='enable debug/verbose mode') search.add_argument('-d', '--directory', action='store', default='./', dest='directory', help='select Directory - (default=current)') ##### Remote Argumnets ##### crawl = 
reloParsers.add_parser('crawl', help='crawl help') crawl.set_defaults(which='crawl') crawl.add_argument('url', action='store', help='url to use') try: results = parser.parse_args(args=sys.argv[1:]) ########## PREP ########## if results.info: logger.level = 1 elif results.debug: logger.level = 2 except IOError, msg: parser.error(str(msg)) logger.error(str(msg)) return 1
if results.which.startswith('config'): if results.which == 'config.list': conf.listConfig(None) if results.which == 'config.write': conf.writeConfig(results.key, results.value) if results.which == 'config.read': print conf.readConfig(results.key) ########## UPDATE ########## elif results.which == 'update': from relo.core.update import ReloUpdater curVersion = __version__ relo = ReloUpdater(curVersion) if results.key in ['master', 'develop']: relo.update(results.key) else: logger.error('Invalid Repo-Key') ########## CRAWL ########## elif results.which == 'crawl': url = results.url sTime = time.time() crawler = rawl.Crawler(url, 16) crawler.crawl() print "\n".join(crawler.urls) eTime = time.time() tTime = eTime - sTime print "Found: %d" % crawler.links print "Followed: %d" % crawler.followed