def load_indexes(uri):
    """Load index metadata from a ``file://`` JSON dump or a live ``mongodb://`` server.

    Returns a dict mapping collection name -> {index name -> index entry},
    with placeholder stats fields ('total_size', 'index_size_ratio',
    'collection_size_ratio', 'removal_score') added to every entry.  For
    mongodb:// sources a '__stats' key holding the collstats command output
    is also added per collection.

    Raises ValueError for any other URI scheme.
    """
    def _organize(index_dump):
        # Group raw index entries by collection, skipping names matched by
        # the module-level _INDEXES_TO_SKIP regex list.
        indexes = {}
        for entry in index_dump:
            # generator (not a list) so matching short-circuits
            if any(r.match(entry['name']) for r in _INDEXES_TO_SKIP):
                continue
            # 'ns' is "<db>.<collection>"; keep only the trailing component.
            # NOTE(review): collections whose names contain '.' would be
            # truncated here — presumably not expected in this data set.
            col = entry['ns'].split('.')[-1]
            # Annotate the entry in place with placeholder report fields.
            entry['total_size'] = 'N/A'
            entry['index_size_ratio'] = 'N/A'
            entry['collection_size_ratio'] = 'N/A'
            entry['removal_score'] = 0
            indexes.setdefault(col, {})[entry['name']] = entry
        return indexes

    parts = urlparse(uri)
    if parts.scheme == 'file':
        # Use a context manager so the dump file handle is closed promptly
        # (the original leaked it).
        with open(parts.path) as dump_file:
            indexes = _organize(json.loads(dump_file.read()))
    elif parts.scheme == 'mongodb':
        client = MongoClient(uri)
        database = get_default_database(client, uri)
        indexes = _organize([i for i in database['system.indexes'].find()])
        # Live sources additionally get per-collection statistics.
        for col in indexes:
            indexes[col]['__stats'] = database.command('collstats', col)
    else:
        raise ValueError('unknown source_uri scheme %s' % (parts.scheme))
    return indexes
def run(self):
    """Run the Dex monitor until it stops, then route its output.

    Polls the DexRunner thread, honoring the global ``should_exit`` flag,
    and when the run ends writes ``self._dex.output`` to stdout, to a
    ``file://`` path, or into the ``dex`` collection of a ``mongodb://``
    target, according to ``self.args.output_uri``.
    """
    logging.info('running dex...')
    self._dex = DexRunner(self.args.monitor_uri, self.args.slowms)
    self._dex.start()
    # Poll so a global shutdown request (should_exit) can interrupt the run.
    while self._dex.is_alive():
        if should_exit:
            self.kill()
        time.sleep(.1)
    self._dex.join()
    logging.info('dex stopped...')
    if self.args.output_uri is None:
        print(self._dex.output)
    else:
        parts = urlparse.urlparse(self.args.output_uri)
        if parts.scheme == 'file':
            with open(parts.path, 'w') as out_file:
                out_file.write(self._dex.output)
        elif parts.scheme == 'mongodb':
            client = pymongo.MongoClient(self.args.output_uri)
            # BUG FIX: was get_default_database(client, args.output_uri) —
            # 'args' is undefined in this scope; use self.args.
            db = get_default_database(client, self.args.output_uri)
            db.dex.insert({
                'session': self.args.session,
                'created': datetime.utcnow(),
                'output': json.loads(self._dex.output),
            })
def run_report(args):
    """Build an index report and dump it as markdown or JSON.

    Connects to ``args.uri``, loads index metadata from ``args.source_uri``,
    and writes the report to ``args.out`` (or stdout when ``args.out`` is
    None) in the format selected by ``args.type``.
    """
    client = MongoClient(args.uri)
    database = get_default_database(client, args.uri)
    indexes = load_indexes(args.source_uri)
    # Track whether we opened the stream ourselves — we must never close
    # sys.stdout (the original closed it unconditionally).
    owns_stream = args.out is not None
    stream = open(args.out, 'w') if owns_stream else sys.stdout
    try:
        report = Report(database, indexes, stream, args.session)
        report.build()
        if args.type == 'markdown':
            report.dump_mark_down()
        elif args.type == 'json':
            report.dump_json()
    finally:
        # Close only files we opened, and even on report errors.
        if owns_stream:
            stream.close()
def update_indexes(args):
    """Synchronize a database's indexes with a JSON index specification.

    Loads the desired indexes from the file at ``args.indexes``, compares
    them with the database's current ``system.indexes`` contents, then adds
    missing indexes and drops obsolete ones.  Optionally backs up the
    current indexes first when ``args.backup`` is given.
    """
    client = MongoClient(args.uri)
    # Explicit --database wins; otherwise fall back to the URI's default db.
    database = client[args.database] if args.database is not None \
        else get_default_database(client, args.uri)
    if args.backup is not None:
        backup_indexes(database, args.backup)

    def organize_index_list(list_):
        # Map collection name -> {index name -> index document}.
        indexes = {}
        for index in list_:
            # 'ns' is "<db>.<collection>"; keep the trailing component.
            col = index['ns'].split('.')[-1]
            indexes.setdefault(col, {})[index['name']] = index
        return indexes

    # Use a context manager so the spec file handle is closed promptly
    # (the original leaked it).
    with open(args.indexes) as indexes_file:
        new_indexes = organize_index_list(json.loads(indexes_file.read()))
    old_indexes = \
        organize_index_list([d for d in database['system.indexes'].find()])
    add_indexes(database, new_indexes, old_indexes)
    drop_indexes(database, new_indexes, old_indexes)
def run(self):
    """Run the Dex monitor to completion and persist its output.

    Spins on the DexRunner thread (checking the global ``should_exit`` flag
    each tick), then delivers ``self._dex.output`` to stdout, a ``file://``
    path, or a ``mongodb://`` target per ``self.args.output_uri``.
    """
    logging.info('running dex...')
    self._dex = DexRunner(self.args.monitor_uri, self.args.slowms)
    self._dex.start()
    # Busy-wait with a short sleep so shutdown requests are honored quickly.
    while self._dex.is_alive():
        if should_exit:
            self.kill()
        time.sleep(.1)
    self._dex.join()
    logging.info('dex stopped...')
    if self.args.output_uri is None:
        print(self._dex.output)
    else:
        parts = urlparse.urlparse(self.args.output_uri)
        if parts.scheme == 'file':
            with open(parts.path, 'w') as out_file:
                out_file.write(self._dex.output)
        elif parts.scheme == 'mongodb':
            client = pymongo.MongoClient(self.args.output_uri)
            # BUG FIX: 'args.output_uri' referenced an undefined name here;
            # the attribute lives on self.args.
            db = get_default_database(client, self.args.output_uri)
            db.dex.insert({'session': self.args.session,
                           'created': datetime.utcnow(),
                           'output': json.loads(self._dex.output)})