def find_all(self, filter=None, projection=None, *args, flatten=True, **kwargs):
    """Return all documents matching *filter* from the reports collection.

    Parameters
    ----------
    filter : dict, optional
        MongoDB query document; falsy values are replaced by an empty
        query (match everything).
    projection : dict, optional
        MongoDB projection document limiting the returned fields.
    flatten : bool, keyword-only
        When True (default), each returned document is flattened into a
        single-level dict with dot-separated keys via ``datautils.flatten``.
    *args, **kwargs
        Passed straight through to the underlying ``find`` call.

    Returns
    -------
    list of dict
        The fetched (and optionally flattened) documents.
    """
    if not filter:
        filter = dict()
    # %-style lazy formatting: interpolation is deferred until the INFO
    # level is actually enabled.  The original string carried a misleading
    # ``f`` prefix although it contains no ``{}`` placeholders — dropped.
    logger.info(
        'db.getCollection("%s").find(\n'
        '%s\n'
        ',\n%s\n)',
        str(self.reports.name),
        strings.pad_lines(strings.to_json(filter)),
        strings.pad_lines(strings.to_json(projection)),
    )
    with Timer('db find: db-stuff', log=logger.debug):
        with Timer('db find: db-stuff: find', log=logger.debug):
            # TODO this may take a while depending on the size of the collection
            cursor = self.reports.find(filter, projection, *args, **kwargs)
        with Timer('db find: db-stuff: fetch and flatten', log=logger.debug):
            if flatten:
                # iterate the cursor directly — no need to materialise it
                # into a throwaway list first
                items = [datautils.flatten(x, sep='.') for x in cursor]
            else:
                items = list(cursor)
    return items
def pretty(self):
    """Return a multi-line, human-readable description of this project."""
    header = f'Project <{self.name}>'
    workdir_line = f' - workdir: {self.workdir}'
    install_line = strings.pad_lines(f'- {self.install.pretty}')
    test_line = strings.pad_lines(f'- {self.test.pretty}')
    return '\n'.join([header, workdir_line, install_line, test_line])
def find_one(self, filter=None, *args, **kwargs):
    """Return the first document matching *filter*, or ``None``.

    Parameters
    ----------
    filter : dict, optional
        MongoDB query document; falsy values are replaced by an empty
        query (match everything).
    *args, **kwargs
        Passed straight through to the underlying ``find_one`` call.

    Returns
    -------
    dict or None
        The first matching document, or ``None`` when nothing matches.
    """
    if not filter:
        filter = dict()
    # Lazy %-style args (consistent with find_all) instead of eagerly
    # %-formatting an f-string — the rendered message is identical, but
    # interpolation only happens when DEBUG is enabled.
    logger.debug(
        'db.getCollection("%s").findOne(\n%s\n)',
        self.reports.name,
        strings.pad_lines(strings.to_json(filter)))
    return self.reports.find_one(filter, *args, **kwargs)
def aggregate(self, match=None, unwind=None, project=None, flatten=True, *args):
    """Shortcut for an aggregate method.

    Parameters
    ----------
    match : dict
        $match aggregation object according to MongoDB specification
    unwind : dict or str
        $unwind aggregation object according to MongoDB specification
    project : dict
        $project aggregation object according to MongoDB specification
    flatten : bool
        When True (default), flatten each result document into a
        single-level dict with dot-separated keys.
    *args
        Extra pipeline stages appended verbatim after the three above.

    Returns
    -------
    list of dict
        The aggregation results (optionally flattened).
    """
    pipeline = list()
    if match:
        pipeline.append({'$match': match})
    if unwind:
        pipeline.append({'$unwind': unwind})
    if project:
        pipeline.append({'$project': project})
    if args:
        pipeline.extend(args)

    # lazy %-style args, consistent with find_all/find_one logging
    logger.debug(
        'db.getCollection("%s").aggregate(\n%s\n)',
        self.reports.name,
        strings.pad_lines(strings.to_json(pipeline)))

    # BUGFIX: the outer timer was labelled 'db find: db-stuff' — a
    # copy-paste from find_all; both inner timers already say 'db aggregate'.
    with Timer('db aggregate: db-stuff', log=logger.debug):
        with Timer('db aggregate: db-stuff: find', log=logger.debug):
            # TODO this may take a while depending on the size of the collection
            cursor = self.reports.aggregate(pipeline)
        with Timer('db aggregate: db-stuff: fetch and flatten', log=logger.debug):
            if flatten:
                # iterate the cursor directly — no throwaway list()
                items = [datautils.flatten(x, sep='.') for x in cursor]
            else:
                items = list(cursor)
    return items
def timers_stats(self):
    """Aggregate per-commit duration statistics, sorted newest first.

    Groups reports by git commit hash and formatted commit datetime,
    computing sum/avg/max/min of ``result.duration`` plus an item count.
    """
    group_key = {
        'hash': '$git.commit',
        'date': {
            '$dateToString': {
                'date': '$git.datetime',
                'format': '%Y-%m-%d %H:%M:%S',
            }
        },
    }
    group_stage = {'_id': group_key}
    # one accumulator per aggregation operator over the report duration
    for op in ('sum', 'avg', 'max', 'min'):
        group_stage['dur_%s' % op] = {'$%s' % op: '$result.duration'}
    group_stage['items'] = {'$sum': 1}

    pipeline = [
        {'$group': group_stage},
        {'$sort': {'_id.date': -1}},
    ]
    logger.debug(
        f'db.getCollection("{self.reports.name}").aggregate(\n%s\n)'
        % strings.pad_lines(strings.to_json(pipeline)))
    return list(self.reports.aggregate(pipeline))
def __repr__(self):
    """Debug representation: class name, script path and indented content."""
    body = strings.pad_lines(self.script_content, 4)
    return f'{self.__class__.__name__}{self.script_path}:\n{body}'