def test_apply(self):
    """Applying a query spec produces SQL equivalent to chained filters.

    The manager does not guarantee the order in which it chains the
    individual filters onto the queryset, so each assertion accepts any
    of the admissible filter orderings.
    """
    # query1: two datetime comparisons plus a negated equality.
    applied = ExperimentQueryManager.apply(query_spec=self.query1,
                                           queryset=Experiment.objects)
    candidates = [
        str(Experiment.objects.filter(
            updated_at__lte='2020-10-10'
        ).filter(
            started_at__gt='2010-10-10'
        ).filter(
            ~Q(started_at='2016-10-01')
        ).query),
        str(Experiment.objects.filter(
            started_at__gt='2010-10-10'
        ).filter(
            ~Q(started_at='2016-10-01')
        ).filter(
            updated_at__lte='2020-10-10'
        ).query),
        str(Experiment.objects.filter(
            ~Q(started_at='2016-10-01')
        ).filter(
            started_at__gt='2010-10-10'
        ).filter(
            updated_at__lte='2020-10-10'
        ).query),
    ]
    assert str(applied.query) in candidates

    # query2: a metric comparison joined with a status membership test.
    applied = ExperimentQueryManager.apply(query_spec=self.query2,
                                           queryset=Experiment.objects)
    candidates = [
        str(Experiment.objects.filter(
            metric__values__loss__lte=0.8
        ).filter(
            status__status__in=['starting', 'running']
        ).query),
        str(Experiment.objects.filter(
            status__status__in=['starting', 'running']
        ).filter(
            metric__values__loss__lte=0.8
        ).query),
    ]
    assert str(applied.query) in candidates

    # query4: a negated tag overlap combined with a tag containment test.
    applied = ExperimentQueryManager.apply(query_spec=self.query4,
                                           queryset=Experiment.objects)
    candidates = [
        str(Experiment.objects.filter(
            ~Q(tags__overlap=['tag1', 'tag2'])
        ).filter(
            tags__contains=['tag3']
        ).query),
        str(Experiment.objects.filter(
            tags__contains=['tag3']
        ).filter(
            ~Q(tags__overlap=['tag1', 'tag2'])
        ).query),
    ]
    assert str(applied.query) in candidates
def test_parse(self):
    """Parsing a tokenized query yields per-field lists of QueryOpSpec."""
    # query1: datetime comparisons plus a negated equality.
    parsed = ExperimentQueryManager.parse(
        ExperimentQueryManager.tokenize(self.query1))
    assert parsed == {
        'updated_at': [QueryOpSpec(op='<=', negation=False, params='2020-10-10')],
        'started_at': [
            QueryOpSpec(op='>', negation=False, params='2010-10-10'),
            QueryOpSpec(op='=', negation=True, params='2016-10-01')
        ],
    }

    # query2: a numeric comparison and a value-membership ('|') condition.
    parsed = ExperimentQueryManager.parse(
        ExperimentQueryManager.tokenize(self.query2))
    assert parsed == {
        'metric__loss': [QueryOpSpec('<=', False, params=0.8)],
        'status': [QueryOpSpec('|', False, params=['starting', 'running'])],
    }

    # query3: a range ('..') condition on a datetime field.
    parsed = ExperimentQueryManager.parse(
        ExperimentQueryManager.tokenize(self.query3))
    assert parsed == {
        'finished_at': [QueryOpSpec('..', False, params=['2012-12-12', '2042-12-12'])],
    }

    # query4: a negated multi-value condition plus an equality on tags.
    parsed = ExperimentQueryManager.parse(
        ExperimentQueryManager.tokenize(self.query4))
    assert parsed == {
        'tags': [
            QueryOpSpec('|', True, params=['tag1', 'tag2']),
            QueryOpSpec('=', False, params='tag3')
        ],
    }
def test_tokenize(self):
    """Tokenizing splits a raw query into a {field: [op tokens]} mapping.

    Comparisons are normalized to ``dict(tokenized) == {...}`` throughout;
    the original mixed ``dict(x.items())``, ``dict(x)`` and direct equality
    for the same kind of assertion.
    """
    tokenized = ExperimentQueryManager.tokenize(self.query1)
    assert dict(tokenized) == {
        'updated_at': ['<=2020-10-10'],
        'started_at': ['>2010-10-10', '~2016-10-01'],
    }

    tokenized = ExperimentQueryManager.tokenize(self.query2)
    assert dict(tokenized) == {
        'metric.loss': ['<=0.8'],
        'status': ['starting|running'],
    }

    tokenized = ExperimentQueryManager.tokenize(self.query3)
    assert dict(tokenized) == {
        'finished_at': ['2012-12-12..2042-12-12'],
    }

    tokenized = ExperimentQueryManager.tokenize(self.query4)
    assert dict(tokenized) == {
        'tags': ['~tag1|tag2', 'tag3'],
    }

    tokenized = ExperimentQueryManager.tokenize(self.query5)
    assert dict(tokenized) == {'name': ['%foo%'], 'description': ['~bal%']}

    # query6 is malformed and must be rejected by the tokenizer.
    with self.assertRaises(QueryError):
        ExperimentQueryManager.tokenize(self.query6)
def filter_queryset(cls, manager, query_spec, queryset):
    """Dispatch query filtering to the manager registered under ``manager``.

    Args:
        manager: the ``NAME`` of the query manager to use.
        query_spec: the query specification to apply.
        queryset: the queryset to filter.

    Returns:
        The filtered queryset, or ``None`` when ``manager`` matches no
        known query manager (preserving the original fall-through
        behavior of the if-chain this replaces).
    """
    # Dispatch table instead of a repeated if/return chain.
    managers = {
        ExperimentQueryManager.NAME: ExperimentQueryManager,
        ExperimentGroupQueryManager.NAME: ExperimentGroupQueryManager,
        BuildQueryManager.NAME: BuildQueryManager,
        JobQueryManager.NAME: JobQueryManager,
        TensorboardQueryManager.NAME: TensorboardQueryManager,
    }
    query_manager = managers.get(manager)
    if query_manager is not None:
        return query_manager.apply(query_spec=query_spec, queryset=queryset)
    # Unknown manager name: return None, exactly as before.
    return None
def test_build(self):
    """Building a parsed query wraps each op in a typed QueryCondSpec."""
    def run_pipeline(query):
        # tokenize -> parse -> build, mirroring the manager's own flow.
        tokenized = ExperimentQueryManager.tokenize(query)
        parsed = ExperimentQueryManager.parse(tokenized)
        return ExperimentQueryManager.build(parsed)

    # query1: datetime fields produce DateTimeCondition specs.
    assert run_pipeline(self.query1) == {
        'updated_at': [
            QueryCondSpec(DateTimeCondition(op='<=', negation=False), params='2020-10-10')
        ],
        'started_at': [
            QueryCondSpec(DateTimeCondition(op='>', negation=False), params='2010-10-10'),
            QueryCondSpec(DateTimeCondition(op='=', negation=True), params='2016-10-01')
        ],
    }

    # query2: numeric comparison and value-membership conditions.
    assert run_pipeline(self.query2) == {
        'metric__loss': [
            QueryCondSpec(ComparisonCondition(op='<=', negation=False), params=0.8)
        ],
        'status': [
            QueryCondSpec(ValueCondition(op='|', negation=False), params=['starting', 'running'])
        ],
    }

    # query3: a datetime range condition.
    assert run_pipeline(self.query3) == {
        'finished_at': [
            QueryCondSpec(DateTimeCondition(op='..', negation=False), params=['2012-12-12', '2042-12-12'])
        ],
    }

    # query4: array conditions on tags, one negated.
    assert run_pipeline(self.query4) == {
        'tags': [
            QueryCondSpec(ArrayCondition(op='|', negation=True), params=['tag1', 'tag2']),
            QueryCondSpec(ArrayCondition(op='=', negation=False), params='tag3')
        ],
    }
def test_handle(self):
    """handle_query is equivalent to running tokenize, parse and build."""
    pipeline_result = ExperimentQueryManager.build(
        ExperimentQueryManager.parse(
            ExperimentQueryManager.tokenize(self.query1)))
    assert pipeline_result == ExperimentQueryManager.handle_query(self.query1)
def filter_experiments(query_spec, queryset):
    """Filter ``queryset`` with ``query_spec`` via the experiment query manager."""
    filtered = ExperimentQueryManager.apply(query_spec=query_spec,
                                            queryset=queryset)
    return filtered