def setUp(self, requests_post, get_landowner):
    """Prepare a tester group, one category and three issues in distinct states."""
    testers = Group(name='testers')
    testers.save()
    category = Category(name='test cat')
    category.save()
    # Fake the 3rd-party API calls so no network access happens.
    requests_post.json.return_value = {'features': []}
    get_landowner.return_value = 'TEST landowner'
    # Our test issues: one in progress, one freshly submitted, one solved.
    Issue(id=0, description='A wip issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now(), status=StatusTypes.WIP,
          published=True, assigned=testers).save()
    Issue(description='A very fresh issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now(), status=StatusTypes.SUBMITTED,
          published=True, assigned=testers).save()
    Issue(description='A old finished issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now(), status=StatusTypes.SOLVED,
          published=False, assigned=testers, photo='testphoto.jpg').save()
def test_import_csv_categories_clean(self, requests_post, utils_get_landowner):
    """Check if clean removes existing objects"""
    existing = Category(name='Testcategory')
    existing.save()
    command = BaseCommand()
    # Importing should wipe the pre-existing category and load the CSV set.
    CategoryImporter(command, './legacy/tests/basic-cat.csv')
    self.assertEqual(Category.objects.count(), 11)
def test_start_qa(self, requests_post, get_landowner):
    """Do we find all QA critical issues?"""
    user = User(username='******', password=make_password('test'))
    user.save()
    testers = Group(name='testers')
    testers.save()
    testers.user_set.add(user)
    testers.save()
    category = Category(name='test cat')
    category.save()
    requests_post.json.return_value = {'features': []}
    get_landowner.return_value = 'TEST landowner'
    # Five issues: two qualify as "unchecked > 3d", one as "unupdated > 30d".
    Issue(description='A old issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now() - timedelta(days=30),
          status=StatusTypes.WIP, published=False, assigned=testers).save()
    Issue(description='Old assigned but unreviewed issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now() - timedelta(days=30),
          status=StatusTypes.REVIEW, published=False, assigned=testers).save()
    Issue(description='Another Old assigned but unreviewed issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now() - timedelta(days=20),
          status=StatusTypes.REVIEW, published=False, assigned=testers).save()
    Issue(description='Assigned very new issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now(), status=StatusTypes.REVIEW,
          published=False, assigned=testers).save()
    Issue(description='Unupdated issue',
          position=Point(54.1, 12.1, srid=4326), category=category,
          created_at=timezone.now() - timedelta(days=35),
          status=StatusTypes.WIP,
          status_created_at=timezone.now() - timedelta(days=35),
          published=True, assigned=testers).save()
    # Check if we get only old unreviewed issues back
    self.client.login(username='******', password='******')
    response = self.client.get(reverse('office:start'))
    self.assertEqual(len(response.context['issues3dunchecked']), 2)
    self.assertEqual(len(response.context['issues30dunupdated']), 1)
def start(request):
    """Render the office start page with the QA-critical issue lists."""
    group_names = [group.name for group in request.user.groups.all()]
    ourissues = Issue.objects.filter(assigned__in=request.user.groups.all())
    open_issues = ourissues.exclude(status__in=[
        StatusTypes.SOLVED, StatusTypes.IMPOSSIBLE, StatusTypes.DUBLICATE
    ])
    # All review issues older than 3 days.
    unchecked_issues = open_issues.filter(
        status=StatusTypes.REVIEW,
        created_at__lt=timezone.now() - timezone.timedelta(days=3))
    # All published issues without a status update for more than 30 days.
    unupdated_issues = open_issues.filter(
        published=True,
        status_created_at__lt=timezone.now() - timezone.timedelta(days=30))
    # All open ideas (issues below the ideas category root).
    ideas_root = Category.get_ideas_root()
    if ideas_root:
        our_ideas = open_issues.filter(category__in=ideas_root.get_descendants())
    else:
        our_ideas = None
    return render(request, 'office/start.html', {
        'groups': group_names,
        'issues3dunchecked': unchecked_issues,
        'issues30dunupdated': unupdated_issues,
        'ideas60d': our_ideas,
    })
def insert_test_task_in_spider_task_table(cls):
    """Seed the spider tables with one category, website, template and task."""
    def _persist(obj):
        # Each object is committed on its own so later rows can rely on
        # the auto-assigned ids of earlier ones.
        db.session.add(obj)
        db.session.commit()

    _persist(Category(name="测试"))
    _persist(Website(name="测试网站", description="测试用", is_hot="1",
                     category_id=1))
    _persist(SpiderTemplate(name="测试任务", website_id=1, category_id=1,
                            para_file="test.csv"))
    stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    _persist(SpiderTask(name="测试任务01", template_id=1, is_timing=0,
                        status=4, execute_cycle=0, publish_time=stamp,
                        paras_table="res_1_0000000001"))
def test_import_csv_issues_basic(self, requests_post, utils_get_landowner):
    """Check if parsing a wellformed CSV works fine"""
    command = BaseCommand()
    root = Category(id=0, name='Category A')
    root.save()
    # Sub-categories A1..A7 referenced by the fixture CSV.
    for idx in range(1, 8):
        Category(id=idx, name='A' + str(idx), parent=root).save()
    requests_post.json.return_value = {'features': []}
    utils_get_landowner.return_value = 'TEST landowner'
    IssueImporter(command, './legacy/tests/basic.csv')
    self.assertEqual(Issue.objects.count(), 8)
def test_import_csv_feedback_basic(self, requests_post, utils_get_landowner):
    """Check if parsing a wellformed CSV works fine"""
    command = BaseCommand()
    root = Category(id=0, name='Category A')
    root.save()
    requests_post.json.return_value = {'features': []}
    utils_get_landowner.return_value = 'TEST landowner'
    # The feedback fixture references issues with ids 1 and 2.
    for pk in (1, 2):
        Issue(id=pk, description="test issue with defined id",
              position=Point(5, 23), category=root).save()
    FeedbackImporter(command, './legacy/tests/basic-feedback.csv')
    self.assertEqual(Feedback.objects.count(), 2)
def test_new_Issue(self, requests_post, get_landowner):
    """ Can we create specific issues? """
    requests_post.json.return_value = {'features': []}
    get_landowner.return_value = 'TEST landowner'
    category = Category(name="test category")
    category.save()
    first = Issue(description="test issue", position=Point(5, 23),
                  category=category)
    first.save()
    self.assertEqual(len(Issue.objects.all()), 1)
    # An explicitly supplied primary key must be honoured.
    second = Issue(id=666, description="test issue with defined id",
                   position=Point(5, 23), category=category)
    second.save()
    self.assertEqual(second.id, 666)
def test_detail_assigned(self, requests_post, get_landowner):
    """The detail view must render both own and foreign unpublished issues."""
    user = User(username='******', password=make_password('test'))
    user.save()
    self.client.login(username='******', password='******')
    testers = Group(name='testers')
    testers.save()
    testers.user_set.add(user)
    testers.save()
    category = Category(name='test cat')
    category.save()
    requests_post.json.return_value = {'features': []}
    get_landowner.return_value = 'TEST landowner'

    def make_issue(text):
        # Both issues share everything except the description.
        issue = Issue(description=text,
                      position=Point(54.1, 12.1, srid=4326),
                      category=category, created_at=timezone.now(),
                      status=StatusTypes.WIP, published=False,
                      assigned=testers)
        issue.save()
        return issue

    own = make_issue('My issue')
    response = self.client.get(
        reverse('office:issue', kwargs={'pk': own.id}))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.context['issue'], own)
    # Everybody should at least read
    foreign = make_issue('Not mine')
    response = self.client.get(
        reverse('office:issue', kwargs={'pk': foreign.id}))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.context['issue'], foreign)
def insert(request):
    """Handle the "add product category" form submission.

    Reads ``name``, ``pid`` and ``path`` from POST, persists a new
    Category, and renders the admin info page with a status message.
    Missing POST keys or save errors are reported via the failure message.
    """
    try:
        category_info = Category()
        category_info.name = request.POST['name']
        category_info.pid = request.POST['pid']
        category_info.path = request.POST['path']
        category_info.save()
        # Fixed copy-paste bug: messages previously said "会员信息"
        # (member info) although this view inserts a product category.
        context = {'info': '商品类别添加成功'}
    except Exception as err:
        context = {'info': '商品类别添加失败: %s' % str(err)}
    return render(request, 'myadmin/info.html', context)
def insert_test_task_in_spider_task_table2():
    """Seed the spider tables with a JD-search demo category/website/template/task."""
    def _persist(obj):
        # Commit per object so subsequent rows can reference earlier ids.
        db.session.add(obj)
        db.session.commit()

    _persist(Category(name="京东"))
    _persist(Website(name="京东搜索", description="抓取京东搜索内容",
                     is_hot="1", category_id=1))
    _persist(SpiderTemplate(name="京东搜索", website_id=2, category_id=2,
                            para_file="test2.csv"))
    stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    _persist(SpiderTask(name="京东搜索任务", template_id=1, is_timing=0,
                        status=4, execute_cycle=0, publish_time=stamp,
                        paras_table="jd_demo", res_table="res_2_0000000001"))
def parseRow(self, row):
    """Create and persist one Category from a CSV *row*.

    A row with an empty ``parent`` column is a main category and is
    attached to the synthetic root for its ``typ`` class (self.typeMap);
    any other row is a sub-category attached to its already-imported
    parent. Raises Category.DoesNotExist if the parent row was not
    imported first, and KeyError for an unknown ``typ`` value.
    """
    cat_id = row['id']  # renamed: ``id`` shadowed the builtin
    name = row['name']
    type_class = row['typ']
    parent_id = row['parent']
    if parent_id == '':
        # A main category: hang it below the mapped type root.
        parent = self.typeMap[type_class]
    else:
        # A sub-category: look up the previously imported parent.
        parent = Category.objects.get(id=parent_id)
    # Construct once — both branches previously duplicated this call.
    Category(id=cat_id, name=name, parent=parent).save()
def setUp(self):
    """Build a three-level category hierarchy: ideas root -> main -> two subs."""
    ideas_root = Category(name=Category.IDEA)
    ideas_root.save()
    main_cat = Category(name='main cat', parent=ideas_root)
    main_cat.save()
    for label in ('A', 'B'):
        Category(name='test sub cat ' + label, parent=main_cat).save()
def eraseObjects(self):
    """Wipe all categories and recreate the three fixed type roots (ids 200-202)."""
    Category.objects.all().delete()
    roots = {}
    for pk, (key, name) in enumerate(
            [("problem", Category.PROBLEM),
             ("idee", Category.IDEA),
             ("tipp", Category.TIP)], start=200):
        root = Category(id=pk, name=name)
        root.save()
        roots[key] = root
    # Maps the CSV "typ" column values onto their root categories.
    self.typeMap = roots
def get_queryset(self):
    """Build the issue queryset from CitySDK-style query parameters.

    Supported parameters: ``max_requests``, ``also_archived``,
    ``start_date``/``end_date``, ``lat``/``long``/``radius``,
    ``keyword``, ``with_picture`` and ``detailed_status``.
    Unknown ``detailed_status`` values raise KeyError (unchanged).
    """
    query_params = self.request.query_params
    # TODO: Security Check the filterstrings
    max_requests = query_params.get('max_requests', None)  # TODO: What is CitySDK default limit?
    also_archived = query_params.get('also_archived', 'false')
    start_date = query_params.get('start_date', None)
    end_date = query_params.get('end_date', None)
    lat = query_params.get('lat', None)
    long = query_params.get('long', None)
    radius = query_params.get('radius', None)
    keywords = query_params.get('keyword', None)
    with_picture = query_params.get('with_picture', None)
    queryStatusCitySDK = query_params.get('detailed_status', None)
    # TODO: params just_count, (updated_after, updated_before), (agency_responsible)

    if also_archived.lower() == 'true':
        # TODO: Adapt to new depublish flag?
        queryset_list = Issue.objects.all().order_by('-created_at')
    else:
        queryset_list = Issue.objects.filter(published=True).order_by('-created_at')

    # Creation-date window; either bound may be given on its own.
    if start_date and end_date:
        queryset_list = queryset_list.filter(created_at__range=[start_date, end_date])
    elif start_date:
        queryset_list = queryset_list.filter(created_at__gte=start_date)
    elif end_date:
        queryset_list = queryset_list.filter(created_at__lte=end_date)

    if keywords:
        # Limit by type (old Klarschiff uses keywords list)
        keywords = keywords.lower()
        if 'idee' in keywords:
            catidee = Category.get_ideas_root()
            queryset_list = queryset_list.filter(category__in=catidee.get_descendants())
        if 'problem' in keywords:
            catproblem = Category.get_problem_root()
            queryset_list = queryset_list.filter(category__in=catproblem.get_descendants())

    if queryStatusCitySDK:
        # Limit by status list — map CitySDK names onto internal statuses.
        statusMap = {'RECEIVED': StatusTypes.SUBMITTED,
                     'IN_PROCESS': StatusTypes.WIP,
                     'PROCESSED': StatusTypes.SOLVED,
                     'REJECTED': StatusTypes.IMPOSSIBLE,
                     'closed': StatusTypes.DUBLICATE}
        queryStatus = [statusMap[x] for x in queryStatusCitySDK.split(',')]
        # Review is mapped as IN_PROCESS as well
        if StatusTypes.WIP in queryStatus:
            queryStatus.append(StatusTypes.REVIEW)
        queryset_list = queryset_list.filter(status__in=queryStatus)

    if with_picture:
        # Limit if photo present
        if with_picture.lower() == 'true':
            queryset_list = queryset_list.exclude(photo__exact='')

    if lat and long and radius:
        # Limit by surrounding geo bbox.
        # NOTE(review): WKT is POINT(x y) == POINT(lon lat); passing lat
        # first looks swapped — confirm against callers before changing.
        pnt = GEOSGeometry('POINT({} {})'.format(lat, long), srid=4326)
        # Removed leftover debug print(pnt).
        # TODO: Make sure internal CRS is projected -> metrical
        # TODO: Limiting size for requests ?
        queryset_list = queryset_list.filter(position__distance_lte=(pnt, D(m=float(radius))))

    if max_requests:
        # Limit by amount
        queryset_list = queryset_list[:int(max_requests)]
    return queryset_list