def test_select_specific_task(self):
    crawler = NetunoCrawler(ROOT_URL)
    crawler.login(username='******', password='******')
    self.assertTrue(crawler.logged_in)

    crawler.go_to_all_tasks()
    tasks = get_list_of_partial_tasks(crawler.content)
    self.assertEqual(5, len(tasks))

    partial_task = tasks[0]
    crawler.go_to_task(partial_task['original_id'])
    task = get_task(crawler.content)
    self.assertEqual(task['type'], u'leaf')
    self.assertEqual(task['original_id'], 2376)
    self.assertEqual(task['project_id'], 118)
    self.assertEqual(task['name'], u'Reuniões')

    partial_task = tasks[2]
    crawler.go_to_task(partial_task['original_id'])
    task = get_task(crawler.content)
    self.assertEqual(task['type'], u'parent')
    self.assertEqual(task['original_id'], 2207)
    self.assertEqual(task['project_id'], 108)
    self.assertEqual(task['name'], u'1o Sprint')

    partial_task = tasks[-1]
    crawler.go_to_task(partial_task['original_id'])
    task = get_task(crawler.content)
    self.assertEqual(task['type'], u'leaf')
    self.assertEqual(task['original_id'], 1371)
    self.assertEqual(task['project_id'], 77)
    self.assertEqual(task['name'], u'Desenvolvimento')
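# The test above assumes the crawler, parser helpers and ROOT_URL are
# already importable. A minimal sketch of that setup; the module paths
# and instance URL here are assumptions, not taken from the project:
#
#     from netuno.crawler import NetunoCrawler
#     from netuno.parser import get_list_of_partial_tasks, get_task
#
#     ROOT_URL = 'http://netuno.example.com'  # hypothetical instance URL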
def test_get_task_2056(self):
    task_page = self.get_sample_content('task2056.html')
    task = parser.get_task(task_page)
    self.assertEqual(task['type'], 'leaf')
    self.assertEqual(task['name'], u'Suporte APL Maio')
    self.assertEqual(task['original_id'], 2056)
def import_tasks(self, task_ids=None, parent=None):
    # No explicit ids means a full run: sign in and crawl every task.
    if task_ids is None:
        self.sign_in()
        self.crawler.go_to_all_tasks()
        partial_task_ids = [
            task['original_id']
            for task in get_list_of_partial_tasks(self.crawler.content)
        ]
    else:
        partial_task_ids = task_ids
    for task_id in partial_task_ids:
        self.logfile.write('Importing task (id=%d)\n' % task_id)
        try:
            self.crawler.go_to_task(task_id)
            task_dict = get_task(self.crawler.content)
            ImportedEntity.import_task(task_dict, parent)
            entity = ImportedEntity.objects.get(
                original_id=task_dict['original_id'], category='T')
            task = Task.objects.get(id=entity.new_id)
            # Parent tasks recurse into their subtasks, with the freshly
            # imported task as the new parent.
            if task_dict['type'] == 'parent':
                self.import_tasks(task_dict['subtasks_ids'], parent=task)
        except Exception:
            self.logfile.write(
                'Task (id=%d) not imported because of error:\n' % task_id)
            traceback.print_exc(file=self.logfile)
            self.logfile.write('URL: %s\n' % self.crawler.browser.geturl())
            self.logfile.write('Content: %s\n' % self.crawler.content)
        else:
            self.logfile.write(
                'Task (id=%d) and subtasks imported\n' % task_id)
    if task_ids is None:
        self.already_done.append(TASKS)
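# A minimal, self-contained sketch of the recursive walk import_tasks
# performs; plain dicts stand in for the crawler, parser and ORM, and
# all names below are illustrative, not the project's API:
def walk_tasks(task_by_id, task_ids, parent=None, depth=0):
    for task_id in task_ids:
        task = task_by_id[task_id]
        print('  ' * depth + 'import %d under %r' % (task_id, parent))
        if task['type'] == 'parent':
            walk_tasks(task_by_id, task['subtasks_ids'],
                       parent=task_id, depth=depth + 1)

# Task ids taken from the fixtures used elsewhere in this section.
tree = {
    2207: {'type': 'parent', 'subtasks_ids': [2208, 2209]},
    2208: {'type': 'leaf', 'subtasks_ids': []},
    2209: {'type': 'leaf', 'subtasks_ids': []},
}
walk_tasks(tree, [2207])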
def test_get_leaf_task(self):
    task_page = self.get_sample_content('task.html')
    task = parser.get_task(task_page)
    self.assertEqual(task['type'], 'leaf')
    self.assertEqual(task['name'],
                     u'OS02 - Arquitetura da Informação e Identidade Visual')
    self.assertEqual(task['original_id'], 2832)
    self.assertEqual(task['project_id'], 151)
    self.assertEqual(task['description'], '')
def test_get_subtask(self):
    task_page = self.get_sample_content('subtask.html')
    task = parser.get_task(task_page)
    self.assertEqual(task['type'], 'leaf')
    self.assertEqual(task['name'], u'Listagem das Missões')
    self.assertEqual(task['original_id'], 2208)
    self.assertEqual(task['project_id'], 108)
    self.assertEqual(task['description'], '')
def test_get_empty_parent_task(self):
    task_page = self.get_sample_content('supertask-none.html')
    task = parser.get_task(task_page)
    self.assertEqual(task['type'], 'parent')
    self.assertEqual(task['name'], u'OS Intranet SETIN')
    self.assertEqual(task['original_id'], 2604)
    self.assertEqual(task['project_id'], 120)
    self.assertEqual(task['description'], '')
    self.assertItemsEqual(task['subtasks_ids'], [])
def test_get_parent_task(self):
    task_page = self.get_sample_content('supertask.html')
    task = parser.get_task(task_page)
    self.assertEqual(task['type'], 'parent')
    self.assertEqual(task['name'], u'1o Sprint')
    self.assertEqual(task['original_id'], 2207)
    self.assertEqual(task['project_id'], 108)
    self.assertEqual(task['description'], '')
    self.assertItemsEqual(task['subtasks_ids'], [2208, 2209, 2210])
def test_get_task_with_users(self):
    task_page = self.get_sample_content('task.html')
    task = parser.get_task(task_page)
    self.assertEqual(task['type'], 'leaf')
    self.assertEqual(task['name'],
                     u'OS02 - Arquitetura da Informação e Identidade Visual')
    self.assertEqual(task['original_id'], 2832)
    self.assertEqual(task['project_id'], 151)
    self.assertEqual(task['description'], '')
    # 'incubent' is a single string, so plain assertEqual is the right
    # assertion here (assertItemsEqual would compare it character by
    # character); the key name is kept as the parser spells it.
    self.assertEqual(task['incubent'], 'vinicius.botelho')
    self.assertItemsEqual(task['users'], [
        (u'Bruna Sodré', u'*****@*****.**'),
        (u'Jhony Buril Cardozo de Oliveira', u'*****@*****.**'),
        (u'Leonardo Antonialli', u'*****@*****.**'),
        (u'Wesley Rocha', u'*****@*****.**'),
    ])
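# For reference, the dict shape parser.get_task() is expected to return,
# inferred from the assertions in the tests above (a sketch of the
# contract, not the parser itself; values come from the fixtures):
example_task = {
    'type': 'leaf',                   # 'leaf' or 'parent'
    'name': u'Suporte APL Maio',
    'original_id': 2056,              # Netuno's own task id
    'project_id': 151,
    'description': '',
    'incubent': 'vinicius.botelho',   # key name as the parser spells it
    'users': [(u'Bruna Sodré', u'*****@*****.**')],  # (name, email) pairs
    # parent tasks additionally carry:
    # 'subtasks_ids': [2208, 2209, 2210],
}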
for count, task in enumerate(tasks):
    task_id = task['original_id']
    filename = 'netunomock/html/task%s.html' % task_id
    if not os.path.exists(filename):
        print "Importing task #%d of #%d to file %s" % (
            count + 1, len(tasks), filename)
        response = crawler.browser.open(task_url % task_id)
        content = response.read()
        # Dump the raw page to a scratch file for debugging.
        with open('tmp', 'w') as doc:
            doc.write(content)
        if 'Tarefas Filho' in content:
            # Parent task: follow the 'Tarefas Filho' (child tasks) tab
            # and mirror every subtask page as well. The inner counter
            # must not reuse 'count', or the outer progress numbers break.
            response = crawler.browser.follow_link(text='Tarefas Filho')
            content = response.read()
            task = get_task(content)
            for subcount, subid in enumerate(task['subtasks_ids']):
                subfilename = 'netunomock/html/task%s.html' % subid
                print "Importing subtask #%d of #%d to file %s" % (
                    subcount + 1, len(task['subtasks_ids']), subfilename)
                response = crawler.browser.open(task_url % subid)
                with open(subfilename, 'w') as doc:
                    doc.write(response.read())
        with open(filename, 'w') as doc:
            doc.write(content)
    else:
        print 'Already imported task #%d (%s)' % (count + 1, task_id)
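# The loop above expects a logged-in crawler, the task list and a
# task_url template to already be in scope. A minimal sketch of that
# setup, reusing the names from the test module; the URL pattern is an
# assumption, not taken from Netuno:
import os

crawler = NetunoCrawler(ROOT_URL)
crawler.login(username='******', password='******')
crawler.go_to_all_tasks()
tasks = get_list_of_partial_tasks(crawler.content)
task_url = ROOT_URL + '/task?id=%s'  # hypothetical task-page pattern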