Beispiel #1
0
 def __init__(self, url, username, password):
     """Set up an importer bound to the Netuno instance at *url*.

     Credentials are only stored here; no network access happens
     until an import method signs in.
     """
     self.username = username
     self.password = password
     self.crawler = NetunoCrawler(url)
     self.lock = threading.Lock()
     # Mutable run state: stages finished so far, whether a run is
     # active, and where progress gets logged.
     self.already_done = []
     self.is_running = False
     self.logfile = None
Beispiel #2
0
    def test_login(self):
        """Login/logout round trip: flag tracking and bad-credential error."""
        c = NetunoCrawler(ROOT_URL)
        self.assertFalse(c.logged_in)

        # A successful login turns the flag on...
        c.login(username='******', password='******')
        self.assertTrue(c.logged_in)

        # ...and logging out turns it back off.
        c.logout()
        self.assertFalse(c.logged_in)

        # Bad credentials raise and leave the crawler logged out.
        with self.assertRaises(AuthenticationException):
            c.login(username='******', password='******')
        self.assertFalse(c.logged_in)

        # A fresh, valid login still works afterwards.
        c.login(username='******', password='******')
        self.assertTrue(c.logged_in)
Beispiel #3
0
    def test_select_projects(self):
        """The all-projects page parses into the expected project dicts."""
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_all_projects()
        projects = get_projects(crawler.content)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual throughout.
        self.assertEqual(104, len(projects))

        # Spot-check the first, second and last parsed projects.
        project = projects[0]
        self.assertEqual(project['name'], "Feedbackme")
        self.assertEqual(project['original_id'], 116)
        self.assertEqual(project['company_id'], 1)
        self.assertEqual(project['description'], "")

        project = projects[1]
        self.assertEqual(project['name'], "SixPro")
        self.assertEqual(project['original_id'], 122)
        self.assertEqual(project['company_id'], 38)
        self.assertEqual(project['description'], "")

        project = projects[-1]
        self.assertEqual(project['name'], "Liferay")
        self.assertEqual(project['original_id'], 158)
        self.assertEqual(project['company_id'], 48)
        self.assertEqual(project['description'], "")
Beispiel #4
0
    def test_select_user_from_companies(self):
        """Users parse correctly for a one-user and a many-user company."""
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_all_companies()
        companies = get_companies(crawler.content)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual throughout.
        self.assertEqual(len(companies), 43)

        # The first company has exactly one user.
        company = companies[0]
        crawler.go_to_users_from_company(company['original_id'])
        users = get_users(crawler.content)
        self.assertEqual(len(users), 1)

        user = users[0]
        self.assertEqual(user['original_id'], 20)
        self.assertEqual(user['username'], 'lamatuzzi')
        self.assertEqual(user['first_name'], 'Luciano')
        self.assertEqual(user['last_name'], 'Teixeira')
        self.assertEqual(user['middle_name'], 'Amatuzzi')

        # SEA
        company = companies[26]
        crawler.go_to_users_from_company(company['original_id'])
        users = get_users(crawler.content)
        self.assertEqual(len(users), 60)

        user = users[0]
        self.assertEqual(user['original_id'], 13)
        self.assertEqual(user['username'], 'mabreu')
        self.assertEqual(user['first_name'], 'Marianne')
        self.assertEqual(user['last_name'], 'Abreu')
        self.assertEqual(user['middle_name'], '')

        user = users[59]
        self.assertEqual(user['original_id'], 6)
        self.assertEqual(user['username'], 'msousa')
        self.assertEqual(user['first_name'], 'Marcelo')
        self.assertEqual(user['last_name'], 'Zouza')
        self.assertEqual(user['middle_name'], '')
Beispiel #5
0
    def test_register_new_log(self):
        """Registering a work log posts every field for the given task."""
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_task_log_registration(2376)
        date = datetime(2011, 12, 31)
        crawler.register_log(date, 2.5,
                             '14h - 15h30: Making exporter for Netuno')
        # assertIn reports the haystack on failure, unlike
        # assertTrue(needle in haystack), which only prints False.
        self.assertIn('Task id: 2376', crawler.content)
        self.assertIn('Log creator: 1', crawler.content)
        self.assertIn('Date: 20111231', crawler.content)
        self.assertIn('Worked hours: 2.5', crawler.content)
        self.assertIn('Description: 14h - 15h30: Making exporter for Netuno',
                      crawler.content)
Beispiel #6
0
 def export_logs(self, wps, url, username, password):
     """Push every complete, not-yet-exported working period in *wps*
     to the Netuno instance at *url*, then record each as exported.
     """
     crawler = NetunoCrawler(url)
     if not crawler.logged_in:
         crawler.login(username, password)
     for wp in wps:
         # Skip periods already exported or still incomplete.
         if ExportedLog.is_exported(wp) or not wp.is_complete():
             continue
         # Map the local task id back to Netuno's original id.
         task_id = wp.executed_task.id
         entity = ImportedEntity.objects.get(category='T', new_id=task_id)
         crawler.go_to_task_log_registration(entity.original_id)
         crawler.register_log(wp.end, wp.total_time,
                              get_exported_description(wp))
         ExportedLog(working_period=wp).save()
Beispiel #7
0
    def test_access_task_new_log(self):
        """The log-registration page exposes the expected edit-form fields."""
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_task_log_registration(2376)
        form = BeautifulSoup(crawler.content).find('form', {'name': 'editFrm'})
        self.assertIsNotNone(form)
        # The form must carry a date input, an hours input and a
        # description textarea, all under Netuno's field names.
        self.assertIsNotNone(form.find('input', {'name': 'task_log_date'}))
        self.assertIsNotNone(form.find('input', {'name': 'task_log_hours'}))
        self.assertIsNotNone(
            form.find('textarea', {'name': 'task_log_description'}))
Beispiel #8
0
    def test_select_tasks(self):
        """The all-tasks page yields the expected partial-task list."""
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_all_tasks()
        tasks = get_list_of_partial_tasks(crawler.content)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # the rest of this test already uses assertEqual.
        self.assertEqual(5, len(tasks))

        task = tasks[0]
        self.assertEqual(task['type'], 'partial')
        self.assertEqual(task['original_id'], 2376)

        task = tasks[-1]
        self.assertEqual(task['type'], 'partial')
        self.assertEqual(task['original_id'], 1371)
Beispiel #9
0
    def test_select_companies(self):
        """The all-companies page parses into the expected company dicts."""
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_all_companies()
        companies = get_companies(crawler.content)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual throughout.
        self.assertEqual(len(companies), 43)

        # Spot-check the first, second and last parsed companies.
        company = companies[0]
        self.assertEqual(company['name'], "Ambar Tec")
        self.assertEqual(company['original_id'], 4)
        self.assertEqual(company['description'], "")

        company = companies[1]
        self.assertEqual(company['name'], "Anprotec")
        self.assertEqual(company['original_id'], 16)

        company = companies[-1]
        self.assertEqual(company['name'], "Zilics")
        self.assertEqual(company['original_id'], 26)
Beispiel #10
0
import os.path
import sys

from importer.crawler import NetunoCrawler
from importer.parser import get_companies, get_list_of_partial_tasks, get_task

# One-off scraping script: logs into Netuno and (in the disabled block
# below) would save company and per-company user pages as local HTML
# mocks under netunomock/html/.
# Usage: script.py <base_url> <username> <password>
# NOTE(review): the crawler is built with a hard-coded SEA URL while
# base_url comes from argv[1] -- looks like leftover; confirm which URL
# is intended before reusing this script.
crawler = NetunoCrawler('https://www.seatecnologia.com.br/netuno')
crawler.login(username=sys.argv[2], password=sys.argv[3])
base_url = sys.argv[1]
# URL templates (filled with a company/task id) for the pages the
# disabled block downloads.
company_url = base_url + '?m=companies&a=view&company_id=%s'
users_url = base_url + '?m=companies&a=view&company_id=%s&tab=3'
task_url = base_url + '?m=tasks&a=view&task_id=%s'
# The string below is Python-2-era download code, disabled by quoting.
'''crawler.go_to_all_companies()

companies = get_companies(crawler.content)
for count, company in enumerate(companies):
    id = company['original_id']
    filename = 'netunomock/html/company%s.html' % id
    print "Importing company #%d of #%d (%s) to file %s" % (count, len(companies), company['name'], filename)
    response = crawler.browser.open(company_url%id)
    doc = file(filename, 'w')
    doc.write(response.read())
    doc.close()

    filename = 'netunomock/html/company%s-users.html' % id
    print "Importing company #%d - users" % count
    response = crawler.browser.open(users_url%id)
    doc = file(filename, 'w')
    doc.write(response.read())
    doc.close()'''
Beispiel #11
0
class Importer(object):
    """Imports organizations, projects, employees and tasks from a
    remote Netuno instance into the local database via NetunoCrawler.

    A threading.Lock plus the is_running flag keep concurrent
    import_all runs from overlapping.
    """

    def __init__(self, url, username, password):
        """Store connection settings; no network access happens here."""
        self.crawler = NetunoCrawler(url)
        self.lock = threading.Lock()
        self.username = username
        self.password = password
        # Stages completed in the current run (ORGANIZATIONS, PROJECTS, ...).
        self.already_done = []
        self.is_running = False
        # Opened by the @open_logfile decorator around the import methods.
        self.logfile = None

    @open_logfile
    def import_all(self, url=None, username=None, password=None):
        """Run every import stage under the lock.

        Any non-None argument overrides the stored setting for this and
        subsequent runs.
        """
        # Fix: dropped an unused local (a discarded datetime-based
        # timestamp string) that was computed here.
        # NOTE(review): this early-out is checked outside the lock, so
        # two callers could both pass it; the lock below still
        # serializes the actual work.
        if self.is_running: return
        with self.lock:
            self.is_running = True
            if url: self.crawler.url = url
            if username: self.username = username
            if password: self.password = password
            self.already_done = []
            try:
                self.import_organizations()
                self.import_projects()
                self.import_employees()
                self.import_tasks()
            finally:
                # Always clear the flag, even when a stage raises.
                self.is_running = False

    def sign_in(self):
        """Log the crawler in with the stored credentials."""
        self.crawler.login(self.username, self.password)

    @open_logfile
    def import_organizations(self):
        """Import every Netuno company as a local organization."""
        self.sign_in()
        self.crawler.go_to_all_companies()
        companies = get_companies(self.crawler.content)
        ImportedEntity.import_companies_as_organizations(companies)
        self.already_done.append(ORGANIZATIONS)
        self.logfile.write('Organizations imported')

    @open_logfile
    def import_employees(self):
        """Import the users of every previously imported company."""
        self.sign_in()
        companies = ImportedEntity.objects.filter(category='C')
        for company in companies:
            self.crawler.go_to_users_from_company(company.original_id)
            users = get_users(self.crawler.content)
            ImportedEntity.import_users_as_employees(users)
        self.already_done.append(EMPLOYEES)
        self.logfile.write('Employees imported')

    @open_logfile
    def import_projects(self):
        """Import every Netuno project."""
        self.sign_in()
        self.crawler.go_to_all_projects()
        projects = get_projects(self.crawler.content)
        ImportedEntity.import_projects(projects)
        self.already_done.append(PROJECTS)
        self.logfile.write('Projects imported')

    @open_logfile
    def import_tasks(self, task_ids=None, parent=None):
        """Import tasks, recursing into subtasks of parent tasks.

        With task_ids None, signs in and imports every top-level
        partial task; otherwise imports exactly the given ids as
        children of *parent*. A failing task is logged with full
        context and skipped, never re-raised.
        """
        if task_ids is None:
            self.sign_in()
            self.crawler.go_to_all_tasks()
            partial_task_ids = [
                    task['original_id']
                    for task in get_list_of_partial_tasks(self.crawler.content)
            ]
        else:
            partial_task_ids = task_ids
        for task_id in partial_task_ids:
            self.logfile.write('Importing task (id=%d)\n' % task_id)
            try:
                self.crawler.go_to_task(task_id)
                task_dict = get_task(self.crawler.content)
                ImportedEntity.import_task(task_dict, parent)
                entity = ImportedEntity.objects.get(
                    original_id=task_dict['original_id'], category='T')
                task = Task.objects.get(id=entity.new_id)
                if task_dict['type'] == 'parent':
                    # Recurse so subtasks get linked to this task.
                    self.import_tasks(task_dict['subtasks_ids'], parent=task)
            except Exception:
                # Best-effort import: record the failure and move on.
                self.logfile.write('Task (id=%d) not imported because of error:\n' % task_id)
                traceback.print_exc(file=self.logfile)
                self.logfile.write('URL: ' + self.crawler.browser.geturl())
                self.logfile.write('Content: ' + self.crawler.content)
            else:
                self.logfile.write('Task (id=%d) and subtasks imported\n' % task_id)
        if task_ids is None:
            self.already_done.append(TASKS)
Beispiel #12
0
    def test_select_specific_task(self):
        """Drilling into tasks yields leaf and parent task details."""
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_all_tasks()
        tasks = get_list_of_partial_tasks(crawler.content)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # the rest of this test already uses assertEqual.
        self.assertEqual(5, len(tasks))

        partial_task = tasks[0]
        crawler.go_to_task(partial_task['original_id'])
        task = get_task(crawler.content)
        self.assertEqual(task['type'], u'leaf')
        self.assertEqual(task['original_id'], 2376)
        self.assertEqual(task['project_id'], 118)
        self.assertEqual(task['name'], u'Reuniões')

        partial_task = tasks[2]
        crawler.go_to_task(partial_task['original_id'])
        task = get_task(crawler.content)
        self.assertEqual(task['type'], u'parent')
        self.assertEqual(task['original_id'], 2207)
        self.assertEqual(task['project_id'], 108)
        self.assertEqual(task['name'], u'1o Sprint')

        partial_task = tasks[-1]
        crawler.go_to_task(partial_task['original_id'])
        task = get_task(crawler.content)
        self.assertEqual(task['type'], u'leaf')
        self.assertEqual(task['original_id'], 1371)
        self.assertEqual(task['project_id'], 77)
        self.assertEqual(task['name'], u'Desenvolvimento')