Example #1
    def test_select_companies(self):
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_all_companies()
        companies = get_companies(crawler.content)
        self.assertEqual(len(companies), 43)

        company = companies[0]
        self.assertEquals(company['name'], "Ambar Tec")
        self.assertEquals(company['original_id'], 4)
        self.assertEquals(company['description'], "")

        company = companies[1]
        self.assertEquals(company['name'], "Anprotec")
        self.assertEquals(company['original_id'], 16)

        company = companies[-1]
        self.assertEquals(company['name'], "Zilics")
        self.assertEquals(company['original_id'], 26)
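These test methods presumably live inside a unittest.TestCase subclass. A minimal scaffold for running them could look like the sketch below; the import path, the placeholder ROOT_URL, and the class name are assumptions, not part of the original project.

import unittest

# Hypothetical import path: NetunoCrawler and the get_* parsers belong to the
# project under test and their real module names are not shown in the examples.
from crawler import NetunoCrawler, get_companies, get_users

ROOT_URL = 'http://netuno.example.com'  # placeholder URL


class NetunoCrawlerTest(unittest.TestCase):

    def test_login(self):
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)


if __name__ == '__main__':
    unittest.main()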
Example #2
    def test_select_user_from_companies(self):
        crawler = NetunoCrawler(ROOT_URL)
        crawler.login(username='******', password='******')
        self.assertTrue(crawler.logged_in)

        crawler.go_to_all_companies()
        companies = get_companies(crawler.content)
        self.assertEqual(len(companies), 43)

        company = companies[0]
        crawler.go_to_users_from_company(company['original_id'])
        users = get_users(crawler.content)
        self.assertEqual(len(users), 1)

        user = users[0]
        self.assertEqual(user['original_id'], 20)
        self.assertEqual(user['username'], 'lamatuzzi')
        self.assertEqual(user['first_name'], 'Luciano')
        self.assertEqual(user['last_name'], 'Teixeira')
        self.assertEqual(user['middle_name'], 'Amatuzzi')

        # SEA
        company = companies[26]
        crawler.go_to_users_from_company(company['original_id'])
        users = get_users(crawler.content)
        self.assertEqual(len(users), 60)

        user = users[0]
        self.assertEqual(user['original_id'], 13)
        self.assertEqual(user['username'], 'mabreu')
        self.assertEqual(user['first_name'], 'Marianne')
        self.assertEqual(user['last_name'], 'Abreu')
        self.assertEqual(user['middle_name'], '')

        user = users[59]
        self.assertEqual(user['original_id'], 6)
        self.assertEqual(user['username'], 'msousa')
        self.assertEqual(user['first_name'], 'Marcelo')
        self.assertEqual(user['last_name'], 'Zouza')
        self.assertEqual(user['middle_name'], '')
Example #3
import threading
import traceback
from datetime import datetime

# NetunoCrawler, the get_* parsers, ImportedEntity, Task and the
# ORGANIZATIONS/PROJECTS/EMPLOYEES/TASKS constants are defined elsewhere in the
# project; their imports are omitted in this snippet.


class Importer(object):

    def __init__(self, url, username, password):
        self.crawler = NetunoCrawler(url)
        self.lock = threading.Lock()
        self.username = username
        self.password = password
        self.already_done = []
        self.is_running = False
        self.logfile = None

    @open_logfile
    def import_all(self, url=None, username=None, password=None):
        t = datetime.now().strftime("%Y%m%d%H%M%S")
        if self.is_running: return
        with self.lock:
            self.is_running = True
            if url: self.crawler.url = url
            if username: self.username = username
            if password: self.password = password
            self.already_done = []
            try:
                self.import_organizations()
                self.import_projects()
                self.import_employees()
                self.import_tasks()
            finally:
                self.is_running = False

    def sign_in(self):
        self.crawler.login(self.username, self.password)

    @open_logfile
    def import_organizations(self):
        self.sign_in()
        self.crawler.go_to_all_companies()
        companies = get_companies(self.crawler.content)
        ImportedEntity.import_companies_as_organizations(companies)
        self.already_done.append(ORGANIZATIONS)
        self.logfile.write('Organizations imported\n')

    @open_logfile
    def import_employees(self):
        self.sign_in()
        companies = ImportedEntity.objects.filter(category='C')
        for company in companies:
            self.crawler.go_to_users_from_company(company.original_id)
            users = get_users(self.crawler.content)
            ImportedEntity.import_users_as_employees(users)
        self.already_done.append(EMPLOYEES)
        self.logfile.write('Employees imported\n')

    @open_logfile
    def import_projects(self):
        self.sign_in()
        self.crawler.go_to_all_projects()
        projects = get_projects(self.crawler.content)
        ImportedEntity.import_projects(projects)
        self.already_done.append(PROJECTS)
        self.logfile.write('Projects imported\n')

    @open_logfile
    def import_tasks(self, task_ids=None, parent=None):
        if task_ids is None:
            self.sign_in()
            self.crawler.go_to_all_tasks()
            partial_task_ids = [
                    task['original_id'] 
                    for task in get_list_of_partial_tasks(self.crawler.content)
            ]
        else:
            partial_task_ids = task_ids
        for task_id in partial_task_ids:
            self.logfile.write('Importing task (id=%d)\n' % task_id)
            try:
                self.crawler.go_to_task(task_id)
                task_dict = get_task(self.crawler.content)
                ImportedEntity.import_task(task_dict, parent)
                entity = ImportedEntity.objects.get(
                    original_id=task_dict['original_id'], category='T')
                task = Task.objects.get(id=entity.new_id)
                if task_dict['type'] == 'parent':
                    self.import_tasks(task_dict['subtasks_ids'], parent=task)
            except Exception:
                self.logfile.write('Task (id=%d) not imported because of error:\n' % task_id)
                traceback.print_exc(file=self.logfile)
                self.logfile.write('URL: ' + self.crawler.browser.geturl() + '\n')
                self.logfile.write('Content: ' + self.crawler.content + '\n')
            else:
                self.logfile.write('Task (id=%d) and subtasks imported\n' % task_id)
        if task_ids is None:
            self.already_done.append(TASKS)
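The open_logfile decorator used above is not shown in these examples. A minimal sketch, assuming it only has to open a per-run log file, expose it as self.logfile, and close it again afterwards; the filename pattern and the reuse of an already open file are guesses based on how the decorated methods call one another (import_all calls the other decorated import_* methods).

import functools
from datetime import datetime


def open_logfile(method):
    # Hypothetical implementation: bind a log file to self.logfile for the
    # duration of the call, reusing the file if a decorated caller (such as
    # import_all) already opened one.
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.logfile is not None:
            # A decorated caller already opened the log file; just run the method.
            return method(self, *args, **kwargs)
        filename = datetime.now().strftime('import-%Y%m%d%H%M%S.log')
        self.logfile = open(filename, 'w')
        try:
            return method(self, *args, **kwargs)
        finally:
            self.logfile.close()
            self.logfile = None
    return wrapper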