Example #1
    async def test_execute__no_retry(self) -> None:
        """Should not retry failed tasks"""
        queue = AsyncTaskQueue(self.logger, retry_failures=False)
        m_callable_fail_succeed = AsyncMethodMock(
            side_effect=[m_Exception("This is a failed call"), "success"])
        m_callable_succeed_fail = AsyncMethodMock(
            side_effect=["success",
                         m_Exception("This is a failed call")])

        queue.enqueue([
            AsyncTask(m_callable,
                      "some arg"),  # Task should execute successfully
            AsyncTask(
                m_callable_fail_succeed),  # Task should fail with no retry
            AsyncTask(m_callable, "some arg", fail=True),  # Task should fail
            AsyncTask(
                m_callable_succeed_fail),  # Sanity check, task should succeed
        ])

        await queue.execute()
        self.assertEqual(
            len(queue.task_queue),
            0,
            msg=
            "Expected task queue to be empty when all tasks have been executed",
        )
        self.assertEqual(len(queue.retry_task_queue),
                         0,
                         msg="Expected there to be no tasks to retry")
        self.assertEqual(len(queue.failed_tasks),
                         2,
                         msg="Expected 2 tasks to have failed")
        self.assertEqual(len(queue.succeeded_tasks),
                         2,
                         msg="Expected 2 tasks to have succeeded")
Example #2
    async def async_handle(self, *args, **kwargs):
        self.djwc = apps.apps.get_app_config('djwc')
        self.modules = dict()

        logger = logging.getLogger("foo")
        self.task_queue = AsyncTaskQueue(logger,
                                         batch_size=12,
                                         execution_timeout=300)
        self.task_queue.enqueue([
            AsyncTask(self.script_install, script)
            for t in self.djwc.components.values() for d in t
            for script in d.values()
        ])
        print(f'Ensuring all dependencies extracted in {self.djwc.static} ...')
        await self.task_queue.execute()

        self.patches = []
        self.task_queue = AsyncTaskQueue(logger,
                                         batch_size=60,
                                         execution_timeout=300)
        self.task_queue.enqueue([
            AsyncTask(self.script_patch, script)
            for t in self.djwc.components.values() for d in t
            for script in d.values()
        ])
        print('Ensuring all scripts have patched imports ...')
        await self.task_queue.execute()
Example #3
    async def test_execute__failed_tasks(self) -> None:
        """Should fail to complete all tasks"""
        queue = AsyncTaskQueue(self.logger)
        queue.enqueue([
            AsyncTask(m_callable, "some arg", fail=True),
            AsyncTask(m_callable, "another arg", fail=True),
        ])
        await queue.execute()

        self.assertEqual(
            len(queue.task_queue),
            0,
            msg=
            "Expected task queue to be empty when all tasks have been executed",
        )
        self.assertEqual(
            len(queue.retry_task_queue),
            0,
            msg="Expected there to be no more retry tasks",
        )
        self.assertEqual(len(queue.failed_tasks),
                         2,
                         msg="Expected all tasks to have failed")
        self.assertEqual(
            len(queue.succeeded_tasks),
            0,
            msg="Expected no tasks to have been executed successfully",
        )
Example #4
    async def test_execute__successful_tasks__batch_size_gt_num_tasks(
            self) -> None:
        """Should successfully execute all tasks"""
        queue = AsyncTaskQueue(self.logger, batch_size=10)
        queue.enqueue([
            AsyncTask(m_callable, "some arg", kwarg="some kwarg"),
            AsyncTask(m_callable, "another arg", kwarg="another kwarg"),
        ])
        await queue.execute()

        self.assertEqual(
            len(queue.task_queue),
            0,
            msg=
            "Expected task queue to be empty when all tasks have been executed",
        )
        self.assertEqual(len(queue.retry_task_queue),
                         0,
                         msg="Expected there to be no retry tasks")
        self.assertEqual(len(queue.failed_tasks),
                         0,
                         msg="Expected there to be no failed tasks")
        self.assertEqual(
            len(queue.succeeded_tasks),
            2,
            msg="Expected all tasks to have been executed successfully",
        )
Example #5
    async def test_enqueue(self) -> None:
        """Should be able to enqueue multiple tasks and maintain order"""
        queue = AsyncTaskQueue(self.logger)
        queue.enqueue([AsyncTask(m_callable, 0), AsyncTask(m_callable, 1)])
        queue.enqueue([AsyncTask(m_callable, 2), AsyncTask(m_callable, 3)])
        self.assertEqual(len(queue.task_queue),
                         4,
                         msg="Expected 4 tasks to be enqueued")
        self.assertTrue(
            queue.task_queue[0]._args == (3, )
            and queue.task_queue[1]._args == (2, )
            and queue.task_queue[2]._args == (1, )
            and queue.task_queue[3]._args == (0, ),
            msg="Expected tasks enqueued to be in FIFO order",
        )
Example #6
    async def test_execute__use_concurrency_control(self) -> None:
        """Shouldn't break anything"""
        queue = AsyncTaskQueue(self.logger, batch_size=2, use_semaphore=True)
        m_callable_fail_succeed = AsyncMethodMock(
            side_effect=[m_Exception("This is a failed call"), "success"])

        queue.enqueue([
            AsyncTask(m_callable,
                      "some arg"),  # Task should execute successfully
            AsyncTask(m_callable_fail_succeed
                      ),  # Task should first fail then succeed on retry
            AsyncTask(m_callable, "some arg", fail=True),  # Task should fail
        ])

        await queue.execute()

        self.assertTrue(
            isinstance(queue._semaphore, asyncio.BoundedSemaphore),
            msg="Expected semaphore to have been initiated",
        )
        self.assertEqual(
            len(queue.task_queue),
            0,
            msg=
            "Expected task queue to be empty when all tasks have been executed",
        )
        self.assertEqual(
            len(queue.retry_task_queue),
            0,
            msg="Expected there to be no more tasks to retry",
        )
        self.assertEqual(
            len(queue.failed_tasks),
            1,
            msg="Expected 1 task to have failed after retrying",
        )
        self.assertEqual(
            len(queue.succeeded_tasks),
            2,
            msg="Expected 2 tasks to have succeeded after retrying",
        )
Example #7
    async def test_execute__retry(self) -> None:
        """Should retry and update queues"""
        queue = AsyncTaskQueue(self.logger)
        m_callable_fail_succeed = AsyncMethodMock(
            side_effect=[m_Exception("This is a failed call"), "success"])
        m_callable_succeed_fail = AsyncMethodMock(
            side_effect=["success",
                         m_Exception("This is a failed call")])

        queue.enqueue([
            AsyncTask(m_callable,
                      "some arg"),  # Task should execute successfully
            AsyncTask(m_callable_fail_succeed
                      ),  # Task should first fail then succeed on retry
            AsyncTask(m_callable, "some arg", fail=True),  # Task should fail
            AsyncTask(
                m_callable_succeed_fail),  # Sanity check, task should succeed
        ])

        await queue.execute()
        self.assertEqual(
            len(queue.task_queue),
            0,
            msg=
            "Expected task queue to be empty when all tasks have been executed",
        )
        self.assertEqual(
            len(queue.retry_task_queue),
            0,
            msg="Expected there to be no more retry tasks",
        )
        self.assertEqual(
            len(queue.failed_tasks),
            1,
            msg="Expected only 1 task to have failed after retrying",
        )
        self.assertEqual(
            len(queue.succeeded_tasks),
            3,
            msg="Expected 3 tasks to have succeeded after retrying",
        )
Example #8
    async def test_execute__timeout(self) -> None:
        """Should not error out on timing out

        1 task executes due to behavior of asyncio.wait_for. See note in AsyncTaskQueue.execute
        for more details.
        """
        queue = AsyncTaskQueue(self.logger, execution_timeout=0.5)

        async def m_callable_with_sleep(arg, fail=False):
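            # `sleep` is assumed to be the blocking time.sleep, so the task
            # body runs past the 0.5 second execution_timeout.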
            sleep(1)
            if fail:
                raise m_Exception()
            return "success"

        queue.enqueue([
            AsyncTask(m_callable_with_sleep, 1),
            AsyncTask(m_callable_with_sleep, 2, fail=True),
        ])
        await queue.execute()

        self.assertEqual(len(queue.task_queue),
                         0,
                         msg="Expected task queue to be empty")
        self.assertEqual(len(queue.retry_task_queue),
                         0,
                         msg="Expected there to be no retry tasks")
        self.assertEqual(len(queue.failed_tasks),
                         0,
                         msg="Expected there to be no failed tasks")
        self.assertEqual(
            len(queue.succeeded_tasks),
            1,
            msg="Expected only 1 task to have been executed successfully",
        )
        self.assertEqual(
            queue.succeeded_tasks[0]._args,
            (1, ),
            msg="Expected first task enqueued to have been executed",
        )
Example #9
    async def test_dequeue(self) -> None:
        """Should be able to dequeue multiple tasks and maintain order"""
        queue = AsyncTaskQueue(self.logger)
        queue.enqueue([
            AsyncTask(m_callable, 0),
            AsyncTask(m_callable, 1),
            AsyncTask(m_callable, 2),
            AsyncTask(m_callable, 3),
        ])

        tasks = queue.dequeue(2)
        self.assertEqual(len(tasks),
                         2,
                         msg="Expected 2 tasks to be returned by dequeuing")
        self.assertTrue(
            tasks[0]._args == (1, ) and tasks[1]._args == (0, ),
            msg="Expected tasks dequeued to be in FIFO order",
        )

        self.assertEqual(
            len(queue.task_queue),
            2,
            msg="Expected 2 tasks to be in task queue after dequeuing 2",
        )
        self.assertTrue(
            queue.task_queue[0]._args == (3, )
            and queue.task_queue[1]._args == (2, ),
            msg="Expected task queue to be in FIFO order",
        )

        # Check that trying to dequeue more tasks than exists doesn't break anything
        tasks = queue.dequeue(3)
        self.assertEqual(
            len(tasks),
            2,
            msg="Expected remaining tasks to be returned by dequeuing")
        self.assertEqual(
            len(queue.task_queue),
            0,
            msg="Expected task queue to be empty after dequeuing all tasks",
        )
Example #10
class Command(BaseCommand):
    help = 'Download registered webcomponents'

    def handle(self, *args, **kwargs):
        asyncio.run(self.async_handle(*args, **kwargs))

    async def async_handle(self, *args, **kwargs):
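        # Two phases: install every registered component script from npm,
        # then patch the imports in the installed scripts so they resolve
        # through STATIC_URL.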
        self.djwc = apps.apps.get_app_config('djwc')
        self.modules = dict()

        logger = logging.getLogger("foo")
        self.task_queue = AsyncTaskQueue(logger,
                                         batch_size=12,
                                         execution_timeout=300)
        self.task_queue.enqueue([
            AsyncTask(self.script_install, script)
            for t in self.djwc.components.values() for d in t
            for script in d.values()
        ])
        print(f'Ensuring all dependencies extracted in {self.djwc.static} ...')
        await self.task_queue.execute()

        self.patches = []
        self.task_queue = AsyncTaskQueue(logger,
                                         batch_size=60,
                                         execution_timeout=300)
        self.task_queue.enqueue([
            AsyncTask(self.script_patch, script)
            for t in self.djwc.components.values() for d in t
            for script in d.values()
        ])
        print('Ensuring all scripts have patched imports ...')
        await self.task_queue.execute()

    def resolve(self, target, parent=None):
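        # Map a module specifier to a concrete file under self.djwc.static:
        # relative specifiers are resolved against the parent module, package
        # directories through their package.json entry point, and bare paths
        # get a '.js' suffix appended.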
        print('RESOLVE ', target, parent)
        if str(target).startswith('.'):
            # resolve based on parent module
            return self.resolve(
                (self.djwc.static / parent / '..' / target).resolve())

        target = self.djwc.static / target

        if target.is_dir():
            # resolve directory entry point from package.json ('module', then
            # 'main'), falling back to index.js
            filename = 'index.js'
            pkg = target / 'package.json'
            if pkg.exists():
                with open(pkg, 'r') as f:
                    pkg = json.loads(f.read())
                if 'module' in pkg:
                    filename = pkg['module']
                elif 'main' in pkg:
                    filename = pkg['main']
            return target / filename

        elif not str(target).endswith('.js'):
            # append .js on filenames
            js = Path(str(target) + '.js')
            if js.exists():
                return js

        return target

    async def script_patch(self, script, parent=None):
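        # Rewrite the import/from statements in the script so they point at
        # the copies served under STATIC_URL/djwc/, and enqueue every
        # dependency encountered so it gets patched as well.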
        if script in self.patches:
            return
        self.patches.append(script)
        target = self.resolve(script, parent)
        print('MODULE ', script)
        print('PATCH ', target)

        with open(target, 'r') as f:
            contents = f.read()

        results = {
            i[2]: i[1]
            for i in re.findall(
                r"""(import|from)\s(['"])([^'"]*)['"]""",
                contents,
            )
        }
        for dependency, quote in results.items():
            if dependency.startswith(settings.STATIC_URL):
                dependency = dependency.replace(
                    f'{settings.STATIC_URL}djwc/',
                    '',
                )
                self.task_queue.enqueue(
                    [AsyncTask(
                        self.script_patch,
                        dependency,
                        script,
                    )])
                continue

            new_path = self.resolve(dependency, script)
            new_imp = str(new_path)[len(str(self.djwc.static) + '/'):]
            new_url = f'{settings.STATIC_URL}djwc/{new_imp}'
            print('DEPENDENCY ' + dependency)
            print('PATH ' + str(new_path))
            print('IMP ' + new_imp)
            print('URL ' + new_url)
            contents = contents.replace(
                quote + dependency + quote,
                quote + new_url + quote,
            )

            self.task_queue.enqueue(
                [AsyncTask(
                    self.script_patch,
                    new_imp,
                    script,
                )])

        with open(target, 'w') as f:
            f.write(contents)

    async def script_install(self, name):
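        # Find the npm package that owns `name` by probing progressively
        # shorter prefixes of the path against the registry, extract its
        # tarball under self.djwc.static, then enqueue its dependencies.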
        parts = name.split('/')
        if name.endswith('.js'):
            parts = parts[:-1]

        tests = []
        async with httpx.AsyncClient() as client:
            while parts:
                tests.append(
                    client.get(
                        f'https://registry.npmjs.org/{"/".join(parts)}/'))
                parts.pop()
        results = await asyncio.gather(*tests)
        for result in results:
            if result.status_code == 200:
                break
        if result.status_code != 200:
            print('Could not figure module for ' + name)
            return

        module = result.json()
        if 'name' not in module:
            import sys
            print('NPM module not found', name)
            sys.exit(1)
        if module['name'] in self.modules:
            return
        self.modules[module['name']] = module

        latest = module['dist-tags']['latest']
        url = module['versions'][latest]['dist']['tarball']

        target = self.djwc.static / module['name']
        if not target.exists():
            print(name + ' installing ...')
            os.makedirs(target)
            temp = self.djwc.static / url.split('/')[-1]
            cmd = ' && '.join([
                shlex.join(['cd', str(target)]),
                shlex.join(['wget', url]),
                shlex.join(['tar', 'xvzf',
                            url.split('/')[-1], '--strip=1']),
                shlex.join(['rm', '-rf', url.split('/')[-1]]),
            ])
            proc = await asyncio.create_subprocess_shell(
                cmd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
            )
            stdout, stderr = await proc.communicate()
            if proc.returncode == 0:
                print(f'{name} extract success !')
            else:
                print(f'[{cmd!r} exited with {proc.returncode}]')

        with open(os.path.join(target, 'package.json'), 'r') as f:
            package = f.read()
        package = json.loads(package)

        self.task_queue.enqueue([
            AsyncTask(self.script_install, script)
            for script, version in package.get('dependencies', {}).items()
        ])
Example #11
    async def test_execute__no_tasks(self) -> None:
        """Should not error out"""
        queue = AsyncTaskQueue(self.logger)
        await queue.execute()