Example 1
def ws_error_task(self, number):
    progress_recorder = WebSocketProgressRecorder(self)
    for i in range(number):
        time.sleep(.1)
        progress_recorder.set_progress(i + 1, number)
        if i == int(number / 2):  # fail halfway through to demonstrate error reporting
            raise StopIteration('We broke it!')
    return random() * 1000
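The task decorator and imports are stripped from these listings. A minimal sketch of how a bound task like the one above would typically be declared and queued; the import path and decorator are assumptions based on celery-progress's websockets backend, not part of the original example:

import time
from random import random

from celery import shared_task
from celery_progress.websockets.backend import WebSocketProgressRecorder


@shared_task(bind=True)  # bind=True makes Celery pass the task instance as `self`
def ws_demo_task(self, number):
    progress_recorder = WebSocketProgressRecorder(self)
    for i in range(number):
        time.sleep(.1)
        progress_recorder.set_progress(i + 1, number)  # push a progress update over the websocket
    return int(random() * 10)


# Queue the task; the returned task id is what clients subscribe to for updates.
# result = ws_demo_task.delay(20)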
Example 2
def product_upload_task(self, file):
    progress_recorder = WebSocketProgressRecorder(self)
    file = default_storage.open(file, 'r').read()
    csv_reader = csv.reader(file.strip().split('\n'))
    data = list(csv_reader)
    cols, data = data[0], data[1:]
    total_count = len(data)
    result = 0
    try:
        for rows in chunked(data, MAX_DB_INSERTIONS):
            add_or_update_bulk_product(rows, cols)
            result += len(rows)
            progress_recorder.set_progress(result, total_count)
    except Exception as e:
        logging.error(f"Error while running the task: {e}")
    return result
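Example 2 relies on a chunked helper and a MAX_DB_INSERTIONS constant that are not shown. A minimal stand-in sketch, with an assumed batch size (more_itertools.chunked provides equivalent batching):

MAX_DB_INSERTIONS = 500  # assumed batch size; tune to your database


def chunked(rows, size):
    # Yield consecutive slices of at most `size` rows so each bulk insert stays within the limit.
    for start in range(0, len(rows), size):
        yield rows[start:start + size]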
Example 3
def create_pages(self, data: dict, uuid: str):
    """
    Take formatted JSON log data and save it to a log.
    :param self: Task instance, supplied by Celery.
    :param data: Formatted data.
    :param uuid: Log uuid.
    :return: Log uuid.
    """
    progress = WebSocketProgressRecorder(self)

    messages = data.pop('messages')

    batch_list = []
    batches = range(0, len(messages), 1000)
    total = len(batches)
    for count, batch in enumerate(batches):
        progress.set_progress(count, total)
        # [[0, 1, 2...], [1000, 1001, 1002...], [2000, 2001, 2002...]...]
        batch_list.append(messages[batch:batch + 1000])  # split messages into batches of 1000

    # Wait for the Log row with this uuid to exist before attaching data to it.
    while not Log.objects.filter(uuid=uuid).exists():
        time.sleep(1)
    log = Log.objects.update_or_create(
        uuid=uuid, defaults={'users': data.pop('users')}
    )[0]
    pages = Page.objects.bulk_create([
        Page(log=log, messages=batch, index=index)
        for index, batch in enumerate(batch_list)
    ])
    log.pages.set(pages)
    progress.set_progress(total, total)

    return uuid
Example 4
def parse_text(self, log_type: str, content: str):
    """
    Convert raw log content into usable data.
    :param self: Task instance, supplied by Celery.
    :param log_type: Log type.
    :param content: Log content.
    :return: Parsed data.
    """
    if '\r' in content:  # normalise Windows-style line endings
        content = re.sub('\r\n', '\n', content)
    if log_type in rowboat_types:
        log_type = 'rowboat'
    parser = getattr(handlers, log_type)  # look up the handler function named after the log type
    message_array = parser(content, WebSocketProgressRecorder(self))
    if not message_array:
        raise IndexError('No messages match this pattern!')
    return message_array
Example 5
def parse_json(self, json_data: dict):
    """
    Convert raw JSON into finished message objects.
    :param self: Task instance, supplied by Celery.
    :param json_data: Raw JSON.
    :return: Parsed data.
    """
    messages = []
    bad_messages = []
    data = {}

    # De-duplicate author dicts by hashing each one as a tuple of its items.
    users = [dict(t) for t in {tuple(d['author'].items()) for d in json_data}]

    total = len(json_data)
    progress = WebSocketProgressRecorder(self)

    for count, msg in enumerate(json_data):
        msg = MessageSerializer(data=msg, context={'users': users})
        if msg.is_valid():
            messages.append(msg.data)
        else:
            bad_messages.append((msg.initial_data, msg.errors))

        progress.set_progress(count, total)

    def sort_chronological(value):
        return int(value.get('id') or 0) or value.get('timestamp')

    if messages and any([messages[0].get('timestamp'), messages[0].get('id')]):
        messages.sort(key=sort_chronological)
    data['messages'] = messages

    def sort_alphabetical(value):
        return value['username']

    users.sort(key=sort_alphabetical)
    data['users'] = users

    progress.set_progress(total, total)

    return data
Example 6
def parse_json(self, json_data: dict):
    """
    Convert raw JSON into finished message objects.
    :param self: Task instance, supplied by Celery.
    :param json_data: Raw JSON.
    :return: Parsed data.
    """
    messages = []
    bad_messages = []
    data = {}

    # Index authors by id, falling back to username#discriminator when the id is missing.
    _users = {
        a['id'] or f'{a["username"]}#{a["discriminator"]}': a
        for a in [msg.get('author', unknown_author) for msg in json_data]
    }

    total = len(json_data)
    progress = WebSocketProgressRecorder(self)

    for count, msg in enumerate(json_data):
        msg = MessageSerializer(data=msg, context={'users': _users})
        if msg.is_valid():
            messages.append(msg.data)
        else:
            bad_messages.append((msg.initial_data, msg.errors))

        progress.set_progress(count, total)

    if messages and any([messages[0].get('timestamp'), messages[0].get('id')]):
        messages.sort(key=lambda value: int(value.get('id') or 0) or value.get('timestamp'))
    data['messages'] = messages

    users = list(_users.values())
    users.sort(key=lambda value: value['username'])
    data['users'] = users

    progress.set_progress(total, total)

    return data
Example 7
def ws_task(self, *args, number):
    progress_recorder = WebSocketProgressRecorder(self)
    for i in range(number):
        time.sleep(.1)
        progress_recorder.set_progress(i + 1, number)
    return int(random() * 10)
Example 8
def task_postrun_handler(task_id, **kwargs):
    """Runs after a task has finished. This will be used to push a websocket update for completed events.

    If the websockets version of this package is not installed, this will do nothing."""
    if WEBSOCKETS_AVAILABLE:
        WebSocketProgressRecorder.push_update(task_id)
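The handler in Example 8 only takes effect once it is connected to Celery's task_postrun signal. A minimal wiring sketch, assuming task_postrun_handler is importable from the module where it is defined:

from celery.signals import task_postrun

task_postrun.connect(task_postrun_handler)  # push the final websocket update after every task finishes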