Example #1
0
 def load_django(self):
     """Bootstrap the Django environment for AWX inside this process."""
     # Reverse-engineered from manage() in
     # /var/lib/awx/venv/awx/lib/python3.6/site-packages/awx/__init__.py
     import awx
     import django
     # Order mirrors manage(): prepare_env() runs before django.setup().
     awx.prepare_env()
     django.setup()
Example #2
0
File: firehose.py  Project: SahilSyal/awx2
def generate_jobs(jobs, batch_size):
    """Bulk-insert `jobs` synthetic Job rows in batches of `batch_size`.

    Cycles round-robin through existing JobTemplates, copying each
    template's overlapping field values onto the new Jobs.

    Returns the last Job inserted, or None when `jobs` <= 0.
    Raises RuntimeError when no JobTemplates exist.
    """
    print(f'inserting {jobs} job(s)')
    sys.path.insert(0, pkg_resources.get_distribution('awx').module_path)
    from awx import prepare_env
    prepare_env()
    setup_django()

    from awx.main.models import UnifiedJob, Job, JobTemplate
    # Fields declared on Job itself, minus those inherited from UnifiedJob;
    # these are the columns the secondary raw INSERT below must fill.
    fields = list(set(Job._meta.fields) - set(UnifiedJob._meta.fields))
    job_field_names = {f.attname for f in fields}
    # extra unified job field names from base class
    job_field_names.update(('name', 'created_by_id', 'modified_by_id'))
    jt_count = JobTemplate.objects.count()
    if not jt_count:
        # avoid an opaque ZeroDivisionError in `jt_pos % jt_count` below
        raise RuntimeError('no JobTemplates exist; cannot generate jobs')

    def make_batch(N, jt_pos=0):
        # Pick the next JobTemplate (round-robin); retry on the occasional
        # IndexError caused by a race with concurrent deletes.
        jt = None
        while not jt:
            try:
                jt = JobTemplate.objects.all()[jt_pos % jt_count]
            except IndexError as e:
                # seems to happen every now and then due to some race condition
                print('Warning: IndexError on {} JT, error: {}'.format(
                    jt_pos % jt_count, e
                ))
            jt_pos += 1
        # Copy the template's concrete, truthy values for overlapping fields.
        jt_defaults = {
            f.attname: getattr(jt, f.attname)
            for f in JobTemplate._meta.get_fields()
            if f.concrete and f.attname in job_field_names and getattr(jt, f.attname)
        }
        jt_defaults['job_template_id'] = jt.pk
        jt_defaults['unified_job_template_id'] = jt.pk  # populated by save method

        batch = [
            Job(
                status=STATUS_OPTIONS[i % len(STATUS_OPTIONS)],
                started=now(), created=now(), modified=now(), finished=now(),
                elapsed=0., **jt_defaults)
            for i in range(N)
        ]
        # bulk_create writes the UnifiedJob base rows; the raw INSERT below
        # writes the matching Job child rows in a single statement.
        ujs = UnifiedJob.objects.bulk_create(batch)
        query = InsertQuery(Job)
        query.insert_values(fields, ujs)
        with connection.cursor() as cursor:
            # keep `query` bound to the InsertQuery; use a new name for the SQL
            sql, params = query.sql_with_params()[0]
            cursor.execute(sql, params)
        return ujs[-1], jt_pos

    created = None  # defined even when `jobs` <= 0 (was an UnboundLocalError)
    i = 1
    jt_pos = 0
    s = time()
    while jobs > 0:
        s_loop = time()
        print('running batch {}, runtime {}'.format(i, time() - s))
        created, jt_pos = make_batch(min(jobs, batch_size), jt_pos)
        print('took {}'.format(time() - s_loop))
        i += 1
        jobs -= batch_size
    return created
Example #3
0
def generate_jobs(jobs, batch_size=1000):
    """Bulk-insert `jobs` canceled Job rows modeled on the first JobTemplate.

    `batch_size` controls how many rows go into each round trip; the
    default of 1000 matches the previously hard-coded value.

    Returns the last Job inserted, or None when `jobs` <= 0.
    Raises RuntimeError when no JobTemplate exists.
    """
    print(f'inserting {jobs} job(s)')
    sys.path.insert(0, pkg_resources.get_distribution('awx').module_path)
    from awx import prepare_env
    prepare_env()
    setup_django()

    from awx.main.models import UnifiedJob, Job, JobTemplate
    # Fields declared on Job itself, minus those inherited from UnifiedJob;
    # these are the columns the secondary raw INSERT below must fill.
    fields = list(set(Job._meta.fields) - set(UnifiedJob._meta.fields))
    job_field_names = {f.attname for f in fields}
    jt = JobTemplate.objects.first()
    if jt is None:
        # otherwise getattr(None, ...) below raises a confusing AttributeError
        raise RuntimeError('no JobTemplates exist; cannot generate jobs')
    # Copy the template's editable, truthy values for overlapping fields.
    jt_defaults = {
        f.attname: getattr(jt, f.attname)
        for f in JobTemplate._meta.get_fields()
        if f.editable and f.attname in job_field_names
        and getattr(jt, f.attname)
    }
    jt_defaults['job_template_id'] = jt.pk

    def make_batch(N, **extra):
        # bulk_create writes the UnifiedJob base rows; the raw INSERT below
        # writes the matching Job child rows in a single statement.
        batch = [
            Job(status='canceled',
                created=now(),
                modified=now(),
                elapsed=0.,
                **extra) for i in range(N)
        ]
        ujs = UnifiedJob.objects.bulk_create(batch)
        query = InsertQuery(Job)
        query.insert_values(fields, ujs)
        with connection.cursor() as cursor:
            # keep `query` bound to the InsertQuery; use a new name for the SQL
            sql, params = query.sql_with_params()[0]
            cursor.execute(sql, params)
        return ujs[-1]

    created = None  # defined even when `jobs` <= 0 (was an UnboundLocalError)
    i = 1
    start = time()  # fixed: was reset every iteration, so "runtime" was always ~0
    while jobs > 0:
        batch_start = time()
        print('running batch {}, runtime {}'.format(i, time() - start))
        created = make_batch(min(jobs, batch_size), **jt_defaults)
        print('took {}'.format(time() - batch_start))
        i += 1
        jobs -= batch_size
    return created
Example #4
0
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import os
import logging
import django
from awx import __version__ as tower_version

# Prepare the AWX environment.
from awx import prepare_env, MODE
from channels.routing import get_default_application  # noqa

prepare_env()  # NOQA
"""
ASGI config for AWX project.

It exposes the ASGI callable as a module-level variable named ``channel_layer``.

For more information on this file, see
https://channels.readthedocs.io/en/latest/deploying.html
"""

if MODE == 'production':
    logger = logging.getLogger('awx.main.models.jobs')
    try:
        fd = open("/var/lib/awx/.tower_version", "r")
        if fd.read().strip() != tower_version:
            raise ValueError()
    except FileNotFoundError:
        pass
    except ValueError as e:
        logger.error(
Example #5
0
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

import logging
from awx import __version__ as tower_version

# Prepare the AWX environment.
from awx import prepare_env, MODE
prepare_env()

import django  # NOQA
from django.conf import settings  # NOQA
from django.urls import resolve  # NOQA
from django.core.wsgi import get_wsgi_application  # NOQA
import social_django  # NOQA
"""
WSGI config for AWX project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""

if MODE == 'production':
    logger = logging.getLogger('awx.main.models.jobs')
    try:
        fd = open("/var/lib/awx/.tower_version", "r")
        if fd.read().strip() != tower_version:
            raise Exception()
    except Exception:
Example #6
0
import kombu
import six
import uuid
import msgpack
import socket
import threading
import datetime
import jsonpickle

from asgiref.base_layer import BaseChannelLayer
from collections import deque
from kombu.pools import producers

import awx
awx.prepare_env()

from awx.main.models import ChannelGroup


class AMQPChannelLayer(BaseChannelLayer):
    def __init__(self,
                 url=None,
                 prefix='asgi:',
                 expiry=60,
                 group_expiry=86400,
                 capacity=100,
                 channel_capacity=None):
        super(AMQPChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,