Example #1
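# Django view: builds a new Workflow for the logged-in user from the JSON
# payload submitted through ImportForm.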
def import_workflow(request):
    if request.method == "POST":
        form = ImportForm(request.POST)
        if form.is_valid():
            new_workflow = Workflow()
            new_workflow.user = request.user
            new_workflow.import_from_json(
                json.loads(form.cleaned_data['data']), {}, {})
            return redirect(new_workflow.get_absolute_url())
    else:
        form = ImportForm()
    return render(request, 'website/import_workflow.html', {"form": form})
Example #2
    def import_workflows(self, galaxy_url):
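        # Resolve the target Galaxy server (by URL or the one flagged as
        # current), then fetch its workflow list from the /api/workflows
        # endpoint and store matching entries as Workflow records.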
        if galaxy_url:
            galaxy_server, created = Server.objects.get_or_create(
                url=galaxy_url)
        else:
            try:
                galaxy_server = Server.objects.get(current=True)
            except Server.DoesNotExist:
                raise CommandError(
                    'Galaxy server does not exist, please use --galaxyurl')

        galaxy_key = GalaxyUser.objects.filter(
            galaxy_server=galaxy_server,
            anonymous=True)
        print(galaxy_key)
        workflows_url = '%s/%s/%s/?key=%s' % (
            galaxy_server.url,
            'api',
            'workflows',
            galaxy_key.first().api_key)

        # fetch the list of workflows from the Galaxy API
        connection = requests.get(workflows_url)
        print(workflows_url)
        print(connection.status_code)
        if connection.status_code == 200:
            wf_list = connection.json() or []
            for wf in wf_list:
                wfname = wf.get('name')
                wfid = wf.get('id')
                self.stdout.write(
                    self.style.SUCCESS(
                        "importing workflow %s" % (wfname)
                    )
                )
                if (re.search('oneclick', wfname, re.IGNORECASE) or
                        wfname in self.wfnames):
                    w = Workflow(
                        galaxy_server=galaxy_server,
                        id_galaxy=wfid,
                        name=wfname,
                        category='base',
                        description=wfname,
                        slug=slugify(wfname))
                    w.save()
        else:
            self.stdout.write("Problem while querying galaxy server")
Example #3
    def get_object(self, queryset=None, detail=True):
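        # Build an unsaved Workflow from the Galaxy API JSON for the id passed
        # in the URL kwargs (presumably the get_object hook of a detail view).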
        # load workflow
        wk_json = self.request.galaxy.workflows.show_workflow(
            workflow_id=self.kwargs['id'])
        wkname = wk_json.get('name')

        # create workflow
        wk_obj = Workflow(galaxy_server=self.request.galaxy_server,
                          id_galaxy=self.kwargs['id'],
                          name=wkname,
                          category='automaker',
                          description="Auto Generated Workflow",
                          slug=wkname)

        # add galaxy json information
        wk_obj.json = wk_json
        return wk_obj
Example #4
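# Flask view: either schedules a Task from the submitted scripts/workflows and
# selected nodes, or updates the display labels, then renders the requested
# network view with per-object property tables.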
def view(view_type):
    view_options_form = ViewOptionsForm(request.form)
    google_earth_form = GoogleEarthForm(request.form)
    scheduling_form = SchedulingForm(request.form)
    scheduling_form.scripts.choices = Script.choices()
    scheduling_form.workflows.choices = Workflow.choices()
    labels = {'node': 'name', 'link': 'name'}
    if 'script' in request.form:
        data = dict(request.form)
        selection = map(int, session['selection'])
        scripts = request.form.getlist('scripts')
        workflows = request.form.getlist('workflows')
        data['scripts'] = [get_obj(Script, name=name) for name in scripts]
        data['workflows'] = [
            get_obj(Workflow, name=name) for name in workflows
        ]
        data['nodes'] = [get_obj(Node, id=id) for id in selection]
        data['user'] = current_user
        task = Task(**data)
        db.session.add(task)
        db.session.commit()
        return redirect(url_for('tasks_blueprint.task_management'))
    elif 'view_options' in request.form:
        # update labels
        labels = {
            'node': request.form['node_label'],
            'link': request.form['link_label']
        }
    # for better performance, the view defaults to markercluster
    # if there are more than 2000 nodes
    view = 'leaflet' if len(Node.query.all()) < 2000 else 'markercluster'
    if 'view' in request.form:
        view = request.form['view']
    # clear the session's selected nodes
    session['selection'] = []
    return render_template(
        '{}_view.html'.format(view_type),
        filters=Filter.query.all(),
        view=view,
        scheduling_form=scheduling_form,
        view_options_form=view_options_form,
        google_earth_form=google_earth_form,
        labels=labels,
        names=pretty_names,
        subtypes=node_subtypes,
        node_table={
            obj:
            OrderedDict([(property, getattr(obj, property))
                         for property in type_to_public_properties[obj.type]])
            for obj in Node.query.all()
        },
        link_table={
            obj:
            OrderedDict([(property, getattr(obj, property))
                         for property in type_to_public_properties[obj.type]])
            for obj in Link.query.all()
        })
Example #5
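# Flask view: lists all tasks together with the scheduling and compare forms.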
def task_management():
    scheduling_form = SchedulingForm(request.form)
    scheduling_form.scripts.choices = Script.choices()
    scheduling_form.workflows.choices = Workflow.choices()
    tasks = Task.query.all()
    return render_template('task_management.html',
                           tasks=tasks,
                           compare_form=CompareForm(request.form),
                           scheduling_form=scheduling_form)
Example #6
    def import_workflows(self, galaxy_url):
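        # Variant of the importer above that skips workflows already present
        # (matched on their Galaxy id) before creating new Workflow records.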
        if galaxy_url:
            galaxy_server, created = Server.objects.get_or_create(
                url=galaxy_url)
        else:
            try:
                galaxy_server = Server.objects.get(current=True)
            except Server.DoesNotExist:
                raise CommandError(
                    'Galaxy server does not exist, please use --galaxyurl')

        galaxy_key = GalaxyUser.objects.filter(galaxy_server=galaxy_server,
                                               anonymous=True)
        print(galaxy_key)
        workflows_url = '%s/%s/%s/?key=%s' % (
            galaxy_server.url, 'api', 'workflows', galaxy_key.first().api_key)

        # fetch the list of workflows from the Galaxy API
        connection = requests.get(workflows_url)
        print(workflows_url)
        print(connection.status_code)
        if connection.status_code == 200:
            wf_list = connection.json() or []
            for wf in wf_list:
                wfname = wf.get('name')
                wfid = wf.get('id')
                self.stdout.write(
                    self.style.SUCCESS("importing workflow %s" % (wfname)))
                if (re.search('oneclick', wfname, re.IGNORECASE)
                        or wfname in self.wfnames):
                    try:
                        w = Workflow.objects.get(id_galaxy=wfid)
                        self.stdout.write("Workflow %s already present" %
                                          wfname)
                    except Workflow.DoesNotExist:
                        w = Workflow(galaxy_server=galaxy_server,
                                     id_galaxy=wfid,
                                     name=wfname,
                                     category='base',
                                     description=wfname,
                                     slug=slugify(wfname))
                        w.save()
        else:
            self.stdout.write("Problem while querying galaxy server")
Example #7
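# Flask view variant: same rendering as the view above, plus add-node and
# add-link forms and a name-to-index mapping for the nodes.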
def view(view_type):
    add_node_form = AddNode(request.form)
    add_link_form = AddLink(request.form)
    all_nodes = Node.choices()
    add_link_form.source.choices = add_link_form.destination.choices = all_nodes
    view_options_form = ViewOptionsForm(request.form)
    google_earth_form = GoogleEarthForm(request.form)
    scheduling_form = SchedulingForm(request.form)
    scheduling_form.scripts.choices = Script.choices()
    scheduling_form.workflows.choices = Workflow.choices()
    labels = {'node': 'name', 'link': 'name'}
    if 'view_options' in request.form:
        # update labels
        labels = {
            'node': request.form['node_label'],
            'link': request.form['link_label']
        }
    # for better performance, the view defaults to markercluster
    # if there are more than 2000 nodes
    view = 'leaflet' if len(Node.query.all()) < 2000 else 'markercluster'
    if 'view' in request.form:
        view = request.form['view']
    # clear the session's selected nodes
    session['selection'] = []
    # name to id
    name_to_id = {node.name: id for id, node in enumerate(Node.query.all())}
    return render_template(
        '{}_view.html'.format(view_type),
        filters=Filter.query.all(),
        view=view,
        scheduling_form=scheduling_form,
        view_options_form=view_options_form,
        google_earth_form=google_earth_form,
        add_node_form=add_node_form,
        add_link_form=add_link_form,
        labels=labels,
        names=pretty_names,
        subtypes=node_subtypes,
        name_to_id=name_to_id,
        node_table={
            obj: OrderedDict([
                (property, getattr(obj, property))
                for property in type_to_public_properties[obj.type]
            ])
            for obj in Node.query.all()
        },
        link_table={
            obj: OrderedDict([
                (property, getattr(obj, property))
                for property in type_to_public_properties[obj.type]
            ])
            for obj in Link.query.all()
        })
Example #8
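    # Test fixture: two fields, two workflows sharing one status, a project
    # with two ticket types, and the workflow/field schemes that tie them
    # together.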
    def setUp(self):
        fone = Field(name='Test Field One', data_type='INTEGER')
        fone.save()
        ftwo = Field(name='Test Field Two', data_type='INTEGER')
        ftwo.save()

        s = Status(name='Blah')
        s.save()

        wone = Workflow(name='Test Flow One', create_status=s)
        wone.save()
        wtwo = Workflow(name='Test Flow Two', create_status=s)
        wtwo.save()

        u = User.objects.create_user('fakelead', '*****@*****.**', 'fake')
        u.save()

        self.project = Project(key='SCHEME',
                               name='Test Scheme Project',
                               lead=u)
        self.project.save()

        self.ttypeone = TicketType(name='Test Type One')
        self.ttypeone.save()
        self.ttypetwo = TicketType(name='Test Type Two')
        self.ttypetwo.save()

        self.default_wkflow_scheme = WorkflowScheme(workflow=wone,
                                                    project=self.project)
        self.default_wkflow_scheme.save()

        self.ttypeone_wkflow_scheme = WorkflowScheme(workflow=wtwo,
                                                     project=self.project,
                                                     ticket_type=self.ttypeone)
        self.ttypeone_wkflow_scheme.save()

        self.default_field_scheme = FieldScheme(name='Default Field Scheme',
                                                project=self.project)
        self.default_field_scheme.save()

        FieldSchemeField(field=fone, scheme=self.default_field_scheme).save()

        self.ttypeone_field_scheme = FieldScheme(name='Ttypeone Field Scheme',
                                                 project=self.project,
                                                 ticket_type=self.ttypeone)
        self.ttypeone_field_scheme.save()

        FieldSchemeField(field=fone, scheme=self.ttypeone_field_scheme).save()
        FieldSchemeField(field=ftwo, scheme=self.ttypeone_field_scheme).save()
Example #9
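# Flask view: rewrites each task's start date as "year, zero-based month, day,
# hour, minute", presumably fed to a JavaScript Date in the calendar template.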
def calendar():
    scheduling_form = SchedulingForm(request.form)
    scheduling_form.scripts.choices = Script.choices()
    scheduling_form.workflows.choices = Workflow.choices()
    tasks = {}
    for task in Task.query.all():
        # JavaScript months range from 0 to 11, so we subtract 1 from the
        # month for the date to be displayed properly in the calendar
        python_month = search(r'.*-(\d{2})-.*', task.start_date).group(1)
        month = '{:02}'.format((int(python_month) - 1) % 12)
        tasks[task] = sub(r"(\d+)-(\d+)-(\d+) (\d+):(\d+).*",
                          r"\1, " + month + r", \3, \4, \5", task.start_date)
    return render_template('calendar.html',
                           tasks=tasks,
                           scheduling_form=scheduling_form)
Example #10
def import_workflow(request):
    if request.method == "POST":
        form = ImportForm(request.POST)
        if form.is_valid():
            new_workflow = Workflow()
            new_workflow.user = request.user
            new_workflow.import_from_json(
                json.loads(form.cleaned_data['data']), {}, {})
            return redirect(new_workflow.get_absolute_url())
    else:
        form = ImportForm()
    return render(request, 'website/import_workflow.html', {"form": form})
Example #11
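# Flask view: schedules tasks, updates labels or exports the topology to
# Google Earth, then renders the requested network view.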
def view(view_type):
    add_node_form = AddNode(request.form)
    add_link_form = AddLink(request.form)
    view_options_form = ViewOptionsForm(request.form)
    google_earth_form = GoogleEarthForm(request.form)
    scheduling_form = SchedulingForm(request.form)
    scheduling_form.scripts.choices = Script.choices()
    scheduling_form.workflows.choices = Workflow.choices()
    labels = {'node': 'name', 'link': 'name'}
    if 'script' in request.form:
        data = dict(request.form)
        selection = map(int, session['selection'])
        scripts = request.form.getlist('scripts')
        workflows = request.form.getlist('workflows')
        data['scripts'] = [get_obj(Script, name=name) for name in scripts]
        data['workflows'] = [
            get_obj(Workflow, name=name) for name in workflows
        ]
        data['nodes'] = [get_obj(Node, id=id) for id in selection]
        data['user'] = current_user
        task = Task(**data)
        db.session.add(task)
        db.session.commit()
        return redirect(url_for('tasks_blueprint.task_management'))
    elif 'view_options' in request.form:
        # update labels
        labels = {
            'node': request.form['node_label'],
            'link': request.form['link_label']
        }
    elif 'google earth' in request.form:
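        # Export nodes and links to a KMZ file, presumably via simplekml
        # (Kml, newpoint, newlinestring), using the per-subtype styles.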
        kml_file = Kml()
        for node in Node.query.all():
            point = kml_file.newpoint(name=node.name)
            point.coords = [(node.longitude, node.latitude)]
            point.style = styles[node.subtype]
            point.style.labelstyle.scale = request.form['label_size']

        for link in Link.query.all():
            line = kml_file.newlinestring(name=link.name)
            line.coords = [(link.source.longitude, link.source.latitude),
                           (link.destination.longitude,
                            link.destination.latitude)]
            line.style = styles[link.type]
            line.style.linestyle.width = request.form['line_width']
        filepath = join(current_app.ge_path, request.form['name'] + '.kmz')
        kml_file.save(filepath)
    # for better performance, the view defaults to markercluster
    # if there are more than 2000 nodes
    view = 'leaflet' if len(Node.query.all()) < 2000 else 'markercluster'
    if 'view' in request.form:
        view = request.form['view']
    # clear the session's selected nodes
    session['selection'] = []
    # name to id
    name_to_id = {node.name: id for id, node in enumerate(Node.query.all())}
    return render_template(
        '{}_view.html'.format(view_type),
        filters=Filter.query.all(),
        view=view,
        scheduling_form=scheduling_form,
        view_options_form=view_options_form,
        google_earth_form=google_earth_form,
        add_node_form=add_node_form,
        add_link_form=add_link_form,
        labels=labels,
        names=pretty_names,
        subtypes=node_subtypes,
        name_to_id=name_to_id,
        node_table={
            obj:
            OrderedDict([(property, getattr(obj, property))
                         for property in type_to_public_properties[obj.type]])
            for obj in Node.query.all()
        },
        link_table={
            obj:
            OrderedDict([(property, getattr(obj, property))
                         for property in type_to_public_properties[obj.type]])
            for obj in Link.query.all()
        })
Example #12
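# Periodic update: fetches completed requests from the request manager, then
# queries server stats per PrepID and refreshes the stored workflow and task
# data.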
def update_workflows_from_request_manager():
    updated_wfs = []

    logger.debug(
        'Preparing to fetch preps from request manager url {} and cert at {}'.
        format(settings.REQUEST_MANAGER_API_URL, settings.CERT_PATH))

    try:
        wfs = WorkflowToUpdate.objects \
                  .filter(Q(updated__exists=False) |
                          Q(updated__lte=datetime.utcnow() - timedelta(hours=settings.WORKFLOWS_UPDATE_TIMEOUT))) \
                  .order_by('updated')[:settings.WORKFLOWS_UPDATE_LIMIT]
        wfs_count = len(wfs)
        logger.info('Workflows to update {}'.format(wfs_count))

        if wfs_count == 0:
            return

        preps_data_response = requests \
            .get(settings.REQUEST_MANAGER_API_URL + '?mask=PrepID&mask=RequestName&status=completed',
                 verify=settings.CA_PATH,
                 cert=(settings.CERT_PATH, settings.CERT_KEY_PATH,))
        preps_data = preps_data_response.json()['result'][0]

        # logger.debug('Received JSON response from request manager {0}'.format(preps_data))

        for wf in wfs:
            logger.debug('Updating data for workflow: {} {}'.format(
                wf.name, wf.updated))

            if wf.name not in preps_data:
                logger.debug('Workflow {} prep not found. Skipping...'.format(
                    wf.name))
                continue

            # mark workflow as updated
            wf.update(updated=datetime.utcnow())

            wf_preps = preps_data[wf.name]['PrepID']
            wf_preps = wf_preps if isinstance(wf_preps, list) else [wf_preps]
            logger.debug('Processing workflow {}'.format(wf.name))

            for prep_id in wf_preps:
                if prep_id == '666':
                    continue

                logger.debug('Fetch server stats details for:')
                logger.debug('Workflow {}'.format(wf.name))
                logger.debug('PrepID {}'.format(prep_id))

                statuses_data_response = requests \
                    .get(settings.SERVER_STATS_API_URL,
                         params={'PrepID': prep_id},
                         verify=settings.CA_PATH,
                         cert=(settings.CERT_PATH, settings.CERT_KEY_PATH,))
                statuses_data = statuses_data_response.json()['result']

                # logger.debug('Received JSON response from server stats {0}'.format(statuses_data))

                for stat in statuses_data:
                    if 'AgentJobInfo' not in stat:
                        continue

                    prep = Prep(
                        name=prep_id,
                        campaign=stat.get('Campaign'),
                        priority=stat.get('RequestPriority'),
                        cpus=stat.get('Multicore'),
                        memory=stat.get('Memory'),
                    )

                    for job, job_data in stat['AgentJobInfo'].items():
                        if 'tasks' not in job_data:
                            continue

                        job_wf = job_data['workflow']

                        logger.debug('Job {}'.format(job))
                        logger.debug('Workflow {}'.format(job_wf))

                        workflow = Workflow(name=stat['RequestName'])

                        try:
                            update_workflow_tasks(prep, workflow, job_data)
                        except Exception as e:
                            logger.error(
                                'Exception raised when updating Task {} statuses: {}'
                                .format(workflow.name, e))

                        updated_wfs.append(job_wf)

    except Exception as e:
        logger.error('Exception raised: {}'.format(e))

    logger.info('Workflows updated {}'.format(len(updated_wfs)))
Example #13
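# Variant of the updater above: groups results per PrepID and saves one Prep
# per prep with its nested Workflow and TaskPrep entries.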
def update_workflows_from_request_manager():
    logger.debug(
        'Preparing to fetch preps from request manager url {} and cert at {}'.
        format(settings.REQUEST_MANAGER_API_URL, settings.CERT_PATH))

    try:
        wfs = WorkflowToUpdate.objects \
                  .filter(Q(updated__exists=False) |
                          Q(updated__lte=datetime.utcnow() - timedelta(hours=settings.WORKFLOWS_UPDATE_TIMEOUT))) \
                  .order_by('updated')[:settings.WORKFLOWS_UPDATE_LIMIT]
        wfs_count = len(wfs)
        logger.info('Workflows to update {}'.format(wfs_count))

        if wfs_count == 0:
            return

        preps_data_response = requests \
            .get(settings.REQUEST_MANAGER_API_URL + '?mask=PrepID&mask=RequestName&status=completed',
                 verify=settings.CA_PATH,
                 cert=(settings.CERT_PATH, settings.CERT_KEY_PATH,))
        preps_data = preps_data_response.json()['result'][0]

        # TODO old data cleanup here

        updated_preps = []

        for wf in wfs:
            logger.debug('Updating data for unified workflow: {} {}'.format(
                wf.name, wf.updated))

            if wf.name not in preps_data:
                logger.debug('Workflow {} prep not found. Skipping...'.format(
                    wf.name))
                # TODO: delete wf from unified_workflows, cleanup current preps if they are not in this list, remove tasks - function
                continue

            # mark workflow as updated
            wf.update(updated=datetime.utcnow())

            prep_ids = preps_data[wf.name]['PrepID']
            prep_ids = prep_ids if isinstance(prep_ids, list) else [prep_ids]
            logger.debug('Processing workflow {}'.format(wf.name))

            for prep_id in prep_ids:
                if prep_id == '666':
                    continue

                logger.debug(
                    'Fetch wmstats details for PrepID: {}'.format(prep_id))

                statuses_data_response = requests \
                    .get(settings.SERVER_STATS_API_URL,
                         params={'PrepID': prep_id},
                         verify=settings.CA_PATH,
                         cert=(settings.CERT_PATH, settings.CERT_KEY_PATH,))
                statuses_data = statuses_data_response.json()['result']

                campaign = ''
                priority = 0
                workflows = []

                for stat in statuses_data:
                    logger.debug('In statuses loop: {}'.format(prep_id))
                    if not campaign:
                        campaign = stat.get('Campaign')
                        priority = stat.get('RequestPriority')

                    if 'AgentJobInfo' not in stat:
                        continue

                    task_prep = TaskPrep(
                        name=prep_id,
                        campaign=campaign,
                        priority=priority,
                    )

                    workflow_name = stat['RequestName']
                    parent_workflow = stat.get('OriginalRequestName', '')

                    logger.debug('Workflow {}, Parent workflow {}'.format(
                        workflow_name, parent_workflow))

                    tasks = []

                    for job, job_data in stat['AgentJobInfo'].items():
                        if 'tasks' not in job_data:
                            continue

                        logger.debug('Job {}'.format(job))

                        try:
                            tasks.extend(
                                update_workflow_tasks(
                                    task_prep, workflow_name, parent_workflow,
                                    job_data['tasks'].items()))

                        except Exception as e:
                            logger.error(
                                'Exception raised when updating Workflow {} tasks: {}'
                                .format(workflow_name, e))

                    workflows.append(
                        Workflow(name=workflow_name,
                                 parent_workflow=parent_workflow,
                                 tasks=tasks))

                Prep(name=prep_id,
                     campaign=campaign,
                     priority=priority,
                     updated=datetime.utcnow(),
                     workflows=workflows).save()

                updated_preps.append(prep_id)

        logger.info('Workflows updated {}'.format(len(updated_preps)))

    except Exception as e:
        logger.error('Exception raised: {}'.format(e))
Example #14
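    # Seed command: creates demo users, a project, fields, statuses, a default
    # workflow with transitions, ticket types, field/workflow schemes and a
    # batch of sample tickets with comments.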
    def handle(self, *args, **kwargs):
        try:
            admin = User.objects.create_superuser('testadmin',
                                                  '*****@*****.**',
                                                  'test',
                                                  first_name='Test',
                                                  last_name='Testerson II')
            user = User.objects.create_user('testuser',
                                            '*****@*****.**',
                                            'test',
                                            first_name='Test',
                                            last_name='Testerson')

            admin.save()
            user.save()

            users = [admin, user]

            p = Project(name='Dummy Project', lead=admin, key='TEST')

            p.save()

            high = FieldOption(name='High')
            medium = FieldOption(name='Medium')
            low = FieldOption(name='Low')

            high.save()
            medium.save()
            low.save()

            priorities = [high, medium, low]

            story_points = Field(name='Story Points', data_type='INTEGER')
            priority = Field(name='Priority', data_type='OPTION')
            priority.save()
            priority.options.set(priorities)

            story_points.save()
            priority.save()

            backlog = Status(name='Backlog', state='TODO')
            in_progress = Status(name='In Progress', state='IN_PROGRESS')
            done = Status(name='Done', state='DONE')

            backlog.save()
            in_progress.save()
            done.save()

            w = Workflow(name='Default Workflow', create_status=backlog)

            w.save()

            create = Transition(name='Backlog', to_status=backlog, workflow=w)
            to_prog = Transition(name='In Progress',
                                 to_status=in_progress,
                                 workflow=w)
            to_done = Transition(name='Done', to_status=done, workflow=w)

            to_done.save()
            to_prog.save()
            create.save()

            bug = TicketType(name='Bug')
            epic = TicketType(name='Epic')
            feature = TicketType(name='Feature')

            bug.save()
            epic.save()
            feature.save()

            ticket_types = [bug, feature, epic]
            p.ticket_types.add(bug)
            p.ticket_types.add(feature)
            p.ticket_types.add(epic)
            p.save()

            fs = FieldScheme(name='Bug Field Scheme',
                             project=p,
                             ticket_type=bug)
            fs.save()

            FieldSchemeField(field=priority, scheme=fs).save()

            fs = FieldScheme(name='Epic Field Scheme',
                             project=p,
                             ticket_type=epic)
            fs.save()

            FieldSchemeField(field=priority, scheme=fs).save()

            fs = FieldScheme(name='Feature Field Scheme',
                             project=p,
                             ticket_type=feature)
            fs.save()

            FieldSchemeField(field=priority, scheme=fs).save()
            FieldSchemeField(field=story_points, scheme=fs).save()

            ws = WorkflowScheme(project=p, ticket_type=None, workflow=w)
            ws.save()

            for i in range(100):
                t = Ticket(key=p.key + '-' + str(i + 1),
                           summary='This is ticket #' + str(i + 1),
                           reporter=users[randint(0, 1)],
                           assignee=users[randint(0, 1)],
                           ticket_type=ticket_types[randint(0, 2)],
                           project=p,
                           status=backlog,
                           workflow=w,
                           description="""
# Utque erant et edentem suoque nox fertur

## Tegi aurum inridet flumine auras natas vulnus

Lorem markdownum misit sudor, sine eodem libratum munus aristis tutos, hac.
Longe mens vultus iurgia Iovem difficilis suus; ut erat mollis robore terga ei
perque! Quae quos sacrorum custodit quaecumque harena fallis et modo hinc
[recessu](http://venerat.com/), superorum Peleus, temptamenta. **Pudore** Sparte
lentisciferumque nataque inpulsos et ille novat haec sollicitare Plura, levis
vellet valuit, summo dum lutea viso. Solebat lintea ingentibus capillis dicta
Paridis seque quoquam [poposcit in](http://per.net/) Tempe vivacem.

1. Nate nulli
2. Coniunx hausi nunc Quirini Othrys
3. Caede nascuntur ubera congreditur vincula ubi regis
4. Spatium pectore amplexus ferunt ille instat cultores

Illo dolores voluit Achaemenias unde theatris paventem secum ineamus minacia
retro. Duplicataque siste suo recessit; opes albus moribunda referentem animam
nulloque domini et laborent hac?

## Senecta finita

Iovi nec aperire mihi mira respondit, qui exit vulnere aeterno dixerunt dat
corpus? Erit terrae, avidas; sola plenum, cauda edax et referre. Quater posuere:
facit mihi primaque remanet parte, eundo.
            """)
                t.save()

                fvs = [
                    FieldValue(field=story_points,
                               int_value=randint(1, 20),
                               content_object=t),
                    FieldValue(field=priority,
                               str_value=priorities[randint(0, 2)].name,
                               content_object=t)
                ]

                for fv in fvs:
                    fv.save()

            t = Ticket.objects.get(key='TEST-1')

            for i in range(25):
                body = """This is the %d th comment

# Yo Dawg

**I** *heard* you

> like markdown

so I put markdown in your comment""" % i
                c = Comment(body=body, author=users[randint(0, 1)], ticket=t)
                c.save()
        except IntegrityError as e:
            print(
                "Database already seeded, run cleandb first to force seeding..."
            )
            print(e)