コード例 #1
0
ファイル: views.py プロジェクト: jcgrenier/TS
def config_publishers(request, ctx):
    """Rescan publisher plugins on disk and expose them in the template context."""
    global_config = GlobalConfig.get()
    # Drop stale publisher records, then rediscover what is installed.
    publishers.purge_publishers()
    publishers.search_for_publishers(global_config)
    all_publishers = Publisher.objects.all().order_by('name')
    ctx.update({"publishers": all_publishers})
コード例 #2
0
def send_deep_laser_metrics(event_type, event_data, schema=1):
    """Send a metrics event to the Deep Laser IoT endpoint.

    Args:
        event_type: identifier recorded as the payload's "eventId".
        event_data: dict of event parameters. It is copied before the server
            identity keys are added, so the caller's dict is not mutated
            (the previous implementation modified it in place).
        schema: payload layout version; only 1 is supported (2 is retired).

    Returns:
        The result of send_deep_laser_iot_request for the built payload.

    Raises:
        DeprecationWarning: if schema 2 is requested.
        ValueError: for any other unsupported schema value.
    """
    if schema == 1:
        # Copy so the added keys below do not leak back into the caller.
        parameters = dict(event_data)
        parameters["serverSerial"] = get_servicetag()
        parameters["siteName"] = GlobalConfig.get().site_name
        payload = {
            "request": "senddata",
            "appName": "Torrent Suite",
            "appVersion": TS_version,
            "datatype": "metric",
            "eventId": event_type,
            "parameters": parameters,
        }
        return send_deep_laser_iot_request(payload)
    elif schema == 2:
        # Schema 2 was retired; raise with an explanatory message instead of
        # the bare exception class (the dead payload template was removed).
        raise DeprecationWarning(
            "send_deep_laser_metrics schema 2 is no longer supported"
        )
    else:
        raise ValueError("send_deep_laser_metrics schema must be 1 or 2")
コード例 #3
0
ファイル: views.py プロジェクト: zjwang6/TS
def monitor(request):
    """Render the Monitor tab with the first page of runs pre-serialized as JSON."""
    page_size = GlobalConfig.get().records_to_display
    resource = MonitorResultResource()
    object_list = resource.get_object_list(request)
    paginator = resource._meta.paginator_class(
        request.GET,
        object_list,
        resource_uri=resource.get_resource_uri(),
        limit=page_size,
        max_limit=resource._meta.max_limit,
        collection_name=resource._meta.collection_name,
    )
    page = paginator.page()
    # Dehydrate each result into a serializable bundle.
    dehydrated = []
    for obj in page["objects"]:
        bundle = resource.build_bundle(obj=obj, request=request)
        dehydrated.append(resource.full_dehydrate(bundle))
    page["objects"] = dehydrated
    serialized_exps = resource.serialize(None, page, "application/json")

    context = {"initial_runs": serialized_exps, "pageSize": page_size}
    return render_to_response("rundb/monitor/monitor.html",
                              context,
                              context_instance=RequestContext(request))
コード例 #4
0
ファイル: views.py プロジェクト: rb94/TS
def monitor(request):
    """Render the Monitor tab seeded with the first page of results."""
    page_size = GlobalConfig.get().records_to_display
    resource = MonitorResultResource()
    paginator = resource._meta.paginator_class(
        request.GET,
        resource.get_object_list(request),
        resource_uri=resource.get_resource_uri(),
        limit=page_size,
        max_limit=resource._meta.max_limit,
        collection_name=resource._meta.collection_name,
    )
    data = paginator.page()
    data['objects'] = [
        resource.full_dehydrate(resource.build_bundle(obj=run, request=request))
        for run in data['objects']
    ]
    initial_runs = resource.serialize(None, data, 'application/json')

    return render_to_response(
        "rundb/monitor/monitor.html",
        {'initial_runs': initial_runs, 'pageSize': page_size},
        context_instance=RequestContext(request),
    )
コード例 #5
0
ファイル: views.py プロジェクト: jcgrenier/TS
def configure_configure(request):
    """Render the main Configure page with email, contact and site-name settings."""
    ctx = RequestContext(request, {})
    email_addresses = EmailAddress.objects.all().order_by('pk')
    nightly_enabled = GlobalConfig.get().enable_nightly_email
    ctx.update({"email": email_addresses, "enable_nightly": nightly_enabled})
    # Sub-sections add their own keys into the shared context.
    config_contacts(request, ctx)
    config_site_name(request, ctx)
    return render_to_response("rundb/configure/configure.html", context_instance=ctx)
コード例 #6
0
ファイル: views.py プロジェクト: Brainiarc7/TS
def config_site_name(request, context):
    """Persist a new site name on POST and expose it to the template.

    The current site name is loaded by the page itself, so this only has to
    handle the update case; non-POST requests (or POSTs without a
    "site_name" field) are ignored.
    """
    if request.method != "POST" or "site_name" not in request.POST:
        return
    new_name = request.POST["site_name"]
    config = GlobalConfig.get()
    config.site_name = new_name
    config.save()
    context.update({"base_site_name": new_name})
コード例 #7
0
def news(request):
    """Render the news page and mark every post as read for this user."""
    profile = request.user.userprofile
    articles = list(NewsPost.objects.all().order_by("-updated"))
    context = {}
    context["articles"] = articles
    context["last_read"] = profile.last_read_news_post
    context["is_updating"] = GlobalConfig.get().check_news_posts
    # Record this visit so unread highlighting resets on the next view.
    profile.last_read_news_post = timezone.now()
    profile.save()
    return render(request, "rundb/home/news.html", context)
コード例 #8
0
ファイル: views.py プロジェクト: stevematyas/TS
def news(request):
    """Show the news listing and update the user's last-read timestamp."""
    user_profile = request.user.userprofile
    posts = list(NewsPost.objects.all().order_by('-updated'))
    ctx = {
        "articles": posts,
        "last_read": user_profile.last_read_news_post,
        "is_updating": GlobalConfig.get().check_news_posts,
    }
    # Everything up to now counts as read once the page is served.
    user_profile.last_read_news_post = timezone.now()
    user_profile.save()
    return render_to_response("rundb/extra/news.html", ctx,
                              context_instance=RequestContext(request))
コード例 #9
0
ファイル: views.py プロジェクト: iontorrent/TS
def news(request):
    """Display news posts and reset the user's unread marker."""
    profile = request.user.userprofile
    last_read = profile.last_read_news_post
    is_updating = GlobalConfig.get().check_news_posts
    articles = list(NewsPost.objects.all().order_by('-updated'))
    # Stamp the visit after the context values above are captured, so the
    # page still shows which posts were unread on arrival.
    profile.last_read_news_post = timezone.now()
    profile.save()
    ctx = {
        "articles": articles,
        "last_read": last_read,
        "is_updating": is_updating,
    }
    return render(request, "rundb/home/news.html", ctx)
コード例 #10
0
def fileserver_space_check():
    """For each file server, compare disk usage to backup threshold value.
    If disk usage exceeds threshold, launch celery task to delete/archive
    raw data directories.
    python -c "from iondb.bin import djangoinit; from iondb.rundb.data import data_management as dm; dm.fileserver_space_check()"
    """
    logid = {"logid": "%s" % ("DM")}
    try:
        starttime = time.time()
        # Get GlobalConfig object in order to access auto-acknowledge bit
        global_config = GlobalConfig.get()
        auto_acknowledge = global_config.auto_archive_ack
        auto_action_enabled = global_config.auto_archive_enable

        category_list = dm_utils.dm_category_list()

        # -------------------------------------------------------------
        # Action loop - for each category, launch action per deviceid
        # -------------------------------------------------------------
        if not auto_action_enabled:
            logger.info("Data management auto-action is disabled.",
                        extra=logid)

        # update any DMFileStats that are in use by active analysis jobs
        try:
            update_files_in_use()
        except Exception:
            logger.error("Unable to update active DMFileStats", extra=logid)
            logger.error(traceback.format_exc(), extra=logid)

        # Checks for manual export and archive requests.
        manage_manual_action.delay()

        # BUGFIX: loop variable renamed from "dict", which shadowed the builtin.
        for category, cat_info in category_list.items():
            for deviceid in cat_info["devlist"]:
                pathlist = [
                    item["path"] for item in cat_info["partitions"]
                    if item["devid"] == deviceid
                ]
                manage_data.delay(
                    deviceid,
                    cat_info["dmfileset"],
                    pathlist,
                    auto_acknowledge,
                    auto_action_enabled,
                )
        endtime = time.time()
        logger.info("Disk Check: %f s" % (endtime - starttime), extra=logid)
    except SoftTimeLimitExceeded:
        logger.error("fileserver_space_check exceeded execution time limit",
                     extra=logid)
    return
コード例 #11
0
def send_email(recipient, subject_line, text, html=None):
    """Send an email to one or more recipients.

    Args:
        recipient: comma/semicolon/whitespace separated address string.
        subject_line: subject of the message.
        text: plain-text body.
        html: optional HTML body; when given, the mail is sent as
            multipart/alternative.

    Returns:
        True if the message was handed to the mail backend, False otherwise.
    """
    import socket
    from django.core import mail
    from iondb.rundb.models import GlobalConfig

    if not recipient:
        logger.warning("No email recipient for %s" % subject_line)
        return False
    else:
        recipient = recipient.replace(",", " ").replace(";", " ").split()

    # Needed to send email
    settings.EMAIL_HOST = "localhost"
    settings.EMAIL_PORT = 25
    settings.EMAIL_USE_TLS = False

    site_name = GlobalConfig.get().site_name or "Torrent Server"
    hname = socket.getfqdn()

    message = "From: %s (%s)\n\n" % (site_name, hname)
    message += text
    message += "\n"

    if html:
        html_message = "From: %s (<a href=%s>%s</a>)<br>" % (site_name, hname, hname)
        html_message += html
        html_message += "<br>"
    else:
        html_message = ""

    reply_to = "*****@*****.**"

    # Send the email
    try:
        if html_message:
            sendthis = mail.EmailMultiAlternatives(
                subject_line, message, reply_to, recipient
            )
            sendthis.attach_alternative(html_message, "text/html")
            sendthis.send()
        else:
            mail.send_mail(subject_line, message, reply_to, recipient)
    except Exception:
        # BUGFIX: was a bare "except:", which also swallowed
        # KeyboardInterrupt / SystemExit.
        logger.error(traceback.format_exc())
        return False
    else:
        logger.info("%s email sent to %s" % (subject_line, recipient))
        return True
コード例 #12
0
ファイル: data_management.py プロジェクト: BENMFeng/TS
def notify(name_list, recipient):
    '''Email the recipient a list of reports slated for Signal Processing
    file removal, asking for acknowledgement.

    Returns True when a notification was sent, False otherwise (blank
    recipient, empty list, or a mail failure).
    '''

    # Check for blank email
    # TODO: check for valid email address
    if recipient is None or recipient == "":
        return False

    # Needed to send email
    settings.EMAIL_HOST = 'localhost'
    settings.EMAIL_PORT = 25
    settings.EMAIL_USE_TLS = False

    try:
        site_name = GlobalConfig.get().site_name
    except Exception:
        # Fall back to a generic name if the config row is unavailable.
        site_name = "Torrent Server"

    hname = socket.gethostname()

    subject_line = 'Torrent Server Data Management Action Request'
    reply_to = '*****@*****.**'
    message = 'From: %s (%s)\n' % (site_name, hname)
    message += '\n'
    message += 'Results drive capacity threshold has been reached.\n'
    message += 'Signal Processing files have been identified for removal.\n'
    message += 'Please go to Services Page and acknowledge so that removal can proceed.\n'
    message += 'Removal will not occur without this acknowledgement.\n'
    message += '\n'
    message += 'The following Reports have Signal Processing files selected for Deletion:'
    message += "\n"
    count = 0
    for e in name_list:
        message += "- %s\n" % e
        count += 1

    # Send the email only if there are runs that have not triggered a notification
    if count > 0:
        try:
            mail.send_mail(subject_line, message, reply_to, recipient)
        except Exception:
            # BUGFIX: was a bare "except:".
            logger.warning(traceback.format_exc(), extra=logid)
            return False
        else:
            logger.info("Notification email sent for user acknowledgement",
                        extra=logid)
            return True
    # BUGFIX: previously fell off the end (returned None) for an empty list.
    return False
コード例 #13
0
ファイル: utils.py プロジェクト: iontorrent/TS
def send_email(recipient, subject_line, text, html=None):
    '''Send an email to one or more recipients.

    recipient is a comma/semicolon/whitespace separated address string;
    html, when given, is attached as a multipart/alternative body.
    Returns True if the message was handed to the mail backend, else False.
    '''
    import socket
    from django.core import mail
    from iondb.rundb.models import GlobalConfig

    if not recipient:
        logger.warning("No email recipient for %s" % subject_line)
        return False
    else:
        recipient = recipient.replace(',', ' ').replace(';', ' ').split()

    #Needed to send email
    settings.EMAIL_HOST = 'localhost'
    settings.EMAIL_PORT = 25
    settings.EMAIL_USE_TLS = False

    site_name = GlobalConfig.get().site_name or 'Torrent Server'
    hname = socket.getfqdn()

    message = 'From: %s (%s)\n\n' % (site_name, hname)
    message += text
    message += '\n'

    if html:
        html_message = 'From: %s (<a href=%s>%s</a>)<br>' % (site_name, hname, hname)
        html_message += html
        html_message += '<br>'
    else:
        html_message = ''

    reply_to = '*****@*****.**'

    # Send the email
    try:
        if html_message:
            sendthis = mail.EmailMultiAlternatives(subject_line, message, reply_to, recipient)
            sendthis.attach_alternative(html_message, "text/html")
            sendthis.send()
        else:
            mail.send_mail(subject_line, message, reply_to, recipient)
    except Exception:
        # BUGFIX: was a bare "except:", which also swallowed
        # KeyboardInterrupt / SystemExit.
        logger.error(traceback.format_exc())
        return False
    else:
        logger.info("%s email sent to %s" % (subject_line, recipient))
        return True
コード例 #14
0
ファイル: data_management.py プロジェクト: biocyberman/TS
def notify(name_list, recipient):
    '''Email the recipient a list of reports slated for Signal Processing
    file removal, asking for acknowledgement.

    Returns True when a notification was sent, False otherwise (blank
    recipient, empty list, or a mail failure).
    '''

    # Check for blank email
    # TODO: check for valid email address
    if recipient is None or recipient == "":
        return False

    # Needed to send email
    settings.EMAIL_HOST = 'localhost'
    settings.EMAIL_PORT = 25
    settings.EMAIL_USE_TLS = False

    try:
        site_name = GlobalConfig.get().site_name
    except Exception:
        # Fall back to a generic name if the config row is unavailable.
        site_name = "Torrent Server"

    hname = socket.gethostname()

    subject_line = 'Torrent Server Data Management Action Request'
    reply_to = '*****@*****.**'
    message = 'From: %s (%s)\n' % (site_name, hname)
    message += '\n'
    message += 'Results drive capacity threshold has been reached.\n'
    message += 'Signal Processing files have been identified for removal.\n'
    message += 'Please go to Services Page and acknowledge so that removal can proceed.\n'
    message += 'Removal will not occur without this acknowledgement.\n'
    message += '\n'
    message += 'The following Reports have Signal Processing files selected for Deletion:'
    message += "\n"
    count = 0
    for e in name_list:
        message += "- %s\n" % e
        count += 1

    # Send the email only if there are runs that have not triggered a notification
    if count > 0:
        try:
            mail.send_mail(subject_line, message, reply_to, recipient)
        except Exception:
            # BUGFIX: was a bare "except:".
            logger.warning(traceback.format_exc(), extra=logid)
            return False
        else:
            logger.info("Notification email sent for user acknowledgement", extra=logid)
            return True
    # BUGFIX: previously fell off the end (returned None) for an empty list.
    return False
コード例 #15
0
ファイル: chips.py プロジェクト: zjwang6/TS
def showpage(request):
    """Main Chips Files Display: list AutoPH chip archives found on every
    file server's "Chips" directory, newest first, tagged pass/fail."""
    site_name = GlobalConfig.get().site_name

    # search all File Servers for a "Chips" directory
    fileservers = FileServer.objects.all()
    files = {}
    locList = []
    for server in fileservers:
        directory = os.path.join(server.filesPrefix, "Chips")
        if os.path.isdir(directory):
            files[server.name] = []
            listoffiles = os.listdir(directory)
            listoffiles.sort()
            listoffiles.reverse()
            for filename in listoffiles:
                if fnmatch.fnmatch(filename, "*AutoPH*.bz2"):
                    # Instrument name is the leading token of the filename.
                    instName = filename.split("_")[0]
                    if not [instName, server.name] in locList:
                        locList.append([instName, server.name])

                    # BUGFIX: passVar was previously left unset (or stale from
                    # a prior iteration) when the file matched neither the
                    # Fail nor the Pass pattern.
                    if fnmatch.fnmatch(filename, "*AutoPHFail*"):
                        passVar = "F"
                    elif fnmatch.fnmatch(filename, "*AutoPHPass*"):
                        passVar = "T"
                    else:
                        passVar = ""

                    files[server.name].append([
                        filename.split(".")[0],
                        instName,
                        os.path.join(directory, filename),
                        passVar,
                    ])

    protonDiags = findProtonDiags(fileservers)

    ctxd = {
        "error_state": 0,
        "locations_list": locList,
        "base_site_name": site_name,
        "files": files,
        "protonDiags": protonDiags,
    }
    ctx = RequestContext(request, ctxd)
    return render_to_response("rundb/configure/ion_chips.html",
                              context_instance=ctx)
コード例 #16
0
ファイル: data_management.py プロジェクト: iontorrent/TS
def fileserver_space_check():
    '''For each file server, compare disk usage to backup threshold value.
    If disk usage exceeds threshold, launch celery task to delete/archive
    raw data directories.
    python -c "from iondb.bin import djangoinit; from iondb.rundb.data import data_management as dm; dm.fileserver_space_check()"
    '''
    logid = {'logid': "%s" % ('DM')}
    try:
        starttime = time.time()
        # Get GlobalConfig object in order to access auto-acknowledge bit
        global_config = GlobalConfig.get()
        auto_acknowledge = global_config.auto_archive_ack
        auto_action_enabled = global_config.auto_archive_enable

        category_list = dm_utils.dm_category_list()

        #-------------------------------------------------------------
        # Action loop - for each category, launch action per deviceid
        #-------------------------------------------------------------
        if not auto_action_enabled:
            logger.info("Data management auto-action is disabled.", extra=logid)

        # update any DMFileStats that are in use by active analysis jobs
        try:
            update_files_in_use()
        except Exception:
            # BUGFIX: was a bare "except:".
            logger.error('Unable to update active DMFileStats', extra=logid)
            logger.error(traceback.format_exc(), extra=logid)

        # Checks for manual export and archive requests.
        manage_manual_action.delay()

        # BUGFIX: loop variable renamed from "dict", which shadowed the builtin.
        for category, cat_info in category_list.iteritems():
            for deviceid in cat_info['devlist']:
                pathlist = [item['path'] for item in cat_info['partitions'] if item['devid'] == deviceid]
                manage_data.delay(
                    deviceid, cat_info['dmfileset'], pathlist, auto_acknowledge, auto_action_enabled)
        endtime = time.time()
        logger.info("Disk Check: %f s" % (endtime - starttime), extra=logid)
    except SoftTimeLimitExceeded:
        logger.error("fileserver_space_check exceeded execution time limit", extra=logid)
    return
コード例 #17
0
ファイル: chips.py プロジェクト: biocyberman/TS
def showpage(request):
    '''Main Chips Files Display: list AutoPH chip archives found on every
    file server's "Chips" directory, newest first, tagged pass/fail.'''
    site_name = GlobalConfig.get().site_name

    # search all File Servers for a "Chips" directory
    fileservers = FileServer.objects.all()
    files = {}
    locList = []
    for server in fileservers:
        directory = os.path.join(server.filesPrefix, 'Chips')
        if os.path.isdir(directory):
            files[server.name] = []
            listoffiles = os.listdir(directory)
            listoffiles.sort()
            listoffiles.reverse()
            for filename in listoffiles:
                if fnmatch.fnmatch(filename, "*AutoPH*.bz2"):
                    # Instrument name is the leading token of the filename.
                    instName = filename.split('_')[0]
                    if not [instName, server.name] in locList:
                        locList.append([instName, server.name])

                    # BUGFIX: passVar was previously left unset (or stale from
                    # a prior iteration) when the file matched neither the
                    # Fail nor the Pass pattern.
                    if fnmatch.fnmatch(filename, "*AutoPHFail*"):
                        passVar = 'F'
                    elif fnmatch.fnmatch(filename, "*AutoPHPass*"):
                        passVar = 'T'
                    else:
                        passVar = ''

                    files[server.name].append([filename.split('.')[0], instName, os.path.join(directory, filename), passVar])

    protonDiags = findProtonDiags(fileservers)

    ctxd = {
        "error_state": 0,
        "locations_list": locList,
        "base_site_name": site_name,
        "files": files,
        "protonDiags": protonDiags,
    }
    ctx = RequestContext(request, ctxd)
    return render_to_response("rundb/configure/ion_chips.html", context_instance=ctx)
コード例 #18
0
ファイル: test_manager.py プロジェクト: basmaNasser/TS
 def setUp(self):
     """Create a scratch directory and capture the singleton GlobalConfig."""
     self.tempdir = tempfile.mkdtemp()
     configs = GlobalConfig.objects.all().order_by('pk')
     self.assertEqual(1, len(configs))
     self.gc = GlobalConfig.get()
コード例 #19
0
ファイル: test_manager.py プロジェクト: zjwang6/TS
 def setUp(self):
     """Make a temp working directory and fetch the lone GlobalConfig row."""
     self.tempdir = tempfile.mkdtemp()
     config_rows = GlobalConfig.objects.all().order_by("pk")
     self.assertEqual(1, len(config_rows))
     self.gc = GlobalConfig.get()
コード例 #20
0
def dashboard_fragments(request, skip_runs=False):
    """ Returns the dashboard sections as html in a json object.

    Renders three HTML fragments (summary, runs, instruments) and returns
    them as a JSON object keyed by section name.

    Args:
        request: HTTP request; GET param "time_span" selects the runs
            look-back window (one of DASHBOARD_TIME_SPANS' keys).
        skip_runs: when True, the runs query is skipped and an empty runs
            section is emitted instead.

    Raises:
        Http404: if "time_span" is not a recognized key.
    """
    time_span = request.GET.get("time_span", "24hours")
    now = datetime.datetime.now(pytz.UTC)

    # Map of span name -> earliest datetime included in the runs query.
    DASHBOARD_TIME_SPANS = {
        "hour": now - datetime.timedelta(hours=1),
        "today": now.replace(hour=0, minute=0, second=0, microsecond=0),
        "24hours": now - datetime.timedelta(hours=24),
        "7days": now - datetime.timedelta(days=7),
        # Used for testing only. Do not expose to the UI.
        "__all__": datetime.datetime(year=1971, month=1, day=1),
    }
    if time_span not in DASHBOARD_TIME_SPANS:
        raise Http404("Time span %s not available!" % time_span)

    # runs section
    if skip_runs:
        # Caller asked to omit the runs query; emit an empty but
        # well-formed runs context so the template still renders.
        runs_context = {
            # Runs Section
            "runs": {
                "time_span": time_span,
                "stages": DASHBOARD_STAGES,
                "runs": [],
                "error": "",
            }
        }
    else:
        try:
            runs = get_runs_list(DASHBOARD_TIME_SPANS[time_span])
            runs_error = None
        except Exception as err:
            # Keep the dashboard rendering even if the runs query fails;
            # the error text is surfaced in the runs fragment.
            runs = []
            runs_error = str(err)

        runs_context = {
            # Runs Section
            "runs": {
                "time_span": time_span,
                "stages": DASHBOARD_STAGES,
                "runs": runs,
                "error": runs_error,
            }
        }

    # software update
    update_status = GlobalConfig.get().ts_update_status

    # services: collect the names of any processes reported as down
    services_down = []
    for process, state in process_set():
        if not state:
            services_down.append(process)

    # Cluster status is only shown when compute nodes (Crunchers) exist;
    # any node whose state is not "G" is counted as down.
    show_cluster = False
    nodes_down = []
    if Cruncher.objects.count() > 0:
        show_cluster = True
        nodes_down = Cruncher.objects.exclude(state="G").values_list("name",
                                                                     flat=True)

    # storage status
    storage = get_storage_status()

    # data management: active job states and error count for the DM panel
    disk_usage = get_disk_usage()
    dm_active_jobs = DMFileStat.objects.filter(
        action_state__in=["AG", "DG", "EG", "SA", "SE", "SD", "IG"
                          ]).values_list("action_state", flat=True)
    dm_errors = DMFileStat.objects.filter(action_state="E").count()

    # instruments
    # NOTE(review): get_instrument_info presumably contacts each rig over
    # the network — hence the worker pool for parallel polling; confirm.
    rigs = Rig.objects.exclude(host_address="")
    num_rigs = len(rigs)
    if num_rigs > 1:
        with ManagedPool(processes=min(num_rigs, 50)) as pool:
            instruments = pool.map(get_instrument_info, rigs)
    else:
        instruments = [get_instrument_info(rig) for rig in rigs]

    # Tally instrument connection states for the summary badges.
    instr_connected = sum(
        [instr["status"] == CONNECTED for instr in instruments])
    instr_offline = sum([instr["status"] == OFFLINE for instr in instruments])
    instr_alarm = sum([instr["status"] == ALARM for instr in instruments])

    summary_context = {
        # Summary Section
        "summary": {
            "ts_version": TS_version,
            "update_status": update_status,
            "instruments": {
                "connected": instr_connected,
                "offline": instr_offline,
                "alerts": instr_alarm,
            },
            "services": {
                "url": reverse("configure_services"),
                "number_services_down": len(services_down),
                "services_down": services_down,
                "show_cluster": True if show_cluster else False,
                "number_nodes_down": len(nodes_down) if show_cluster else "",
                "show_nas": storage["show_nas"],
                "nas_status": storage.get("nas_status", ""),
                "show_raid": storage["show_raid"],
                "raid_status": storage.get("raid_status", ""),
            },
            "data_management": {
                "url":
                reverse("datamanagement"),
                "disk_usage":
                disk_usage,
                "show_path":
                len(disk_usage) > 1,
                # Per-state counts of in-flight / pending DM jobs.
                "dm_jobs": [
                    ("archive in progress",
                     sum([s == "AG" for s in dm_active_jobs])),
                    ("export in progress",
                     sum([s == "EG" for s in dm_active_jobs])),
                    ("delete in progress",
                     sum([s == "DG" for s in dm_active_jobs])),
                    ("import in progress",
                     sum([s == "IG" for s in dm_active_jobs])),
                    ("archive pending",
                     sum([s == "SA" for s in dm_active_jobs])),
                    ("export pending", sum([s == "SE"
                                            for s in dm_active_jobs])),
                    ("delete pending", sum([s == "SD"
                                            for s in dm_active_jobs])),
                ],
                "dm_errors":
                dm_errors,
            },
        }
    }

    # Instruments are ordered by status first, then case-insensitive name.
    instruments_context = {
        "instruments":
        sorted(instruments, key=lambda x: (x["status"], x["name"].lower()))
    }

    # Each fragment is rendered to a string and shipped back in one JSON body.
    return HttpResponse(
        json.dumps({
            "summary":
            render_to_string("rundb/home/fragments/summary.html",
                             summary_context),
            "runs":
            render_to_string("rundb/home/fragments/runs.html", runs_context),
            "instruments":
            render_to_string("rundb/home/fragments/instruments.html",
                             instruments_context),
        }),
        content_type="application/json",
    )
コード例 #21
0
ファイル: kits_step_data.py プロジェクト: skner/TS
    def __init__(self, sh_type):
        """Populate the Kits step of the plan wizard with saved defaults and
        selectable choices.

        Choice lists (prepopulatedFields) come from the active KitInfo,
        LibraryKey, ThreePrimeadapter, Chip and dnaBarcode records; several
        defaults (savedFields) are taken from the first record of the
        corresponding ordered queryset or from the singleton GlobalConfig.

        Args:
            sh_type: step-helper type, stored on the instance for later use.
        """
        super(KitsStepData, self).__init__(sh_type)
        self.resourcePath = 'rundb/plan/page_plan/page_plan_kits.html'

        #20130827-test
        ##self._dependsOn.append(StepNames.IONREPORTER)

        # This step re-validates when the application or barcode steps change.
        self._dependsOn.append(StepNames.APPLICATION)
        self._dependsOn.append(StepNames.BARCODE_BY_SAMPLE)

        self.savedFields[KitsFieldNames.SAMPLE_PREPARATION_KIT] = None
        self.prepopulatedFields[KitsFieldNames.SAMPLE_PREP_KITS] = KitInfo.objects.filter(kitType='SamplePrepKit', isActive=True).order_by('description')

        self.savedFields[KitsFieldNames.LIBRARY_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.LIB_KITS] = KitInfo.objects.filter(kitType='LibraryKit', isActive=True).order_by("description")

        # Default library key: first forward/single-run key, defaults first.
        self.savedFields[KitsFieldNames.LIBRARY_KEY] = None
        self.prepopulatedFields[KitsFieldNames.FORWARD_LIB_KEYS] = LibraryKey.objects.filter(direction='Forward', runMode='single').order_by('-isDefault', 'name')
        self.savedFields[KitsFieldNames.LIBRARY_KEY] = self.prepopulatedFields[KitsFieldNames.FORWARD_LIB_KEYS][0].sequence

        self.savedFields[KitsFieldNames.TF_KEY] = GlobalConfig.get().default_test_fragment_key

        # Default 3' adapter: first forward/single-run adapter, defaults first.
        self.savedFields[KitsFieldNames.FORWARD_3_PRIME_ADAPTER] = None
        self.prepopulatedFields[KitsFieldNames.FORWARD_3_ADAPTERS] = ThreePrimeadapter.objects.filter(direction='Forward', runMode='single').order_by('-isDefault', 'chemistryType', 'name')
        self.savedFields[KitsFieldNames.FORWARD_3_PRIME_ADAPTER] = self.prepopulatedFields[KitsFieldNames.FORWARD_3_ADAPTERS][0].sequence

        self.savedFields[KitsFieldNames.AVALANCHE_FORWARD_3_PRIME_ADAPTER] = None
        self.prepopulatedFields[KitsFieldNames.AVALANCHE_FORWARD_3_PRIME_ADAPTERS] = ThreePrimeadapter.objects.filter(direction='Forward', runMode='single', chemistryType = 'avalanche').order_by('-isDefault', 'name')
        self.savedFields[KitsFieldNames.AVALANCHE_FORWARD_3_PRIME_ADAPTER] = self.prepopulatedFields[KitsFieldNames.AVALANCHE_FORWARD_3_PRIME_ADAPTERS][0].sequence

        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        ##no longer default to OneTouch
        #self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = KitsFieldNames.ONE_TOUCH
        self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = None

        # Per-templating-hardware kit choices (OneTouch / IonChef / Avalanche).
        oneTouchDict = {
                KitsFieldNames.KIT_VALUES          : KitInfo.objects.filter(kitType__in=['TemplatingKit', 'AvalancheTemplateKit'], isActive=True).order_by("description"),
                KitsFieldNames.APPLICATION_DEFAULT : None
                }

        ionChefDict = {
                KitsFieldNames.KIT_VALUES          : KitInfo.objects.filter(kitType='IonChefPrepKit', isActive=True).order_by("description"),
                KitsFieldNames.APPLICATION_DEFAULT : None
                }

        oneTouchAvalancheDict = {
                KitsFieldNames.KIT_VALUES          : KitInfo.objects.filter(kitType__in=['AvalancheTemplateKit'], isActive=True).order_by("description"),
                KitsFieldNames.APPLICATION_DEFAULT : None
                }

        self.prepopulatedFields[KitsFieldNames.TEMPLATE_KIT_TYPES] = {
                                                                KitsFieldNames.ONE_TOUCH : oneTouchDict,
                                                                KitsFieldNames.ION_CHEF  : ionChefDict,
                                                                KitsFieldNames.ONE_TOUCH_AVALANCHE : oneTouchAvalancheDict
                                                                }

        self.savedFields[KitsFieldNames.SEQUENCE_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.SEQ_KITS] = KitInfo.objects.filter(kitType='SequencingKit', isActive=True).order_by("description")

        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.TEMPLATE_KITS] = KitInfo.objects.filter(kitType__in=['TemplatingKit', 'AvalancheTemplateKit'], isActive=True).order_by("description")
        self.prepopulatedFields[KitsFieldNames.ALL_TEMPLATE_KITS] = KitInfo.objects.filter(kitType__in=['TemplatingKit', 'AvalancheTemplateKit', 'IonChefPrepKit'], isActive=True).order_by("description")

        self.savedFields[KitsFieldNames.CONTROL_SEQUENCE] = None
        self.prepopulatedFields[KitsFieldNames.CONTROL_SEQ_KITS] = KitInfo.objects.filter(kitType='ControlSequenceKit', isActive=True).order_by("description")

        self.savedFields[KitsFieldNames.CHIP_TYPE] = None
        self.prepopulatedFields[KitsFieldNames.CHIP_TYPES] = list(Chip.objects.filter(isActive=True).order_by('description', 'name').distinct('description'))

        self.savedFields[KitsFieldNames.BARCODE_ID] = None
        self.prepopulatedFields[KitsFieldNames.BARCODES] = list(dnaBarcode.objects.values('name').distinct().order_by('name'))

        # Analysis defaults sourced from the singleton GlobalConfig.
        gc = GlobalConfig.get()
        self.savedFields[KitsFieldNames.IS_DUPLICATED_READS] = gc.mark_duplicates

        self.savedFields[KitsFieldNames.BASE_RECALIBRATE]= gc.base_recalibration_mode

        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES] = OrderedDict()
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES]["standard_recal"] = "Default Calibration"
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES]["panel_recal"] = "Enable Calibration Standard"
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES]["no_recal"] = "No Calibration"

        self.savedFields[KitsFieldNames.REALIGN] = gc.realign

        # 0 means "not chosen yet" for flows and read length.
        self.savedFields[KitsFieldNames.FLOWS] = 0
        self.savedFields[KitsFieldNames.LIBRARY_READ_LENGTH] = 0

        self.prepopulatedFields[KitsFieldNames.IS_BARCODE_KIT_SELECTION_REQUIRED] = False

        self.prepopulatedFields[KitsFieldNames.TEMPLATING_SIZE_CHOICES] = ["200", "400"]
        self.savedFields[KitsFieldNames.TEMPLATING_SIZE] = ""

        self.sh_type = sh_type
コード例 #22
0
ファイル: views_helper.py プロジェクト: biocyberman/TS
def isOCP_enabled():
    """Return the site-wide 'compendia OCP' feature flag from GlobalConfig."""
    config = GlobalConfig.get()
    return config.enable_compendia_OCP
コード例 #23
0
def fileserver_space_check():
    '''For each file server, compare disk usage to backup threshold value.
    If disk usage exceeds threshold, launch celery task to delete/archive
    raw data directories.

    Side effects: queues ``manage_manual_action`` once and ``manage_data``
    once per (category, device) pair; logs per-partition usage decisions.
    Returns None.
    '''
    logger = get_task_logger('data_management')

    # Get GlobalConfig object in order to access auto-acknowledge bit
    gc = GlobalConfig.get()
    auto_acknowledge = gc.auto_archive_ack
    auto_action_enabled = gc.auto_archive_enable

    # Get list of File Server objects
    file_servers = FileServer.objects.all().order_by('pk').values()

    # Get list of Report Storage objects
    report_storages = ReportStorage.objects.all().order_by('pk').values()

    # dict of fileset categories, each with a list of partition ids that can be acted upon.
    category_list = {}

    #-------------------------------------------------
    # DELETE action only happens if threshold reached
    #-------------------------------------------------
    for dmfileset in DMFileSet.objects.filter(version=settings.RELVERSION).filter(auto_action='DEL').values():

        cat_name = slugify(dmfileset['type'])
        category_list[cat_name] = {
            'dmfileset': dmfileset,
            'devlist': [],
            'partitions': [],
        }

        for partition in _partitions(file_servers, report_storages):

            if partition['diskusage'] >= dmfileset['auto_trigger_usage']:
                logger.info("%s %s %.2f%% exceeds %s %.0f%%" % (
                    hex(partition['devid']),
                    partition['path'],
                    partition['diskusage'],
                    dmfileset['type'],
                    dmfileset['auto_trigger_usage']))

                category_list[cat_name]['devlist'].append(partition['devid'])
                category_list[cat_name]['partitions'].append(partition)
            else:
                logger.info("%s %s %.2f%% below %s %.0f%%" % (
                    hex(partition['devid']),
                    partition['path'],
                    partition['diskusage'],
                    dmfileset['type'],
                    dmfileset['auto_trigger_usage']))

        # uniquify the deviceid list
        category_list[cat_name]['devlist'] = list(set(category_list[cat_name]['devlist']))

    #-------------------------------------------------------------------------------
    # ARCHIVE action happens as soon as grace period has expired (no threshold check)
    #-------------------------------------------------------------------------------
    for dmfileset in DMFileSet.objects.filter(version=settings.RELVERSION).filter(auto_action='ARC').values():

        cat_name = slugify(dmfileset['type'])
        category_list[cat_name] = {
            'dmfileset': dmfileset,
            'devlist': [],
            'partitions': [],
        }

        # every partition is a candidate for archiving; no usage threshold
        for partition in _partitions(file_servers, report_storages):
            logger.debug("%s %s" % (partition['path'], hex(partition['devid'])))
            category_list[cat_name]['devlist'].append(partition['devid'])
            category_list[cat_name]['partitions'].append(partition)

        # uniquify the deviceid list
        category_list[cat_name]['devlist'] = list(set(category_list[cat_name]['devlist']))

    #-------------------------------------------------------------
    # Action loop - for each category, launch action per deviceid
    #-------------------------------------------------------------
    if not auto_action_enabled:
        logger.info("Data management auto-action is disabled.")

    # update any DMFileStats that are in use by active analysis jobs
    try:
        update_files_in_use()
    except Exception:  # was a bare except; keep SystemExit/KeyboardInterrupt propagating
        logger.error('Unable to update active DMFileStats')
        logger.error(traceback.format_exc())

    # Checks for manual export and archive requests.
    manage_manual_action.delay()

    # NOTE: loop variable renamed from 'dict', which shadowed the builtin.
    for category, cat_info in category_list.iteritems():
        for deviceid in cat_info['devlist']:
            pathlist = [item['path'] for item in cat_info['partitions'] if item['devid'] == deviceid]
            manage_data.delay(deviceid, cat_info['dmfileset'], pathlist, auto_acknowledge, auto_action_enabled)

    return
コード例 #24
0
    def __init__(self, sh_type):
        """Initialize the "Kits" step of the plan wizard.

        Queries the database for every active kit/chip/barcode choice list
        and stores them in ``prepopulatedFields``; seeds ``savedFields``
        with defaults, several of which come from ``GlobalConfig``.

        Args:
            sh_type: step-helper type; stored on the instance and passed
                through to the base step-data class.
        """
        super(KitsStepData, self).__init__(sh_type)
        self.resourcePath = "rundb/plan/page_plan/page_plan_kits.html"
        self.prev_step_url = reverse("page_plan_application")
        self.next_step_url = reverse("page_plan_plugins")

        # 20130827-test
        # self._dependsOn.append(StepNames.IONREPORTER)

        # Steps whose changes require this step to be refreshed.
        self._dependsOn.append(StepNames.APPLICATION)
        self._dependsOn.append(StepNames.BARCODE_BY_SAMPLE)

        # Sample preparation kits (active only).
        self.savedFields[KitsFieldNames.SAMPLE_PREPARATION_KIT] = None
        self.prepopulatedFields[
            KitsFieldNames.SAMPLE_PREP_KITS] = KitInfo.objects.filter(
                kitType="SamplePrepKit", isActive=True).order_by("description")

        # Library kits.
        self.savedFields[KitsFieldNames.LIBRARY_KIT_NAME] = None
        self.prepopulatedFields[
            KitsFieldNames.LIB_KITS] = KitInfo.objects.filter(
                kitType__in=["LibraryKit", "LibraryPrepKit"],
                isActive=True).order_by("description")

        # Forward library keys; default to the first (isDefault ordered first).
        self.savedFields[KitsFieldNames.LIBRARY_KEY] = None
        self.prepopulatedFields[
            KitsFieldNames.FORWARD_LIB_KEYS] = LibraryKey.objects.filter(
                direction="Forward",
                runMode="single").order_by("-isDefault", "name")
        self.savedFields[KitsFieldNames.LIBRARY_KEY] = self.prepopulatedFields[
            KitsFieldNames.FORWARD_LIB_KEYS][0].sequence

        self.savedFields[KitsFieldNames.TF_KEY] = GlobalConfig.get(
        ).default_test_fragment_key

        # Forward 3' adapters; default to the first (isDefault ordered first).
        self.savedFields[KitsFieldNames.FORWARD_3_PRIME_ADAPTER] = None
        self.prepopulatedFields[
            KitsFieldNames.
            FORWARD_3_ADAPTERS] = ThreePrimeadapter.objects.filter(
                direction="Forward",
                runMode="single").order_by("-isDefault", "chemistryType",
                                           "name")
        self.savedFields[
            KitsFieldNames.FORWARD_3_PRIME_ADAPTER] = self.prepopulatedFields[
                KitsFieldNames.FORWARD_3_ADAPTERS][0].sequence

        # Flow orders: choices only; no default selection.
        self.savedFields[KitsFieldNames.FLOW_ORDER] = None
        self.prepopulatedFields[
            KitsFieldNames.FLOW_ORDERS] = FlowOrder.objects.filter(
                isActive=True).order_by("-isDefault", "description")
        self.savedFields[KitsFieldNames.FLOW_ORDER] = None

        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        # no longer default to OneTouch
        # self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = KitsFieldNames.ONE_TOUCH
        self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = None

        # Templating kit choices partitioned by sample-prep instrument:
        # OneTouch excludes "IA" kits, IsoAmp includes only "IA" kits.
        oneTouchDict = {
            KitsFieldNames.KIT_VALUES:
            KitInfo.objects.filter(
                kitType__in=["TemplatingKit", "AvalancheTemplateKit"],
                isActive=True).exclude(
                    samplePrep_instrumentType="IA").order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT:
            None,
        }

        isoAmpDict = {
            KitsFieldNames.KIT_VALUES:
            KitInfo.objects.filter(
                kitType__in=["TemplatingKit"],
                isActive=True,
                samplePrep_instrumentType="IA",
            ).order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT:
            None,
        }
        ionChefDict = {
            KitsFieldNames.KIT_VALUES:
            KitInfo.objects.filter(kitType="IonChefPrepKit",
                                   isActive=True).order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT:
            None,
        }

        self.prepopulatedFields[
            KitsFieldNames.TEMPLATE_KIT_TYPES] = OrderedDict([
                (KitsFieldNames.ONE_TOUCH, oneTouchDict),
                (KitsFieldNames.ION_CHEF, ionChefDict),
                (KitsFieldNames.ISO_AMP, isoAmpDict),
            ])
        # Maps internal template-kit-type keys to UI instrument labels.
        self.ModelsSamplePrepInstrumentToLabelsSamplePrepInstrumentAsDict = {
            KitsFieldNames.ONE_TOUCH: SamplePrepInstrument.OT,
            KitsFieldNames.ION_CHEF: SamplePrepInstrument.IC,
            KitsFieldNames.ISO_AMP: SamplePrepInstrument.IA,
        }

        # Sequencing kits.
        self.savedFields[KitsFieldNames.SEQUENCE_KIT_NAME] = None
        self.prepopulatedFields[
            KitsFieldNames.SEQ_KITS] = KitInfo.objects.filter(
                kitType="SequencingKit", isActive=True).order_by("description")

        # Flat list of all templating kits (union of the per-type lists above).
        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        self.prepopulatedFields[
            KitsFieldNames.TEMPLATE_KITS] = KitInfo.objects.filter(
                kitType__in=[
                    "TemplatingKit", "AvalancheTemplateKit", "IonChefPrepKit"
                ],
                isActive=True,
            ).order_by("description")

        # Control sequence kits.
        self.savedFields[KitsFieldNames.CONTROL_SEQUENCE] = None
        self.prepopulatedFields[
            KitsFieldNames.CONTROL_SEQ_KITS] = KitInfo.objects.filter(
                kitType="ControlSequenceKit",
                isActive=True).order_by("description")

        # Chip types, de-duplicated by description.
        self.savedFields[KitsFieldNames.CHIP_TYPE] = None
        self.prepopulatedFields[
            KitsFieldNames.
            INSTRUMENT_TYPES] = Chip.getInstrumentTypesForActiveChips(
                include_undefined=False)
        self.prepopulatedFields[KitsFieldNames.CHIP_TYPES] = list(
            Chip.objects.filter(isActive=True).order_by(
                "description", "name").distinct("description"))

        # Barcode kit names (distinct, active only).
        self.savedFields[KitsFieldNames.BARCODE_ID] = None
        self.prepopulatedFields[KitsFieldNames.BARCODES] = list(
            dnaBarcode.objects.filter(
                active=True).values("name").distinct().order_by("name"))

        # Analysis defaults pulled from the global configuration.
        gc = GlobalConfig.get()
        self.savedFields[
            KitsFieldNames.IS_DUPLICATED_READS] = gc.mark_duplicates

        self.savedFields[
            KitsFieldNames.BASE_RECALIBRATE] = gc.base_recalibration_mode

        # Base-recalibration mode choices; values are i18n text/title pairs.
        self.prepopulatedFields[
            KitsFieldNames.BASE_RECALIBRATION_MODES] = OrderedDict()
        self.prepopulatedFields[
            KitsFieldNames.BASE_RECALIBRATION_MODES]["standard_recal"] = {
                "text":
                _("workflow.step.kits.base_recalibration_modes.standard_recal"
                  ),
                "title":
                _("workflow.step.kits.base_recalibration_modes.standard_recal.title"
                  ),
            }  # "Default Calibration"
        self.prepopulatedFields[
            KitsFieldNames.BASE_RECALIBRATION_MODES]["panel_recal"] = {
                "text":
                _("workflow.step.kits.base_recalibration_modes.panel_recal"),
                "title":
                _("workflow.step.kits.base_recalibration_modes.panel_recal.title"
                  ),
            }  # "Enable Calibration Standard"
        self.prepopulatedFields[
            KitsFieldNames.BASE_RECALIBRATION_MODES]["blind_recal"] = {
                "text":
                _("workflow.step.kits.base_recalibration_modes.blind_recal"),
                "title":
                _("workflow.step.kits.base_recalibration_modes.blind_recal.title"
                  ),
            }  # "Blind Calibration"
        self.prepopulatedFields[
            KitsFieldNames.BASE_RECALIBRATION_MODES]["no_recal"] = {
                "text":
                _("workflow.step.kits.base_recalibration_modes.no_recal"),
                "title":
                _("workflow.step.kits.base_recalibration_modes.no_recal.title"
                  ),
            }  # "No Calibration"

        self.savedFields[KitsFieldNames.REALIGN] = gc.realign

        # Numeric defaults; 0 means "not yet chosen".
        self.savedFields[KitsFieldNames.FLOWS] = 0
        self.savedFields[KitsFieldNames.LIBRARY_READ_LENGTH] = 0
        self.savedFields[KitsFieldNames.READ_LENGTH] = 0

        self.prepopulatedFields[
            KitsFieldNames.IS_BARCODE_KIT_SELECTION_REQUIRED] = False

        self.prepopulatedFields[KitsFieldNames.TEMPLATING_SIZE_CHOICES] = [
            "200", "400"
        ]
        self.savedFields[KitsFieldNames.TEMPLATING_SIZE] = ""
        # For raptor templating kits, templating size cannot be used to drive UI behavior or db persistence.  Use read length instead.
        self.prepopulatedFields[KitsFieldNames.READ_LENGTH_CHOICES] = [
            "200", "400"
        ]
        # JSON-encoded category->flow-count rules consumed by the UI.
        self.prepopulatedFields[
            KitsFieldNames.FLOWS_FROM_CATEGORY_RULES] = json.dumps(
                KitInfo._category_flowCount_rules)

        # Sample-prep protocols from the controlled-vocabulary table.
        self.savedFields[KitsFieldNames.SAMPLE_PREP_PROTOCOL] = None
        self.prepopulatedFields[
            KitsFieldNames.SAMPLE_PREP_PROTOCOLS] = common_CV.objects.filter(
                isActive=True, cv_type="samplePrepProtocol").order_by("uid")
        self.prepopulatedFields[KitsFieldNames.PLAN_CATEGORIES] = ""
        self.savedFields[KitsFieldNames.ADVANCED_SETTINGS_CHOICE] = "default"
        self.prepopulatedFields[KitsFieldNames.ADVANCED_SETTINGS] = "{}"

        self.sh_type = sh_type
コード例 #25
0
ファイル: data_management.py プロジェクト: dkeren/TS
def fileserver_space_check():
    '''For each file server, compare disk usage to backup threshold value.
    If disk usage exceeds threshold, launch celery task to delete/archive
    raw data directories.

    Side effects: queues ``manage_manual_action`` once and ``manage_data``
    once per (category, device) pair; logs per-partition usage decisions.
    Returns None.
    '''
    # Get GlobalConfig object in order to access auto-acknowledge bit
    gc = GlobalConfig.get()
    auto_acknowledge = gc.auto_archive_ack
    auto_action_enabled = gc.auto_archive_enable

    # Get list of File Server objects
    file_servers = FileServer.objects.all().order_by('pk').values()

    # Get list of Report Storage objects
    report_storages = ReportStorage.objects.all().order_by('pk').values()

    # dict of fileset categories, each with a list of partition ids that can be acted upon.
    category_list = {}

    #-------------------------------------------------
    # DELETE action only happens if threshold reached
    #-------------------------------------------------
    for dmfileset in DMFileSet.objects.filter(
            version=settings.RELVERSION).filter(auto_action='DEL').values():

        cat_name = slugify(dmfileset['type'])
        category_list[cat_name] = {
            'dmfileset': dmfileset,
            'devlist': [],
            'partitions': [],
        }

        for partition in _partitions(file_servers, report_storages):

            if partition['diskusage'] >= dmfileset['auto_trigger_usage']:
                logger.info("%s %s %.2f%% exceeds %s %.0f%%" %
                            (hex(partition['devid']), partition['path'],
                             partition['diskusage'], dmfileset['type'],
                             dmfileset['auto_trigger_usage']))

                category_list[cat_name]['devlist'].append(partition['devid'])
                category_list[cat_name]['partitions'].append(partition)
            else:
                logger.info("%s %s %.2f%% below %s %.0f%%" %
                            (hex(partition['devid']), partition['path'],
                             partition['diskusage'], dmfileset['type'],
                             dmfileset['auto_trigger_usage']))

        # uniquify the deviceid list
        category_list[cat_name]['devlist'] = list(
            set(category_list[cat_name]['devlist']))

    #-------------------------------------------------------------------------------
    #ARCHIVE action happens as soon as grace period has expired (no threshold check)
    #-------------------------------------------------------------------------------
    for dmfileset in DMFileSet.objects.filter(
            version=settings.RELVERSION).filter(auto_action='ARC').values():

        cat_name = slugify(dmfileset['type'])
        category_list[cat_name] = {
            'dmfileset': dmfileset,
            'devlist': [],
            'partitions': [],
        }

        # every partition is a candidate for archiving; no usage threshold
        for partition in _partitions(file_servers, report_storages):
            logger.debug("%s %s" %
                         (partition['path'], hex(partition['devid'])))
            category_list[cat_name]['devlist'].append(partition['devid'])
            category_list[cat_name]['partitions'].append(partition)

        # uniquify the deviceid list
        category_list[cat_name]['devlist'] = list(
            set(category_list[cat_name]['devlist']))

    #-------------------------------------------------------------
    # Action loop - for each category, launch action per deviceid
    #-------------------------------------------------------------
    if not auto_action_enabled:
        logger.info("Data management auto-action is disabled.")

    # update any DMFileStats that are in use by active analysis jobs
    try:
        update_files_in_use()
    except Exception:  # was a bare except; keep SystemExit/KeyboardInterrupt propagating
        logger.error('Unable to update active DMFileStats')
        logger.error(traceback.format_exc())

    # Checks for manual export and archive requests.
    manage_manual_action.delay()

    # NOTE: loop variable renamed from 'dict', which shadowed the builtin.
    for category, cat_info in category_list.iteritems():
        for deviceid in cat_info['devlist']:
            pathlist = [
                item['path'] for item in cat_info['partitions']
                if item['devid'] == deviceid
            ]
            # fire-and-forget celery task; the AsyncResult was never used
            manage_data.delay(deviceid, cat_info['dmfileset'], pathlist,
                              auto_acknowledge, auto_action_enabled)

    return
コード例 #26
0
ファイル: views.py プロジェクト: stevematyas/TS
def dashboard_fragments(request):
    """ Returns the dashboard sections as html in a json object"""
    # Resolve the requested time window; default is the last 24 hours.
    time_span = request.GET.get("time_span", "24hours")
    now = datetime.datetime.now(pytz.UTC)

    # Lower bound of each selectable window, keyed by query-string value.
    DASHBOARD_TIME_SPANS = {
        "hour": now - datetime.timedelta(hours=1),
        "today": now.replace(hour=0, minute=0, second=0, microsecond=0),
        "24hours": now - datetime.timedelta(hours=24),
        "7days": now - datetime.timedelta(days=7),
        # Used for testing only. Do not expose to the UI.
        "__all__": datetime.datetime(year=1971, month=1, day=1),
    }
    if time_span not in DASHBOARD_TIME_SPANS:
        raise Http404("Time span %s not available!" % time_span)

    # runs section
    runs = get_runs_list(DASHBOARD_TIME_SPANS[time_span])
    runs_context = {
        # Runs Section
        "runs": {
            "time_span": time_span,
            "stages": DASHBOARD_STAGES,
            "runs": runs,
        },
    }

    # software update
    update_status = GlobalConfig.get().ts_update_status

    # services: names of system processes that are not currently running
    services_down = [process for process, state in process_set() if not state]

    # cluster nodes (section only shown when crunchers are configured)
    show_cluster = Cruncher.objects.count() > 0
    nodes_down = []
    if show_cluster:
        nodes_down = Cruncher.objects.exclude(state='G').values_list('name',
                                                                     flat=True)

    # data management: per-fileserver disk usage
    disk_usage = {}
    for fs in FileServer.objects.all().order_by('pk'):
        if os.path.exists(fs.filesPrefix):
            disk_usage[fs.filesPrefix] = fs.percentfull

    dm_active_jobs = DMFileStat.objects.filter(
        action_state__in=['AG', 'DG', 'EG', 'SA', 'SE', 'SD', 'IG'
                          ]).values_list('action_state', flat=True)

    # instruments: poll each configured rig, in parallel when there are many
    rigs = Rig.objects.exclude(host_address='')
    num_rigs = len(rigs)
    if num_rigs > 1:
        pool = Pool(processes=min(num_rigs, 20))
        try:
            instruments = pool.map(get_instrument_info, rigs)
        finally:
            # release the workers; the original leaked the pool
            pool.close()
    else:
        instruments = [get_instrument_info(rig) for rig in rigs]

    # generator expressions avoid materializing throwaway lists
    instr_connected = sum(instr['status'] == CONNECTED for instr in instruments)
    instr_offline = sum(instr['status'] == OFFLINE for instr in instruments)
    instr_alarm = sum(instr['status'] == ALARM for instr in instruments)

    summary_context = {
        # Summary Section
        "summary": {
            "ts_version": TS_version,
            "update_status": update_status,
            "instruments": {
                "connected": instr_connected,
                "offline": instr_offline,
                "alerts": instr_alarm,
            },
            "services": {
                "url": reverse("configure_services"),
                "number_services_down": len(services_down),
                "services_down": services_down,
                # already a bool; "True if x else False" was redundant
                "show_cluster": show_cluster,
                "number_nodes_down": len(nodes_down) if show_cluster else "",
            },
            "data_management": {
                "url": reverse("datamanagement"),
                "disk_usage": disk_usage,
                "show_path": len(disk_usage) > 1,
                "dm_jobs": [
                    ("archive in progress", sum(s == 'AG' for s in dm_active_jobs)),
                    ("export in progress", sum(s == 'EG' for s in dm_active_jobs)),
                    ("delete in progress", sum(s == 'DG' for s in dm_active_jobs)),
                    ("import in progress", sum(s == 'IG' for s in dm_active_jobs)),
                    ("archive pending", sum(s == 'SA' for s in dm_active_jobs)),
                    ("export pending", sum(s == 'SE' for s in dm_active_jobs)),
                    ("delete pending", sum(s == 'SD' for s in dm_active_jobs)),
                ]
            }
        },
    }

    # sort by status first, then case-insensitive instrument name
    instruments_context = {
        "instruments":
        sorted(instruments, key=lambda x: (x['status'], x['name'].lower()))
    }

    return HttpResponse(json.dumps({
        "summary":
        render_to_string("rundb/dashboard/fragments/summary.html",
                         summary_context),
        "runs":
        render_to_string("rundb/dashboard/fragments/runs.html", runs_context),
        "instruments":
        render_to_string("rundb/dashboard/fragments/instruments.html",
                         instruments_context)
    }),
                        content_type="application/json")
コード例 #27
0
def isOCP_enabled():
    """Report whether the compendia-OCP flag is set in the global configuration."""
    enabled = GlobalConfig.get().enable_compendia_OCP
    return enabled
コード例 #28
0
ファイル: kits_step_data.py プロジェクト: iontorrent/TS
    def __init__(self, sh_type):
        """Initialize the "Kits" step of the plan wizard.

        Queries the database for every active kit/chip/barcode choice list
        and stores them in ``prepopulatedFields``; seeds ``savedFields``
        with defaults, several of which come from ``GlobalConfig``.

        Args:
            sh_type: step-helper type; stored on the instance and passed
                through to the base step-data class.
        """
        super(KitsStepData, self).__init__(sh_type)
        self.resourcePath = 'rundb/plan/page_plan/page_plan_kits.html'
        self.prev_step_url = reverse("page_plan_application")
        self.next_step_url = reverse("page_plan_plugins")

        # 20130827-test
        # self._dependsOn.append(StepNames.IONREPORTER)

        # Steps whose changes require this step to be refreshed.
        self._dependsOn.append(StepNames.APPLICATION)
        self._dependsOn.append(StepNames.BARCODE_BY_SAMPLE)

        # Sample preparation kits (active only).
        self.savedFields[KitsFieldNames.SAMPLE_PREPARATION_KIT] = None
        self.prepopulatedFields[KitsFieldNames.SAMPLE_PREP_KITS] = KitInfo.objects.filter(kitType='SamplePrepKit', isActive=True).order_by('description')

        # Library kits.
        self.savedFields[KitsFieldNames.LIBRARY_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.LIB_KITS] = KitInfo.objects.filter(kitType__in=["LibraryKit", "LibraryPrepKit"], isActive=True).order_by("description")

        # Forward library keys; default to the first (isDefault ordered first).
        self.savedFields[KitsFieldNames.LIBRARY_KEY] = None
        self.prepopulatedFields[KitsFieldNames.FORWARD_LIB_KEYS] = LibraryKey.objects.filter(direction='Forward', runMode='single').order_by('-isDefault', 'name')
        self.savedFields[KitsFieldNames.LIBRARY_KEY] = self.prepopulatedFields[KitsFieldNames.FORWARD_LIB_KEYS][0].sequence

        self.savedFields[KitsFieldNames.TF_KEY] = GlobalConfig.get().default_test_fragment_key

        # Forward 3' adapters; default to the first (isDefault ordered first).
        self.savedFields[KitsFieldNames.FORWARD_3_PRIME_ADAPTER] = None
        self.prepopulatedFields[KitsFieldNames.FORWARD_3_ADAPTERS] = ThreePrimeadapter.objects.filter(direction='Forward', runMode='single').order_by('-isDefault', 'chemistryType', 'name')
        self.savedFields[KitsFieldNames.FORWARD_3_PRIME_ADAPTER] = self.prepopulatedFields[KitsFieldNames.FORWARD_3_ADAPTERS][0].sequence

        # Flow orders: choices only; no default selection.
        self.savedFields[KitsFieldNames.FLOW_ORDER] = None
        self.prepopulatedFields[KitsFieldNames.FLOW_ORDERS] = FlowOrder.objects.filter(isActive=True).order_by('-isDefault', 'description')
        self.savedFields[KitsFieldNames.FLOW_ORDER] = None

        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        # no longer default to OneTouch
        # self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = KitsFieldNames.ONE_TOUCH
        self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = None

        # Templating kit choices partitioned by sample-prep instrument:
        # OneTouch excludes "IA" kits, IsoAmp includes only "IA" kits.
        oneTouchDict = {
            KitsFieldNames.KIT_VALUES: KitInfo.objects.filter(kitType__in=['TemplatingKit', 'AvalancheTemplateKit'], isActive=True).exclude(samplePrep_instrumentType="IA").order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT: None
        }

        isoAmpDict = {
            KitsFieldNames.KIT_VALUES: KitInfo.objects.filter(kitType__in=['TemplatingKit'], isActive=True, samplePrep_instrumentType="IA").order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT: None
        }
        ionChefDict = {
            KitsFieldNames.KIT_VALUES: KitInfo.objects.filter(kitType='IonChefPrepKit', isActive=True).order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT: None
        }

        self.prepopulatedFields[KitsFieldNames.TEMPLATE_KIT_TYPES] = OrderedDict([
            (KitsFieldNames.ONE_TOUCH, oneTouchDict),
            (KitsFieldNames.ION_CHEF, ionChefDict),
            (KitsFieldNames.ISO_AMP, isoAmpDict)
        ])

        # Sequencing kits.
        self.savedFields[KitsFieldNames.SEQUENCE_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.SEQ_KITS] = KitInfo.objects.filter(kitType='SequencingKit', isActive=True).order_by("description")

        # Flat list of all templating kits (union of the per-type lists above).
        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.TEMPLATE_KITS] = KitInfo.objects.filter(kitType__in=['TemplatingKit', 'AvalancheTemplateKit', 'IonChefPrepKit'], isActive=True).order_by("description")

        # Control sequence kits.
        self.savedFields[KitsFieldNames.CONTROL_SEQUENCE] = None
        self.prepopulatedFields[KitsFieldNames.CONTROL_SEQ_KITS] = KitInfo.objects.filter(kitType='ControlSequenceKit', isActive=True).order_by("description")

        # Chip types, de-duplicated by description.
        self.savedFields[KitsFieldNames.CHIP_TYPE] = None
        self.prepopulatedFields[KitsFieldNames.INSTRUMENT_TYPES] = [(k, v) for k, v in Chip.ALLOWED_INSTRUMENT_TYPES if k]
        self.prepopulatedFields[KitsFieldNames.CHIP_TYPES] = list(Chip.objects.filter(isActive=True).order_by('description', 'name').distinct('description'))

        # Barcode kit names (distinct, active only).
        self.savedFields[KitsFieldNames.BARCODE_ID] = None
        self.prepopulatedFields[KitsFieldNames.BARCODES] = list(dnaBarcode.objects.filter(active=True).values('name').distinct().order_by('name'))

        # Analysis defaults pulled from the global configuration.
        gc = GlobalConfig.get()
        self.savedFields[KitsFieldNames.IS_DUPLICATED_READS] = gc.mark_duplicates

        self.savedFields[KitsFieldNames.BASE_RECALIBRATE] = gc.base_recalibration_mode

        # Base-recalibration mode choices shown to the user.
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES] = OrderedDict()
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES]["standard_recal"] = "Default Calibration"
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES]["panel_recal"] = "Enable Calibration Standard"
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES]["blind_recal"] = "Blind Calibration"
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES]["no_recal"] = "No Calibration"

        self.savedFields[KitsFieldNames.REALIGN] = gc.realign

        # Numeric defaults; 0 means "not yet chosen".
        self.savedFields[KitsFieldNames.FLOWS] = 0
        self.savedFields[KitsFieldNames.LIBRARY_READ_LENGTH] = 0
        self.savedFields[KitsFieldNames.READ_LENGTH] = 0

        self.prepopulatedFields[KitsFieldNames.IS_BARCODE_KIT_SELECTION_REQUIRED] = False

        self.prepopulatedFields[KitsFieldNames.TEMPLATING_SIZE_CHOICES] = ["200", "400"]
        self.savedFields[KitsFieldNames.TEMPLATING_SIZE] = ""
        # For raptor templating kits, templating size cannot be used to drive UI behavior or db persistence.  Use read length instead.
        self.prepopulatedFields[KitsFieldNames.READ_LENGTH_CHOICES] = ["200", "400"]
        # JSON-encoded category->flow-count rules consumed by the UI.
        self.prepopulatedFields[KitsFieldNames.FLOWS_FROM_CATEGORY_RULES] = json.dumps(KitInfo._category_flowCount_rules)

        # Sample-prep protocols from the controlled-vocabulary table.
        self.savedFields[KitsFieldNames.SAMPLE_PREP_PROTOCOL] = None
        self.prepopulatedFields[KitsFieldNames.SAMPLE_PREP_PROTOCOLS] = common_CV.objects.filter(isActive=True, cv_type = "samplePrepProtocol").order_by('uid')
        self.prepopulatedFields[KitsFieldNames.PLAN_CATEGORIES] = ""
        self.savedFields[KitsFieldNames.ADVANCED_SETTINGS_CHOICE] = "default"
        self.prepopulatedFields[KitsFieldNames.ADVANCED_SETTINGS] = "{}"

        self.sh_type = sh_type
コード例 #29
0
ファイル: views.py プロジェクト: iontorrent/TS
def dashboard_fragments(request, skip_runs=False):
    """Return the dashboard sections rendered as html fragments in a json object.

    The response json has three keys -- "summary", "runs" and "instruments" --
    each mapped to a rendered html fragment.  The ``?time_span=`` query
    parameter selects the window for the runs section; an unknown span raises
    Http404.  When ``skip_runs`` is True the (potentially expensive) runs
    query is skipped and an empty runs list is rendered instead.
    """
    time_span = request.GET.get("time_span", "24hours")
    now = datetime.datetime.now(pytz.UTC)

    DASHBOARD_TIME_SPANS = {
        "hour": now - datetime.timedelta(hours=1),
        "today": now.replace(hour=0, minute=0, second=0, microsecond=0),
        "24hours": now - datetime.timedelta(hours=24),
        "7days": now - datetime.timedelta(days=7),
        # Used for testing only. Do not expose to the UI.
        # Timezone-aware so it can be compared against the aware datetimes
        # above and inside get_runs_list without raising TypeError.
        "__all__": datetime.datetime(year=1971, month=1, day=1, tzinfo=pytz.UTC),
    }
    if time_span not in DASHBOARD_TIME_SPANS:
        raise Http404("Time span %s not available!" % time_span)

    # runs section
    if skip_runs:
        runs = []
        runs_error = ""
    else:
        try:
            runs = get_runs_list(DASHBOARD_TIME_SPANS[time_span])
            runs_error = None
        except Exception as err:
            runs = []
            runs_error = str(err)

    runs_context = {
        # Runs Section
        "runs": {
            "time_span": time_span,
            "stages": DASHBOARD_STAGES,
            "runs": runs,
            "error": runs_error,
        },
    }

    # software update
    update_status = GlobalConfig.get().ts_update_status

    # services
    services_down = [process for process, state in process_set() if not state]

    show_cluster = Cruncher.objects.count() > 0
    nodes_down = []
    if show_cluster:
        nodes_down = Cruncher.objects.exclude(state='G').values_list('name', flat=True)

    # storage status
    storage = get_storage_status()

    # data management
    disk_usage = get_disk_usage()

    # Materialize once: the seven per-state counts below then reuse a single
    # db query instead of re-evaluating the queryset for each state.
    dm_active_jobs = list(
        DMFileStat.objects.filter(
            action_state__in=['AG', 'DG', 'EG', 'SA', 'SE', 'SD', 'IG']
        ).values_list('action_state', flat=True)
    )

    # instruments
    rigs = Rig.objects.exclude(host_address='')
    num_rigs = len(rigs)
    if num_rigs > 1:
        # Query instruments in parallel; close/join the pool so worker
        # processes are not leaked on every dashboard refresh.
        pool = Pool(processes=min(num_rigs, 50))
        try:
            instruments = pool.map(get_instrument_info, rigs)
        finally:
            pool.close()
            pool.join()
    else:
        instruments = [get_instrument_info(rig) for rig in rigs]

    instr_connected = sum(instr['status'] == CONNECTED for instr in instruments)
    instr_offline = sum(instr['status'] == OFFLINE for instr in instruments)
    instr_alarm = sum(instr['status'] == ALARM for instr in instruments)

    summary_context = {
        # Summary Section
        "summary": {
            "ts_version": TS_version,
            "update_status": update_status,
            "instruments": {
                "connected": instr_connected,
                "offline": instr_offline,
                "alerts": instr_alarm,
            },
            "services": {
                "url": reverse("configure_services"),
                "number_services_down": len(services_down),
                "services_down": services_down,
                "show_cluster": show_cluster,
                "number_nodes_down": len(nodes_down) if show_cluster else "",
                "show_nas": storage['show_nas'],
                "nas_status": storage.get('nas_status', ''),
                "show_raid": storage['show_raid'],
                "raid_status": storage.get('raid_status', ''),
            },
            "data_management": {
                "url": reverse("datamanagement"),
                "disk_usage": disk_usage,
                "show_path": len(disk_usage) > 1,
                "dm_jobs": [
                    ("archive in progress", dm_active_jobs.count('AG')),
                    ("export in progress", dm_active_jobs.count('EG')),
                    ("delete in progress", dm_active_jobs.count('DG')),
                    ("import in progress", dm_active_jobs.count('IG')),
                    ("archive pending", dm_active_jobs.count('SA')),
                    ("export pending", dm_active_jobs.count('SE')),
                    ("delete pending", dm_active_jobs.count('SD')),
                ]
            }
        },
    }

    instruments_context = {
        "instruments": sorted(instruments, key=lambda x: (x['status'], x['name'].lower()))
    }

    return HttpResponse(json.dumps({
        "summary": render_to_string("rundb/home/fragments/summary.html", summary_context),
        "runs": render_to_string("rundb/home/fragments/runs.html", runs_context),
        "instruments": render_to_string("rundb/home/fragments/instruments.html", instruments_context)
    }), content_type="application/json")
# Code example #30 (score: 0)
# File: kits_step_data.py — Project: BENMFeng/TS
    def __init__(self, sh_type):
        """Populate the Kits step of the plan wizard.

        Seeds ``savedFields`` with per-plan defaults (mostly None/0, plus
        site-wide defaults from GlobalConfig) and ``prepopulatedFields`` with
        the selectable choice lists (kits, library keys, adapters, flow
        orders, chips, barcodes) pulled from the database.
        """
        super(KitsStepData, self).__init__(sh_type)
        self.resourcePath = 'rundb/plan/page_plan/page_plan_kits.html'
        self.prev_step_url = reverse("page_plan_application")
        self.next_step_url = reverse("page_plan_plugins")

        # 20130827-test
        # self._dependsOn.append(StepNames.IONREPORTER)

        self._dependsOn.append(StepNames.APPLICATION)
        self._dependsOn.append(StepNames.BARCODE_BY_SAMPLE)

        # Sample preparation kits
        self.savedFields[KitsFieldNames.SAMPLE_PREPARATION_KIT] = None
        self.prepopulatedFields[KitsFieldNames.SAMPLE_PREP_KITS] = KitInfo.objects.filter(
            kitType='SamplePrepKit', isActive=True).order_by('description')

        # Library kits
        self.savedFields[KitsFieldNames.LIBRARY_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.LIB_KITS] = KitInfo.objects.filter(
            kitType__in=["LibraryKit", "LibraryPrepKit"],
            isActive=True).order_by("description")

        # Forward library keys; default to the first entry (isDefault sorts
        # first).  NOTE(review): assumes at least one forward library key row
        # exists, otherwise [0] raises IndexError -- confirm seed data.
        self.prepopulatedFields[KitsFieldNames.FORWARD_LIB_KEYS] = LibraryKey.objects.filter(
            direction='Forward', runMode='single').order_by('-isDefault', 'name')
        self.savedFields[KitsFieldNames.LIBRARY_KEY] = self.prepopulatedFields[
            KitsFieldNames.FORWARD_LIB_KEYS][0].sequence

        # Site-wide default test fragment key
        self.savedFields[KitsFieldNames.TF_KEY] = GlobalConfig.get().default_test_fragment_key

        # Forward 3' adapters; default to the first entry (same caveat as above).
        self.prepopulatedFields[KitsFieldNames.FORWARD_3_ADAPTERS] = ThreePrimeadapter.objects.filter(
            direction='Forward',
            runMode='single').order_by('-isDefault', 'chemistryType', 'name')
        self.savedFields[KitsFieldNames.FORWARD_3_PRIME_ADAPTER] = self.prepopulatedFields[
            KitsFieldNames.FORWARD_3_ADAPTERS][0].sequence

        # Flow orders: choice list only, no default selection.
        self.savedFields[KitsFieldNames.FLOW_ORDER] = None
        self.prepopulatedFields[KitsFieldNames.FLOW_ORDERS] = FlowOrder.objects.filter(
            isActive=True).order_by('-isDefault', 'description')

        # Avalanche-chemistry forward 3' adapters; default to the first entry.
        self.prepopulatedFields[
            KitsFieldNames.AVALANCHE_FORWARD_3_PRIME_ADAPTERS] = ThreePrimeadapter.objects.filter(
                direction='Forward', runMode='single',
                chemistryType='avalanche').order_by('-isDefault', 'name')
        self.savedFields[
            KitsFieldNames.AVALANCHE_FORWARD_3_PRIME_ADAPTER] = self.prepopulatedFields[
                KitsFieldNames.AVALANCHE_FORWARD_3_PRIME_ADAPTERS][0].sequence

        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        # no longer default to OneTouch
        # self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = KitsFieldNames.ONE_TOUCH
        self.savedFields[KitsFieldNames.TEMPLATE_KIT_TYPE] = None

        # Templating kit choices grouped by prep instrument type; no
        # application-level default for any group.
        oneTouchDict = {
            KitsFieldNames.KIT_VALUES: KitInfo.objects.filter(
                kitType__in=['TemplatingKit', 'AvalancheTemplateKit'],
                isActive=True).order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT: None,
        }

        ionChefDict = {
            KitsFieldNames.KIT_VALUES: KitInfo.objects.filter(
                kitType='IonChefPrepKit', isActive=True).order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT: None,
        }

        oneTouchAvalancheDict = {
            KitsFieldNames.KIT_VALUES: KitInfo.objects.filter(
                kitType__in=['AvalancheTemplateKit'],
                isActive=True).order_by("description"),
            KitsFieldNames.APPLICATION_DEFAULT: None,
        }

        self.prepopulatedFields[KitsFieldNames.TEMPLATE_KIT_TYPES] = {
            KitsFieldNames.ONE_TOUCH: oneTouchDict,
            KitsFieldNames.ION_CHEF: ionChefDict,
            KitsFieldNames.ONE_TOUCH_AVALANCHE: oneTouchAvalancheDict,
        }

        # Sequencing kits
        self.savedFields[KitsFieldNames.SEQUENCE_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.SEQ_KITS] = KitInfo.objects.filter(
            kitType='SequencingKit', isActive=True).order_by("description")

        # Flat templating kit list (all prep types combined)
        self.savedFields[KitsFieldNames.TEMPLATE_KIT_NAME] = None
        self.prepopulatedFields[KitsFieldNames.TEMPLATE_KITS] = KitInfo.objects.filter(
            kitType__in=['TemplatingKit', 'AvalancheTemplateKit', 'IonChefPrepKit'],
            isActive=True).order_by("description")

        # Control sequence kits
        self.savedFields[KitsFieldNames.CONTROL_SEQUENCE] = None
        self.prepopulatedFields[KitsFieldNames.CONTROL_SEQ_KITS] = KitInfo.objects.filter(
            kitType='ControlSequenceKit', isActive=True).order_by("description")

        # Chip / instrument choices; drop instrument entries with a blank key.
        self.savedFields[KitsFieldNames.CHIP_TYPE] = None
        self.prepopulatedFields[KitsFieldNames.INSTRUMENT_TYPES] = [
            (k, v) for k, v in Chip.ALLOWED_INSTRUMENT_TYPES if k
        ]
        self.prepopulatedFields[KitsFieldNames.CHIP_TYPES] = list(
            Chip.objects.filter(isActive=True).order_by(
                'description', 'name').distinct('description'))

        # Barcode kits (distinct names only)
        self.savedFields[KitsFieldNames.BARCODE_ID] = None
        self.prepopulatedFields[KitsFieldNames.BARCODES] = list(
            dnaBarcode.objects.filter(
                active=True).values('name').distinct().order_by('name'))

        # Site-wide analysis defaults
        gc = GlobalConfig.get()
        self.savedFields[KitsFieldNames.IS_DUPLICATED_READS] = gc.mark_duplicates
        self.savedFields[KitsFieldNames.BASE_RECALIBRATE] = gc.base_recalibration_mode

        # Ordered so the UI presents calibration modes in this fixed order.
        self.prepopulatedFields[KitsFieldNames.BASE_RECALIBRATION_MODES] = OrderedDict([
            ("standard_recal", "Default Calibration"),
            ("panel_recal", "Enable Calibration Standard"),
            ("blind_recal", "Blind Calibration"),
            ("no_recal", "No Calibration"),
        ])

        self.savedFields[KitsFieldNames.REALIGN] = gc.realign

        self.savedFields[KitsFieldNames.FLOWS] = 0
        self.savedFields[KitsFieldNames.LIBRARY_READ_LENGTH] = 0
        self.savedFields[KitsFieldNames.READ_LENGTH] = 0

        self.prepopulatedFields[KitsFieldNames.IS_BARCODE_KIT_SELECTION_REQUIRED] = False

        self.prepopulatedFields[KitsFieldNames.TEMPLATING_SIZE_CHOICES] = ["200", "400"]
        self.savedFields[KitsFieldNames.TEMPLATING_SIZE] = ""
        # For raptor templating kits, templating size cannot be used to drive UI behavior or db persistence.  Use read length instead.
        self.prepopulatedFields[KitsFieldNames.READ_LENGTH_CHOICES] = ["200", "400"]
        self.prepopulatedFields[KitsFieldNames.FLOWS_FROM_CATEGORY_RULES] = json.dumps(
            KitInfo._category_flowCount_rules)

        self.savedFields[KitsFieldNames.SAMPLE_PREP_PROTOCOL] = None
        self.prepopulatedFields[KitsFieldNames.SAMPLE_PREP_PROTOCOLS] = common_CV.objects.filter(
            isActive=True, cv_type="samplePrepProtocol").order_by('uid')

        self.sh_type = sh_type