Example #1
def generate_csa(result_pk, monitor_pk=None):
    result = models.Results.objects.get(pk=result_pk)
    report_dir = result.get_report_dir()
    raw_data_dir = result.experiment.expDir

    try:
        monitor = models.FileMonitor.objects.get(pk=monitor_pk)
    except models.FileMonitor.DoesNotExist:
        monitor = models.FileMonitor()
        monitor.tags = "generate_csa"

    csa_file_name = "csa_{0:04d}.zip".format(int(result_pk))
    monitor.status = "Generating"
    monitor.local_dir = report_dir
    monitor.name = csa_file_name
    monitor.save()

    # Generate report PDF file.
    # This will create a file named report.pdf in results directory
    makePDF.write_report_pdf(result_pk)
    csa_path = makeCSA.makeCSA(report_dir, raw_data_dir, monitor.name)

    digest_hex, digest_64, size = md5_stats_file(csa_path)
    monitor.md5sum = digest_hex
    monitor.size = size
    monitor.status = "Generated"
    monitor.save()
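
For reference, the md5_stats_file helper called above returns a hex MD5 digest, a base64 MD5 digest, and the size of the finished CSA zip. A minimal sketch of such a helper, shown here only as an illustration (hypothetical implementation; the actual TS utility may differ):

import base64
import hashlib
import os

def md5_stats_file(path, chunk_size=1024 * 1024):
    # Return (hex digest, base64 digest, size in bytes) for the file at path.
    md5 = hashlib.md5()
    with open(path, "rb") as handle:
        # Hash in chunks so a large CSA archive does not need to fit in memory.
        for chunk in iter(lambda: handle.read(chunk_size), b""):
            md5.update(chunk)
    digest_hex = md5.hexdigest()
    digest_64 = base64.b64encode(md5.digest()).decode("ascii")
    size = os.path.getsize(path)
    return digest_hex, digest_64, size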
Example #2
File: data_export.py Project: tw7649116/TS
def generate_csa(result_pk, monitor_pk=None):
    result = models.Results.objects.get(pk=result_pk)
    report_dir = result.get_report_dir()
    raw_data_dir = result.experiment.expDir

    try:
        monitor = models.FileMonitor.objects.get(pk=monitor_pk)
    except models.FileMonitor.DoesNotExist:
        monitor = models.FileMonitor()
        monitor.tags = "generate_csa"

    csa_file_name = "csa_{0:04d}.zip".format(int(result_pk))
    monitor.status = "Generating"
    monitor.local_dir = report_dir
    monitor.name = csa_file_name
    monitor.save()

    # Generate report PDF file.
    # This will create a file named report.pdf in results directory
    makePDF.write_report_pdf(result_pk)
    csa_path = makeCSA.makeCSA(report_dir, raw_data_dir, monitor.name)

    digest_hex, digest_64, size = md5_stats_file(csa_path)
    monitor.md5sum = digest_hex
    monitor.size = size
    monitor.status = "Generated"
    monitor.save()
Example #3
def _create_archival_files(dmfilestat):
    logger.debug("Function: %s()" % sys._getframe().f_code.co_name)
    try:
        makePDF.makePDF(dmfilestat.result_id)
        csaFullPath = makeCSA.makeCSA(dmfilestat.result.get_report_dir(), dmfilestat.result.experiment.expDir)
    except:
        logger.error("Could not create Report PDF or CSA")
        raise
Example #4
File: dmactions.py Project: aidjek/TS
def _create_archival_files(dmfilestat):
    logger.debug("Function: %s()" % sys._getframe().f_code.co_name)
    try:
        makePDF.makePDF(dmfilestat.result_id)
        csaFullPath = makeCSA.makeCSA(dmfilestat.result.get_report_dir(),
                                      dmfilestat.result.experiment.expDir)
    except:
        logger.error("Could not create Report PDF or CSA")
        raise
Example #5
File: dmactions.py Project: biocyberman/TS
def _create_archival_files(dmfilestat):
    logger.debug("Function: %s()" % sys._getframe().f_code.co_name, extra=logid)

    # This check is needed for cases where src dir has been manually deleted
    report_dir = dmfilestat.result.get_report_dir()
    if not os.path.exists(report_dir):
        raise DMExceptions.SrcDirDoesNotExist(report_dir)

    try:
        makePDF.write_summary_pdf(dmfilestat.result_id)
    except:
        logger.error("Could not create Report PDF", extra=logid)
        raise

    try:
        makeCSA.makeCSA(dmfilestat.result.get_report_dir(), dmfilestat.result.experiment.expDir)
    except:
        logger.error("Could not create CSA", extra=logid)
        raise
Example #6
File: dmactions.py Project: basmaNasser/TS
def _create_archival_files(dmfilestat):
    logger.debug("Function: %s()" % sys._getframe().f_code.co_name, extra = logid)

    # This check is needed for cases where src dir has been manually deleted
    report_dir = dmfilestat.result.get_report_dir()
    if not os.path.exists(report_dir):
        raise DMExceptions.SrcDirDoesNotExist(report_dir)

    try:
        makePDF.write_summary_pdf(dmfilestat.result_id)
    except:
        logger.error("Could not create Report PDF", extra = logid)
        raise

    try:
        #TS-7385.  When executed by celery task, the files created are owned by root.root.
        #This hack sets the ownership to the same as the report directory.
        pdf_files = [
            'report.pdf',
            'plugins.pdf',
            'backupPDF.pdf',
            os.path.basename(dmfilestat.result.get_report_dir())+'-full.pdf',
            os.path.basename(dmfilestat.result.get_report_dir())+'.support.zip'
            ]
        report_path = dmfilestat.result.get_report_dir()
        uid = os.stat(report_path).st_uid
        gid = os.stat(report_path).st_gid
        for pdf_file in pdf_files:
            if os.path.exists(os.path.join(report_path, pdf_file)):
                os.chown(os.path.join(report_path, pdf_file), uid, gid)
    except:
        logger.warn("Something failed while changing ownership of pdf or support zip file", extra = logid)
        logger.debug(traceback.format_exc(), extra = logid)

    try:
        makeCSA.makeCSA(dmfilestat.result.get_report_dir(), dmfilestat.result.experiment.expDir)
    except:
        logger.error("Could not create CSA", extra = logid)
        raise
Example #7
File: dmactions.py Project: skner/TS
def _create_archival_files(dmfilestat):
    logger.debug("Function: %s()" % sys._getframe().f_code.co_name, extra = logid)

    # This check is needed for cases where src dir has been manually deleted
    report_dir = dmfilestat.result.get_report_dir()
    if not os.path.exists(report_dir):
        raise DMExceptions.SrcDirDoesNotExist(report_dir)

    try:
        makePDF.write_summary_pdf(dmfilestat.result_id)
    except:
        logger.error("Could not create Report PDF", extra = logid)
        raise

    try:
        #TS-7385.  When executed by celery task, the files created are owned by root.root.
        #This hack sets the ownership to the same as the report directory.
        pdf_files = [
            'report.pdf',
            'plugins.pdf',
            'backupPDF.pdf',
            os.path.basename(dmfilestat.result.get_report_dir())+'-full.pdf',
            os.path.basename(dmfilestat.result.get_report_dir())+'.support.zip'
            ]
        report_path = dmfilestat.result.get_report_dir()
        uid = os.stat(report_path).st_uid
        gid = os.stat(report_path).st_gid
        for pdf_file in pdf_files:
            if os.path.exists(os.path.join(report_path, pdf_file)):
                os.chown(os.path.join(report_path, pdf_file), uid, gid)
    except:
        logger.warn("Something failed while changing ownership of pdf or support zip file", extra = logid)
        logger.debug(traceback.format_exc(), extra = logid)

    try:
        makeCSA.makeCSA(dmfilestat.result.get_report_dir(), dmfilestat.result.experiment.expDir)
    except:
        logger.error("Could not create CSA", extra = logid)
        raise
Example #8
def archiveReport(pkR, dest, comment, _logger):

    logger = _logger

    result = shortcuts.get_object_or_404(models.Results, pk=pkR)
    oldDir = result.get_report_dir()
    reportFolder = os.path.basename(oldDir)
    rawDataDir = result.experiment.expDir

    # This is the directory for the result.  Exported reports will create subdirs in it
    newReportDir = os.path.join(dest, reportFolder)
    if not os.path.isdir(newReportDir):
        old_umask = os.umask(0000)
        os.makedirs(newReportDir)
        os.umask(old_umask)
        logger.debug("Created dir: %s" % newReportDir)

    # make the Report PDF file now. backupPDF.pdf
    try:
        logger.debug("Making PDF of the report")
        makePDF.makePDF(pkR)
        logger.debug("Made PDF of the report")
    except:
        # TODO: Do we continue?  OR do we abort?
        logger.exception(traceback.format_exc())
        raise

    # make a customer support archive file
    try:
        logger.debug("Making CSA")
        csaFullPath = makeCSA.makeCSA(oldDir, rawDataDir)
        logger.debug("Made CSA")
    except:
        # TODO: Do we continue?  OR do we abort?
        logger.exception(traceback.format_exc())
        raise

    dt = datetime.datetime.now()
    dtForm = '%d_%02d_%02d_%02d_%02d_%02d' % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
    newDir = os.path.join(dest, reportFolder, dtForm)

    if getSize(oldDir) >= getSpace(newReportDir):
        raise Exception("Not enough space to archive report '%s' at %s" % (result.resultsName, newReportDir))

    result.updateMetaData(ARCHIVING, "Archiving report '%s' to %s" % (result.resultsName, newDir), 0, comment, logger=logger)
    # copy the entire directory from oldDir to the archive destination.
    try:
        # We desire to copy all files pointed to by symlinks; do not want just symlinks in the archive location
        shutil.copytree(oldDir, newDir, symlinks=False)
        result.updateMetaData(ARCHIVING, "Directory copy successful", 0, comment, logger=logger)
    except:
        logger.exception(traceback.format_exc())
        raise

    ##check the sizes of the old directory and the destination.
    total_size1 = getSize(oldDir)
    #total_size2 = getSize(newDir)
    #logger.debug('size old dir: %d size new dir: %d'%(total_size1, total_size2))

    ##if the sizes are identical, that means that all of the files were copied. If so, delete the old directory.
    #logger.debug('sizecheck complete, preparing to delete if sizes match')
    #if total_size1 == total_size2:
    try:
        # Remove the contents of the results directory
        # N.B. Execute as celery task for the sole reason of getting it executed with root permissions
        dir_del = removeDirContents.delay(oldDir)
        dir_del.get()
    except:
        logger.exception(traceback.format_exc())
        raise Exception("Archive created but there was an error in cleaning up source data.  See /var/log/ion/reportsLog.log.")

    for pdffile in ['backupPDF.pdf','report.pdf','plugins.pdf']:
        try:
            # Copy any pdf files
            shutil.copyfile(os.path.join(newDir, pdffile), os.path.join(oldDir, pdffile))
            os.chmod(os.path.join(oldDir, pdffile), 0777)
        except:
            logger.exception(traceback.format_exc())
    
    try:
        # Copy the customer support archive file from the archive location back to results directory
        shutil.copyfile(os.path.join(newDir, os.path.basename(csaFullPath)), csaFullPath)
        os.chmod(csaFullPath, 0777)

        result.updateMetaData(ARCHIVED, "Finished archiving.", total_size1, comment, logger=logger)
    except:
        logger.exception(traceback.format_exc())
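
Example #8 relies on getSize and getSpace helpers to decide whether the destination has enough room for the archive. A minimal sketch of what such helpers might look like, assuming getSize walks the report directory and getSpace reads free space from the destination filesystem (hypothetical implementations, not the actual TS code):

import os

def getSize(path):
    # Total size in bytes of all regular files under path.
    total = 0
    for root, _, files in os.walk(path):
        for name in files:
            full_path = os.path.join(root, name)
            if os.path.isfile(full_path):
                total += os.path.getsize(full_path)
    return total

def getSpace(path):
    # Free space in bytes on the filesystem containing path (Unix only).
    stats = os.statvfs(path)
    return stats.f_bavail * stats.f_frsize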
Example #9
    def launch(self, data=None):
        self.log.info("Launching Field Support.")

        with open('startplugin.json', 'r') as start_plugin_file:
            self.start_plugin = json.load(start_plugin_file)

        # Exit early if this is not a thumbnail run
        if self.start_plugin["runplugin"][
                "run_type"] != "thumbnail" and self.start_plugin['runinfo'][
                    'platform'] != "pgm":
            self.state["warning"] = "This plugin can only be run on thumbnail or PGM reports. " \
                                    "Please rerun this plugin on this run's thumbnail report."
            self.write_status()
            self.log.info("Field Support Aborted.")
            return False

        self.state["progress"] = 10
        self.write_status()

        results_dir = self.start_plugin['runinfo']['results_dir']
        zip_name = self.start_plugin["expmeta"][
            "results_name"] + ".FieldSupport.zip"
        zip_path = os.path.join(results_dir, zip_name)

        # Make CSA zip using pipeline utils makeCSA
        makeCSA.makeCSA(
            self.start_plugin["runinfo"]["report_root_dir"],
            self.start_plugin["runinfo"]["raw_data_dir"], zip_path,
            self.start_plugin.get('chefSummary',
                                  dict()).get('chefLogPath', ''))

        self.state["progress"] = 30
        self.write_status()

        os.mkdir(os.path.join(results_dir, "FieldSupport"))

        # Now run each rndplugin
        for name, options in self.plugin_options.items():
            self.run_rndplugin(name)

        self.state["progress"] = 70
        self.write_status()

        # Modify zip archive to include extra files
        with zipfile.ZipFile(zip_path,
                             mode='a',
                             compression=zipfile.ZIP_DEFLATED,
                             allowZip64=True) as f:
            # Write indicator
            f.writestr('FieldSupport/version', self.version)

            # Write the thumbnail report pdf if it is not present
            if "report.pdf" not in f.namelist():
                pdf_content = self.fetch_thumbnail_report_pdf()
                if pdf_content:
                    f.writestr('report.pdf', pdf_content)

            # Now we need to find the pk of a non thumbnail report
            if "full_report.pdf" not in f.namelist():
                try:
                    pdf_content = self.fetch_fullchip_report_pdf()
                    if pdf_content:
                        f.writestr('full_report.pdf', pdf_content)
                except Exception as e:
                    self.log.info(
                        "Failed to fetch full chip report. This will always fail on clusters."
                    )
                    self.log.exception(e)

            # Add rndplugin files
            for name, options in self.plugin_options.items():
                for root, _, file_names in os.walk(
                        os.path.join(results_dir, "FieldSupport", name)):
                    for pattern in options["files"]:
                        for file_name in fnmatch.filter(file_names, pattern):
                            f.write(
                                os.path.join(root, file_name),
                                os.path.join("FieldSupport", name, file_name))

        # Remove rndplugins output
        shutil.rmtree(os.path.join(results_dir, "FieldSupport"))

        self.state["progress"] = 90
        self.write_status()

        # Convert zip archive to tar.xz
        tar_name = self.start_plugin["expmeta"]["results_name"][0:(
            128 - 20)] + ".FieldSupport.tar.xz"
        temp_dir = os.path.join(results_dir, "temp")

        subprocess.check_call(["unzip", "-q", zip_path, "-d", temp_dir])
        subprocess.check_call(["tar", "cfJ", tar_name, "-C", temp_dir, "."],
                              env={"XZ_OPT": "-9"})

        # Remove temp dir and zip archive
        shutil.rmtree(temp_dir)
        os.unlink(zip_path)

        # Link up the zip
        self.state["download_link"] = tar_name

        self.state["progress"] = 100
        self.write_status()

        self.log.info("Field Support Complete.")
        return True
Example #10
def archiveReport(pkR, dest, comment, _logger):

    logger = _logger

    result = shortcuts.get_object_or_404(models.Results, pk=pkR)
    oldDir = result.get_report_dir()
    reportFolder = os.path.basename(oldDir)
    rawDataDir = result.experiment.expDir

    # This is the directory for the result.  Exported reports will create subdirs in it
    newReportDir = os.path.join(dest, reportFolder)
    if not os.path.isdir(newReportDir):
        old_umask = os.umask(0000)
        os.makedirs(newReportDir)
        os.umask(old_umask)
        logger.debug("Created dir: %s" % newReportDir)

    # make the Report PDF file now. backupPDF.pdf
    try:
        logger.debug("Making PDF of the report")
        makePDF.makePDF(pkR)
        logger.debug("Made PDF of the report")
    except:
        # TODO: Do we continue?  OR do we abort?
        logger.exception(traceback.format_exc())
        raise

    # make a customer support archive file
    try:
        logger.debug("Making CSA")
        csaFullPath = makeCSA.makeCSA(oldDir, rawDataDir)
        logger.debug("Made CSA")
    except:
        # TODO: Do we continue?  OR do we abort?
        logger.exception(traceback.format_exc())
        raise

    dt = datetime.datetime.now()
    dtForm = '%d_%02d_%02d_%02d_%02d_%02d' % (dt.year, dt.month, dt.day,
                                              dt.hour, dt.minute, dt.second)
    newDir = os.path.join(dest, reportFolder, dtForm)

    if getSize(oldDir) >= getSpace(newReportDir):
        raise Exception("Not enough space to archive report '%s' at %s" %
                        (result.resultsName, newReportDir))

    result.updateMetaData(ARCHIVING,
                          "Archiving report '%s' to %s" %
                          (result.resultsName, newDir),
                          0,
                          comment,
                          logger=logger)
    # copy the entire directory from oldDir to the archive destination.
    try:
        # We desire to copy all files pointed to by symlinks; do not want just symlinks in the archive location
        shutil.copytree(oldDir, newDir, symlinks=False)
        result.updateMetaData(ARCHIVING,
                              "Directory copy successful",
                              0,
                              comment,
                              logger=logger)
    except:
        logger.exception(traceback.format_exc())
        raise

    ##check the sizes of the old directory and the destination.
    total_size1 = getSize(oldDir)
    #total_size2 = getSize(newDir)
    #logger.debug('size old dir: %d size new dir: %d'%(total_size1, total_size2))

    ##if the sizes are identical, that means that all of the files were copied. If so, delete the old directory.
    #logger.debug('sizecheck complete, preparing to delete if sizes match')
    #if total_size1 == total_size2:
    try:
        # Remove the contents of the results directory
        # N.B. Execute as celery task for the sole reason of getting it executed with root permissions
        dir_del = removeDirContents.delay(oldDir)
        dir_del.get()
    except:
        logger.exception(traceback.format_exc())
        raise Exception(
            "Archive created but there was an error in cleaning up source data.  See /var/log/ion/reportsLog.log."
        )

    for pdffile in ['backupPDF.pdf', 'report.pdf', 'plugins.pdf']:
        try:
            # Copy any pdf files
            shutil.copyfile(os.path.join(newDir, pdffile),
                            os.path.join(oldDir, pdffile))
            os.chmod(os.path.join(oldDir, pdffile), 0777)
        except:
            logger.exception(traceback.format_exc())

    try:
        # Copy the customer support archive file from the archive location back to results directory
        shutil.copyfile(os.path.join(newDir, os.path.basename(csaFullPath)),
                        csaFullPath)
        os.chmod(csaFullPath, 0777)

        result.updateMetaData(ARCHIVED,
                              "Finished archiving.",
                              total_size1,
                              comment,
                              logger=logger)
    except:
        logger.exception(traceback.format_exc())
Example #11
    def launch(self):
        self.log.info("Launching Field Support.")

        with open('startplugin.json', 'r') as start_plugin_file:
            self.start_plugin = json.load(start_plugin_file)

        # Exit early if this is not a thumbnail run
        if self.start_plugin["runplugin"]["run_type"] != "thumbnail":
            self.state["warning"] = "This plugin can only be run on thumbnail reports. " \
                                    "Please rerun this plugin on this run's thumbnail report."
            self.write_status()
            self.log.info("Field Support Aborted.")
            return False

        self.state["progress"] = 10
        self.write_status()

        results_dir = self.start_plugin['runinfo']['results_dir']
        zip_name = self.start_plugin["expmeta"]["results_name"] + ".FieldSupport.zip"
        zip_path = os.path.join(results_dir, zip_name)

        # Make CSA zip using pipeline utils makeCSA
        makeCSA.makeCSA(
            self.start_plugin["runinfo"]["report_root_dir"],
            self.start_plugin["runinfo"]["raw_data_dir"],
            zip_path
        )

        self.state["progress"] = 30
        self.write_status()

        os.mkdir(os.path.join(results_dir, "FieldSupport"))

        # Now run each rndplugin
        for name, options in self.plugin_options.items():
            self.run_rndplugin(name)

        self.state["progress"] = 70
        self.write_status()

        # Modify zip archive to include extra files
        with zipfile.ZipFile(zip_path, mode='a', compression=zipfile.ZIP_DEFLATED, allowZip64=True) as f:
            # Write indicator
            f.writestr('FieldSupport/version', self.version)

            # Write the thumbnail report pdf if it is not present
            if "report.pdf" not in f.namelist():
                pdf_content = self.fetch_thumbnail_report_pdf()
                if pdf_content:
                    f.writestr('report.pdf', pdf_content)

            # Now we need to find the pk of a non thumbnail report
            if "full_report.pdf" not in f.namelist():
                try:
                    pdf_content = self.fetch_fullchip_report_pdf()
                    if pdf_content:
                        f.writestr('full_report.pdf', pdf_content)
                except Exception as e:
                    self.log.info("Failed to fetch full chip report. This will always fail on clusters.")
                    self.log.exception(e)

            # Add rndplugin files
            for name, options in self.plugin_options.items():
                for root, _, file_names in os.walk(os.path.join(results_dir, "FieldSupport", name)):
                    for pattern in options["files"]:
                        for file_name in fnmatch.filter(file_names, pattern):
                            f.write(os.path.join(root, file_name), os.path.join("FieldSupport", name, file_name))

        # Remove rndplugins output
        shutil.rmtree(os.path.join(results_dir, "FieldSupport"))

        self.state["progress"] = 90
        self.write_status()

        # Convert zip archive to tar.xz
        tar_name = self.start_plugin["expmeta"]["results_name"][0:(128 - 20)] + ".FieldSupport.tar.xz"
        temp_dir = os.path.join(results_dir, "temp")

        subprocess.check_call(["unzip", "-q", zip_path, "-d", temp_dir])
        subprocess.check_call(["tar", "cfJ", tar_name, "-C", temp_dir, "."], env={"XZ_OPT": "-9"})

        # Remove temp dir and zip archive
        shutil.rmtree(temp_dir)
        os.unlink(zip_path)

        # Link up the zip
        self.state["download_link"] = tar_name

        self.state["progress"] = 100
        self.write_status()

        self.log.info("Field Support Complete.")
        return True
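
Taken together, the examples show makeCSA.makeCSA being called with the report directory and the raw data directory, optionally followed by an output zip name or path (Examples #1 and #11) and a Chef log path (Example #9), and returning the path of the archive it creates (Example #8). A short usage sketch combining those observed call patterns; the import path and placeholder paths below are assumptions:

from ion.utils import makeCSA  # import path is an assumption; adjust to the local TS install

report_dir = "/results/analysis/output/Home/Auto_example_001"  # placeholder
raw_data_dir = "/results/raw/example_001"                      # placeholder

# Two-argument form, as in Example #8: returns the full path of the CSA it writes.
csa_path = makeCSA.makeCSA(report_dir, raw_data_dir)

# Three-argument form, as in Examples #1 and #11: the caller names the output zip.
csa_path = makeCSA.makeCSA(report_dir, raw_data_dir, "csa_0001.zip")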