Example #1
def copy_files(frequency_id):
    s3_helper = S3Helper()
    # Look in the output directory
    LOGGER.info('directory_data: {0}'.format(CHILES_CLEAN_OUTPUT))
    for dir_name in os.listdir(CHILES_CLEAN_OUTPUT):
        LOGGER.info('dir_name: {0}'.format(dir_name))
        result_dir = join(CHILES_CLEAN_OUTPUT, dir_name)
        if isdir(result_dir) and dir_name.startswith('cube_') and dir_name.endswith('.image'):
            LOGGER.info('dir_name: {0}'.format(dir_name))
            output_tar_filename = join(CHILES_CLEAN_OUTPUT, dir_name + '.tar')

            if can_be_multipart_tar(result_dir):
                LOGGER.info('Using add_tar_to_bucket_multipart')
                s3_helper.add_tar_to_bucket_multipart(
                    CHILES_BUCKET_NAME,
                    '/CLEAN/{0}/{1}'.format(frequency_id, basename(output_tar_filename)),
                    result_dir)
            else:
                LOGGER.info('Using make_tarfile, then adding file to bucket')
                make_tarfile(output_tar_filename, result_dir)

                s3_helper.add_file_to_bucket(
                    CHILES_BUCKET_NAME,
                    'CVEL/{0}/{1}/data.tar'.format(frequency_id, basename(output_tar_filename)),
                    output_tar_filename)

                # Clean up
                os.remove(output_tar_filename)
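
All of the examples on this page call a make_tarfile helper whose definition is not shown. A minimal sketch of such a helper, assuming it is just a thin wrapper around the standard-library tarfile module (the gzip mode and arcname choice are assumptions, since the examples write both .tar and .tgz archives), could look like this:

import os
import tarfile


def make_tarfile(output_filename, source_dir):
    # Assumed implementation: pack source_dir into a single compressed tar
    # archive, storing its contents under the directory's base name.
    with tarfile.open(output_filename, 'w:gz') as tar:
        tar.add(source_dir, arcname=os.path.basename(source_dir.rstrip('/')))
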
Example #2
def copy_files(date, vis_file):
    s3_helper = S3Helper()
    # Look in the output directory
    for root, dir_names, filenames in os.walk(CHILES_CVEL_OUTPUT):
        LOGGER.info('root: {0}, dir_names: {1}, filenames: {2}'.format(
            root, dir_names, filenames))
        for match in fnmatch.filter(dir_names, vis_file):
            result_dir = join(root, match)
            LOGGER.info('Working on: {0}'.format(result_dir))

            if can_be_multipart_tar(result_dir):
                LOGGER.info('Using add_tar_to_bucket_multipart')
                s3_helper.add_tar_to_bucket_multipart(
                    CHILES_BUCKET_NAME,
                    'CVEL/{0}/{1}/data.tar'.format(vis_file, date), result_dir)
            else:
                LOGGER.info('Using make_tarfile, then adding file to bucket')
                output_tar_filename = join(root, match + '.tar')
                make_tarfile(output_tar_filename, result_dir)

                s3_helper.add_file_to_bucket(
                    CHILES_BUCKET_NAME,
                    'CVEL/{0}/{1}/data.tar'.format(vis_file, date),
                    output_tar_filename)

                # Clean up
                os.remove(output_tar_filename)

            shutil.rmtree(result_dir, ignore_errors=True)
Example #3
def fileDownload(self, *args):
    cl = client.TCPClient(LOG.getLogger("clientlog", "Agent"))
    testid = int(args[2])
    current_test = get_test(testid)
    test_logger = None
    try:
        if current_test:
            test_logger = LOG.gettestlogger(current_test, "STAT")
            lctx.debug("FILE DOWNLOAD | " + str(current_test.testid) +
                       " | START")
            lctx.debug("Preparing TAR file of system metric folder")
            test_logger.info("Preparing TAR file of system metric folder")
            common.make_tarfile(current_test.archivedir + "results.tgz",
                                current_test.resultsdir + "/")
            dest = current_test.tobj.testobj.TestInputData.stats_results_path[
                current_test.stathostip]
            download_file = current_test.archivedir + "results.tgz"
            test_logger.info("Sending TAR file to daytona host")
            cl.sendFile(current_test.serverip, current_test.serverport,
                        download_file, dest.strip())
            lctx.debug("FILE DOWNLOAD | " + str(current_test.testid) +
                       " | COMPLETE")
            return "SUCCESS"
        else:
            raise Exception("Invalid Test ID")

    except Exception as e:
        lctx.error(e)
        if test_logger:
            test_logger.error(e)
        return "ERROR"
Example #4
def copy_files(date, vis_file):
    s3_helper = S3Helper()
    # Look in the output directory
    for root, dir_names, filenames in os.walk(CHILES_CVEL_OUTPUT):
        LOGGER.info('root: {0}, dir_names: {1}, filenames: {2}'.format(root, dir_names, filenames))
        for match in fnmatch.filter(dir_names, vis_file):
            result_dir = join(root, match)
            LOGGER.info('Working on: {0}'.format(result_dir))

            if can_be_multipart_tar(result_dir):
                LOGGER.info('Using add_tar_to_bucket_multipart')
                s3_helper.add_tar_to_bucket_multipart(
                    CHILES_BUCKET_NAME,
                    'CVEL/{0}/{1}/data.tar'.format(vis_file, date),
                    result_dir)
            else:
                LOGGER.info('Using make_tarfile, then adding file to bucket')
                output_tar_filename = join(root, match + '.tar')
                make_tarfile(output_tar_filename, result_dir)

                s3_helper.add_file_to_bucket(
                    CHILES_BUCKET_NAME,
                    'CVEL/{0}/{1}/data.tar'.format(vis_file, date),
                    output_tar_filename)

                # Clean up
                os.remove(output_tar_filename)

            shutil.rmtree(result_dir, ignore_errors=True)
Example #5
def copy_files(cube):
    s3_helper = S3Helper()
    # Look in the output directory
    directory_to_save = join(CHILES_IMGCONCAT_OUTPUT, cube) + '.cube'
    if isdir(directory_to_save):
        LOGGER.info('dir_name: {0}'.format(directory_to_save))
        output_tar_filename = directory_to_save + '.tar'

        if can_be_multipart_tar(directory_to_save):
            LOGGER.info('Using add_tar_to_bucket_multipart')
            s3_helper.add_tar_to_bucket_multipart(
                CHILES_BUCKET_NAME,
                'IMGCONCAT/{0}'.format(basename(output_tar_filename)),
                directory_to_save,
                bufsize=20 * 1024 * 1024)
        else:
            LOGGER.info('Using make_tarfile, then adding file to bucket')
            make_tarfile(output_tar_filename, directory_to_save)

            s3_helper.add_file_to_bucket(
                CHILES_BUCKET_NAME,
                'IMGCONCAT/{0}'.format(basename(output_tar_filename)),
                output_tar_filename)

            # Clean up
            os.remove(output_tar_filename)
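
Several of the CHILES examples above branch on can_be_multipart_tar before choosing between a streamed multipart upload and building a local tar file first. Its definition is not included on this page; a plausible sketch, assuming it is a simple size check against an upload threshold (the 5 GB constant and the directory-walk heuristic are assumptions, not the actual implementation), is:

import os

S3_SINGLE_UPLOAD_LIMIT = 5 * 1024 * 1024 * 1024  # assumed threshold


def can_be_multipart_tar(directory):
    # Assumed heuristic: total the size of every file under the directory and
    # take the multipart tar path only when a single upload would be too large.
    total_size = 0
    for root, _, filenames in os.walk(directory):
        for filename in filenames:
            total_size += os.path.getsize(os.path.join(root, filename))
    return total_size > S3_SINGLE_UPLOAD_LIMIT
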
Example #6
def stopMonitor(self, *args):
    (obj, command, params, actionID, sync) = (args[0], args[1], args[2], args[3], args[4])
    testid = int(params)
    current_test = get_test(testid)

    try:
        if current_test:
            lctx.debug("MONITOR OFF | " + str(current_test.testid) + " | START")

            cfg = config.CFG("DaytonaHost", lctx)
            cfg.readCFG("config.ini")

            # stop the sar processes
            execline = cfg.daytona_mon_path + "/sar_gather_agent.pl --shutdown --root-dir=" + current_test.statsdir
            lctx.info(execline)
            exec_cmd(execline, command, sync, obj, actionID, current_test)

            # prepare mon results tarball here
            lctx.debug(current_test.statsdir)
            lctx.debug("removed monitor temp files from : " + current_test.archivedir)
            common.make_tarfile(current_test.archivedir + "results.tgz", current_test.resultsdir + "/")
            current_test.status = "MONITOR_OFF"
            save_test(current_test.testid, current_test)
            lctx.debug("MONITOR OFF | " + str(current_test.testid) + " | COMPLETE")
            return "SUCCESS"
        else:
            raise Exception("Monitor is not running for TESTID : " + str(current_test.testid))

    except Exception as e:
        if current_test:
            current_test.status = "FAILED"
            save_test(current_test.testid, current_test)
        lctx.error(e)
        return "ERROR"
Example #7
def copy_files(frequency_id):
    s3_helper = S3Helper()
    # Look in the output directory
    LOGGER.info('directory_data: {0}'.format(CHILES_CLEAN_OUTPUT))
    for dir_name in os.listdir(CHILES_CLEAN_OUTPUT):
        LOGGER.info('dir_name: {0}'.format(dir_name))
        result_dir = join(CHILES_CLEAN_OUTPUT, dir_name)
        if isdir(result_dir) and dir_name.startswith(
                'cube_') and dir_name.endswith('.image'):
            LOGGER.info('dir_name: {0}'.format(dir_name))
            output_tar_filename = join(CHILES_CLEAN_OUTPUT, dir_name + '.tar')

            if can_be_multipart_tar(result_dir):
                LOGGER.info('Using add_tar_to_bucket_multipart')
                s3_helper.add_tar_to_bucket_multipart(
                    CHILES_BUCKET_NAME,
                    '/CLEAN/{0}/{1}'.format(frequency_id,
                                            basename(output_tar_filename)),
                    result_dir)
            else:
                LOGGER.info('Using make_tarfile, then adding file to bucket')
                make_tarfile(output_tar_filename, result_dir)

                s3_helper.add_file_to_bucket(
                    CHILES_BUCKET_NAME, 'CVEL/{0}/{1}/data.tar'.format(
                        frequency_id, basename(output_tar_filename)),
                    output_tar_filename)

                # Clean up
                os.remove(output_tar_filename)
Example #8
def stopMonitor(self, *args):
    (obj, command, test_serialized, actionID, sync) = (args[0], args[1], args[2], args[3], args[4])
    t2 = testobj.testDefn()
    t2.deserialize(test_serialized)
    lctx.debug("stop monitor for test : " + str(t2.testobj.TestInputData.testid))

    if current_test.testid != t2.testobj.TestInputData.testid:
        lctx.debug("start mon  :  TestID dont match")
        return "ERROR"

    cfg = config.CFG("DaytonaHost", lctx)
    cfg.readCFG("config.ini")

    # stop the sar processes
    execline = cfg.daytona_mon_path + "/sar_gather_agent.pl --shutdown --root-dir=" + current_test.statsdir
    lctx.info(execline)
    exec_cmd(execline, command, sync, obj, actionID)

    # prepare mon results tarball here
    lctx.debug(current_test.statsdir)
    # os.remove(current_test.statsdir + "/sar.dat")
    os.remove(current_test.statsdir + "/sar_gather_agent_debug.out")
    # os.remove(current_test.statsdir + "/*.pid")
    lctx.debug("removed monitor temp files from : " + current_test.archivedir)

    common.make_tarfile(current_test.archivedir + "results_stats.tgz", current_test.archivedir)

    lctx.debug("Completed stop monitor")
    return "SUCCESS"
Example #9
def fileDownload(self, *args):
    """
    On test completion, agent execute this procedure when it receive DAYTONA_FILE_DOWNLOAD message from scheduler.
    We create a TAR file called results.tgz and save it test location, then we send this file to scheduler and save it
    in scheduler side file system

    """
    cl = client.TCPClient(LOG.getLogger("clientlog", "Agent"))
    testid = int(args[2])
    current_test = get_test(testid)
    test_logger = None
    try:
        if current_test:
            test_logger = LOG.gettestlogger(current_test, "STAT")
            lctx.debug("FILE DOWNLOAD | " + str(current_test.testid) + " | START")
            lctx.debug("Preparing TAR file of system metric folder")
            test_logger.info("Preparing TAR file of system metric folder")
            common.make_tarfile(current_test.archivedir + "results.tgz", current_test.resultsdir + "/")
            dest = current_test.tobj.testobj.TestInputData.stats_results_path[current_test.stathostip]
            download_file = current_test.archivedir + "results.tgz"
	    test_logger.info("Sending TAR file to daytona host")
            cl.sendFile(current_test.serverip, current_test.serverport, download_file, dest.strip())
            lctx.debug("FILE DOWNLOAD | " + str(current_test.testid) + " | COMPLETE")
            return "SUCCESS"
        else:
            raise Exception("Invalid Test ID")

    except Exception as e:
        lctx.error(e)
        if test_logger:
            test_logger.error(e)
        return "ERROR"
Example #10
def prepareResults(self, *args):
    (obj, command, test_serialized, actionID, sync) = (args[0], args[1], args[2], args[3], args[4])
    t2 = testobj.testDefn()
    t2.deserialize(test_serialized)
    lctx.debug("preparing results for test : " + str(t2.testobj.TestInputData.testid))

    if current_test.testid != t2.testobj.TestInputData.testid:
        lctx.debug("start mon  :  TestID dont match")
        return "ERROR"

    # stop the sar processes

    # prepare results tarball here
    common.make_tarfile(current_test.archivedir + "results.tgz", current_test.archivedir)

    lctx.debug(command + "[" + str(actionID) + "]")
    return "SUCCESS"