Example #1
def task_set_lowering_data_directory_permissions(gearman_worker, gearman_job):
    """
    Set the permissions for the specified lowering ID
    """

    job_results = {'parts': []}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 5, 10)

    if os.path.isdir(gearman_worker.lowering_dir):
        logging.info("Set ownership and read/write permissions for the current lowering directory")
        set_owner_group_permissions(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
            gearman_worker.lowering_dir)
        job_results['parts'].append({
            "partName": "Set Directory Permissions for current lowering",
            "result": "Pass"
        })

    job_results['parts'].append({
        "partName": "Set LoweringData Directory Permissions",
        "result": "Pass"
    })
    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
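
Every example in this listing calls a set_owner_group_permissions() helper that is defined elsewhere in OpenVDM. Judging only from its call sites, it takes a username and a path and returns a dict with a 'verdict' key plus a 'reason' on failure. A minimal sketch of that implied contract (an assumption, not the project's actual implementation):

import grp
import logging
import os
import pwd
import shutil

def set_owner_group_permissions(user, path):
    """Hypothetical sketch: recursively chown `path` to `user` and apply
    group-writable permissions, returning the verdict dict the tasks expect."""
    try:
        gid = pwd.getpwnam(user).pw_gid          # assumes a local Unix account
        group = grp.getgrgid(gid).gr_name

        if os.path.isfile(path):
            shutil.chown(path, user=user, group=group)
            os.chmod(path, 0o664)
            return {'verdict': True}

        for root, _, files in os.walk(path):
            shutil.chown(root, user=user, group=group)
            os.chmod(root, 0o775)                # directories stay traversable
            for fname in files:
                filepath = os.path.join(root, fname)
                shutil.chown(filepath, user=user, group=group)
                os.chmod(filepath, 0o664)        # files: group read/write

        return {'verdict': True}
    except (OSError, KeyError) as err:
        logging.error("Failed to set ownership/permissions on %s: %s", path, err)
        return {'verdict': False, 'reason': str(err)}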
Example #2
def task_set_cruise_data_directory_permissions(gearman_worker, gearman_job):
    """
    Set the permissions for the specified cruise ID
    """

    job_results = {'parts': []}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 5, 10)

    if gearman_worker.ovdm.show_only_current_cruise_dir() is True:
        logging.info("Clear read permissions for all directories within CruiseData")
        lockdown_directory(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'],
            gearman_worker.cruise_dir)
        job_results['parts'].append({
            "partName": "Clear CruiseData Directory Read Permissions",
            "result": "Pass"
        })

    gearman_worker.send_job_status(gearman_job, 8, 10)

    if os.path.isdir(gearman_worker.cruise_dir):
        logging.info("Set ownership and read/write permissions for current cruise directory within CruiseData")
        set_owner_group_permissions(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
            gearman_worker.cruise_dir)
        job_results['parts'].append({
            "partName": "Set Directory Permissions for current cruise",
            "result": "Pass"
        })

    job_results['parts'].append({
        "partName": "Set CruiseData Directory Permissions",
        "result": "Pass"
    })
    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
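
lockdown_directory() is likewise external. From the log message it appears to hide all cruise directories from general users except the one currently shown. A heavily hedged sketch of that idea (the directory layout and modes here are assumptions):

import os

def lockdown_directory(base_dir, current_cruise_dir):
    """Hypothetical sketch: strip 'other' access from every cruise directory
    under base_dir, leaving only the current cruise directory readable."""
    for entry in os.listdir(base_dir):
        entry_path = os.path.join(base_dir, entry)
        if not os.path.isdir(entry_path):
            continue
        if entry_path == current_cruise_dir:
            os.chmod(entry_path, 0o755)  # current cruise stays visible
        else:
            os.chmod(entry_path, 0o700)  # older cruises hidden from others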
Example #3
def task_setup_new_lowering(gearman_worker, gearman_job):
    """
    Setup a new lowering
    """
    job_results = {'parts':[]}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    lowering_config_filepath = os.path.join(gearman_worker.lowering_dir, DEFAULT_LOWERING_CONFIG_FN)

    gearman_worker.send_job_status(gearman_job, 1, 10)

    gm_client = python3_gearman.GearmanClient([gearman_worker.ovdm.get_gearman_server()])

    logging.info("Creating lowering data directory")
    completed_job_request = gm_client.submit_job("createLoweringDirectory", gearman_job.data)

    result_obj = json.loads(completed_job_request.result)

    if result_obj['parts'][-1]['result'] == "Pass": # Final Verdict
        job_results['parts'].append({"partName": "Create lowering data directory structure", "result": "Pass"})
    else:
        logging.error("Failed to create lowering data directory")
        job_results['parts'].append({"partName": "Create lowering data directory structure", "result": "Fail", "reason": result_obj['parts'][-1]['reason']})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 5, 10)

    #build lowering Config file
    logging.info("Exporting Lowering Configuration")
    output_results = export_lowering_config(gearman_worker, lowering_config_filepath)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Export lowering config data to file", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Export lowering config data to file", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], lowering_config_filepath)

    if not output_results['verdict']:
        job_results['parts'].append({"partName": "Set lowering config file ownership/permissions", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 9, 10)

    logging.info("Updating Lowering Size")
    lowering_size_proc = subprocess.run(['du','-sb', gearman_worker.lowering_dir], capture_output=True, text=True, check=False)
    if lowering_size_proc.returncode == 0:
        logging.info("Cruise Size: %s", lowering_size_proc.stdout.split()[0])
        gearman_worker.ovdm.set_lowering_size(lowering_size_proc.stdout.split()[0])
    else:
        gearman_worker.ovdm.set_lowering_size("0")

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
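
These task functions follow the python-gearman callback signature (worker, job) and are invoked by name ('createLoweringDirectory', 'setupNewLowering', ...) via gm_client.submit_job(). A sketch of how a worker process might register them, with an assumed server address; in OpenVDM the worker is a subclass of GearmanWorker carrying the extra attributes (ovdm, lowering_dir, shipboard_data_warehouse_config, ...) used above:

import python3_gearman

# Assumed wiring: register each task under the name used by submit_job()
# elsewhere in these examples, then block, processing jobs as they arrive.
gearman_worker = python3_gearman.GearmanWorker(['localhost:4730'])
gearman_worker.register_task('setupNewLowering', task_setup_new_lowering)
gearman_worker.register_task('createLoweringDirectory', task_create_lowering_directory)
gearman_worker.work()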
Example #4
def task_export_ovdm_config(gearman_worker, gearman_job):
    """
    Export the OpenVDM configuration to file
    """
    job_results = {'parts':[]}

    ovdm_config_file_path = os.path.join(gearman_worker.cruise_dir, DEFAULT_CRUISE_CONFIG_FN)

    gearman_worker.send_job_status(gearman_job, 1, 10)

    logging.info("Verifying cruise directory exists")
    if os.path.exists(gearman_worker.cruise_dir):
        job_results['parts'].append({"partName": "Verify cruise directory exists", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Verify cruise directory exists", "result": "Fail", "reason": "Unable to locate the cruise directory: " + gearman_worker.cruise_dir})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 3, 10)

    #build OpenVDM Config file
    logging.info("Exporting OpenVDM Configuration")
    output_results = export_ovdm_config(gearman_worker, ovdm_config_file_path)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Export OpenVDM config data to file", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Export OpenVDM config data to file", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 6, 10)

    logging.info("Setting file ownership/permissions")
    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], ovdm_config_file_path)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Set file ownership/permissions", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Set file ownership/permissions", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 10)
    return json.dumps(job_results)
Example #5
def task_create_lowering_directory(gearman_worker, gearman_job):
    """
    Setup the lowering directory for the specified lowering ID
    """

    job_results = {'parts': []}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 1, 10)

    cruise_dir = os.path.join(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'],
        gearman_worker.cruise_id)
    lowering_data_base_dir = os.path.join(
        cruise_dir,
        gearman_worker.shipboard_data_warehouse_config['loweringDataBaseDir'])

    if os.path.exists(cruise_dir):
        job_results['parts'].append({
            "partName": "Verify Cruise Directory exists",
            "result": "Pass"
        })
    else:
        logging.error("Failed to find cruise directory: %s", cruise_dir)
        job_results['parts'].append({
            "partName": "Verify Cruise Directory exists",
            "result": "Fail",
            "reason": "Unable to find cruise directory: " + cruise_dir
        })
        return json.dumps(job_results)

    if os.path.exists(lowering_data_base_dir):
        job_results['parts'].append({
            "partName": "Verify Lowering Data Directory exists",
            "result": "Pass"
        })
    else:
        logging.error("Lowering Data Directory does not exist: %s",
                      lowering_data_base_dir)
        job_results['parts'].append({
            "partName": "Verify Lowering Data Directory exists",
            "result": "Fail",
            "reason": "Unable to find lowering data base directory: " + lowering_data_base_dir
        })
        return json.dumps(job_results)

    if not os.path.exists(gearman_worker.lowering_dir):
        job_results['parts'].append({
            "partName": "Verify Lowering Directory does not exist",
            "result": "Pass"
        })
    else:
        logging.error("Lowering directory already exists: %s",
                      gearman_worker.lowering_dir)
        job_results['parts'].append({
            "partName": "Verify Lowering Directory does not exist",
            "result": "Fail",
            "reason": "Lowering directory " + gearman_worker.lowering_dir + " already exists"
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 2, 10)

    directorylist = build_directorylist(gearman_worker)
    logging.debug("Directory List: %s", json.dumps(directorylist, indent=2))

    if len(directorylist) > 0:
        job_results['parts'].append({
            "partName": "Build Directory List",
            "result": "Pass"
        })
    else:
        logging.warning("Directory list is empty")
        job_results['parts'].append({
            "partName": "Build Directory List",
            "result": "Fail",
            "reason": "Empty list of directories to create"
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 5, 10)

    output_results = create_directories(directorylist)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Create Directories",
            "result": "Pass"
        })
    else:
        logging.error(
            "Failed to create any/all of the lowering data directory structure"
        )
        job_results['parts'].append({
            "partName": "Create Directories",
            "result": "Fail",
            "reason": output_results['reason']
        })

    gearman_worker.send_job_status(gearman_job, 8, 10)

    output_results = set_owner_group_permissions(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
        gearman_worker.lowering_dir)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Set lowering directory ownership/permissions",
            "result": "Pass"
        })
    else:
        job_results['parts'].append({
            "partName": "Set lowering directory ownership/permissions",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
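
build_directorylist() and create_directories() are also helpers defined elsewhere. A plausible sketch of create_directories(), matching the verdict/reason contract used above (an assumption, not the actual OpenVDM code):

import logging
import os

def create_directories(directorylist):
    """Hypothetical sketch: create each directory in the list, tolerating
    ones that already exist, and report the first failure."""
    for directory in directorylist:
        try:
            os.makedirs(directory, exist_ok=True)
        except OSError as err:
            logging.error("Could not create directory: %s", directory)
            return {'verdict': False,
                    'reason': "Could not create directory: {} ({})".format(directory, err)}

    return {'verdict': True}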
Example #6
def transfer_local_dest_dir(gearman_worker, gearman_job):  # pylint: disable=too-many-locals,too-many-statements
    """
    Copy cruise data to a local directory
    """

    logging.debug("Transfer to Local Directory")

    cruise_dir = os.path.join(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'],
        gearman_worker.cruise_id)
    dest_dir = gearman_worker.cruise_data_transfer['destDir'].rstrip('/')

    logging.debug('Destination Dir: %s', dest_dir)

    logging.debug("Building file list")
    files = build_filelist(gearman_worker, cruise_dir)

    # Create temp directory
    tmpdir = tempfile.mkdtemp()
    rsync_exclude_list_filepath = os.path.join(tmpdir, 'rsyncExcludeList.txt')

    try:
        with open(rsync_exclude_list_filepath, 'w') as rsync_excludelist_file:
            rsync_excludelist_file.write('\n'.join(files['exclude']))

    except IOError:
        logging.error("Error Saving temporary rsync filelist file")

        # Cleanup
        shutil.rmtree(tmpdir)
        return False

    file_index = 0
    file_count = 1  # avoids divide by 0 error
    command = [
        'rsync', '-trimnv', '--stats',
        '--exclude-from=' + rsync_exclude_list_filepath, cruise_dir, dest_dir
    ]

    logging.debug('File count Command: %s', ' '.join(command))

    proc = subprocess.run(command, capture_output=True, text=True, check=False)

    # proc.stdout is a single string (text=True), so iterate its lines,
    # not its characters; rsync may also format counts with commas.
    for line in proc.stdout.splitlines():
        if line.startswith('Number of regular files transferred:'):
            file_count = int(line.split(':')[1].replace(',', ''))

    bandwidth_limit = '--bwlimit=20000000'  # 20GB/s a.k.a. stupid big
    if gearman_worker.cruise_data_transfer['bandwidthLimit'] != '0':
        bandwidth_limit = '--bwlimit=' + gearman_worker.cruise_data_transfer['bandwidthLimit']

    command = [
        'rsync', '-trimv', bandwidth_limit,
        '--exclude-from=' + rsync_exclude_list_filepath, cruise_dir, dest_dir
    ]

    logging.debug('Transfer Command: %s', ' '.join(command))

    proc = subprocess.Popen(command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            text=True)
    while True:

        line = proc.stdout.readline().rstrip('\n')
        err_line = proc.stderr.readline().rstrip('\n')

        if err_line:
            logging.warning("Err Line: %s", err_line)
        if line:
            logging.debug("Line: %s", line)

        if proc.poll() is not None:
            break

        # if not line:
        #     continue

        if line.startswith('>f+++++++++'):
            filename = line.split(' ', 1)[1]
            files['new'].append(filename)
            gearman_worker.send_job_status(
                gearman_job,
                int(20 + 70 * float(file_index) / float(file_count)), 100)
            file_index += 1
        elif line.startswith('>f.'):
            filename = line.split(' ', 1)[1]
            files['updated'].append(filename)
            gearman_worker.send_job_status(
                gearman_job,
                int(20 + 70 * float(file_index) / float(file_count)), 100)
            file_index += 1

        if gearman_worker.stop:
            logging.debug("Stopping")
            proc.terminate()
            break

    # files['new'] = [os.path.join('/', gearman_worker.cruise_id, filename) for filename in files['new']]
    # files['updated'] = [os.path.join('/', gearman_worker.cruise_id, filename) for filename in files['updated']]

    logging.info("Setting file permissions")
    output_results = set_owner_group_permissions(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
        os.path.join(dest_dir, gearman_worker.cruise_id))

    # Cleanup
    shutil.rmtree(tmpdir)

    if not output_results['verdict']:
        logging.error(
            "Error setting ownership/permissions for cruise data at destination: %s",
            os.path.join(dest_dir, gearman_worker.cruise_id))
        return output_results

    return {'verdict': True, 'files': files}
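
The '>f+++++++++' and '>f.' prefixes parsed above come from rsync's --itemize-changes output (the 'i' in -trimv): '>f' marks a file received at the destination, a run of '+' signs means it was newly created, and a '.' in the attribute columns means an existing file was updated. A standalone check of that parsing logic against made-up sample lines:

# Sample lines in the shape rsync -i emits (illustrative, not captured output).
sample_lines = [
    '>f+++++++++ some/new_file.txt',      # newly transferred file
    '>f.st...... some/updated_file.txt',  # existing file, size/time changed
    'cd+++++++++ some/new_dir/',          # newly created directory (ignored here)
]

new, updated = [], []
for line in sample_lines:
    if line.startswith('>f+++++++++'):
        new.append(line.split(' ', 1)[1])
    elif line.startswith('>f.'):
        updated.append(line.split(' ', 1)[1])

assert new == ['some/new_file.txt']
assert updated == ['some/updated_file.txt']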
Example #7
def task_rebuild_data_dashboard(gearman_worker, gearman_job):  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    """
    Rebuild the existing dashboard files
    """

    job_results = {'parts': [], 'files': {'new': [], 'updated': []}}

    payload_obj = json.loads(gearman_job.data)
    logging.debug('Payload: %s', json.dumps(payload_obj, indent=2))

    if os.path.exists(gearman_worker.data_dashboard_dir):
        job_results['parts'].append({
            "partName": "Verify Data Dashboard Directory exists",
            "result": "Pass"
        })
    else:
        logging.error("Data dashboard directory not found: %s",
                      gearman_worker.data_dashboard_dir)
        job_results['parts'].append({
            "partName": "Verify Data Dashboard Directory exists",
            "result": "Fail",
            "reason": "Unable to locate the data dashboard directory: " +
                      gearman_worker.data_dashboard_dir
        })
        return json.dumps(job_results)

    collection_system_transfers = gearman_worker.ovdm.get_active_collection_system_transfers()

    gearman_worker.send_job_status(gearman_job, 1, 100)

    new_manifest_entries = []

    collection_system_transfer_count = len(collection_system_transfers)
    collection_system_transfer_index = 0
    for collection_system_transfer in collection_system_transfers:  # pylint: disable=too-many-nested-blocks

        logging.info('Processing data from: %s',
                     collection_system_transfer['name'])

        processing_script_filename = os.path.join(
            gearman_worker.ovdm.get_plugin_dir(),
            collection_system_transfer['name'].lower() +
            gearman_worker.ovdm.get_plugin_suffix())
        logging.debug("Processing Script Filename: %s",
                      processing_script_filename)

        if not os.path.isfile(processing_script_filename):
            logging.warning(
                "Processing script for collection system %s not found, moving on.",
                collection_system_transfer['name'])
            gearman_worker.send_job_status(
                gearman_job,
                int(10 + (80 * float(collection_system_transfer_index) /
                          float(collection_system_transfer_count))), 100)
            collection_system_transfer_index += 1
            continue

        # collection_system_transferOutputDir = os.path.join(gearman_worker.data_dashboard_dir, collection_system_transfer['destDir'])

        #build filelist
        filelist = []
        if collection_system_transfer['cruiseOrLowering'] == "0":
            collection_system_transfer_input_dir = os.path.join(
                gearman_worker.cruise_dir,
                collection_system_transfer['destDir'])
            filelist.extend(
                build_filelist(collection_system_transfer_input_dir))
            filelist = [
                os.path.join(collection_system_transfer['destDir'], filename)
                for filename in filelist
            ]

        else:
            lowerings = gearman_worker.ovdm.get_lowerings()
            lowering_base_dir = gearman_worker.shipboard_data_warehouse_config[
                'loweringDataBaseDir']

            for lowering in lowerings:
                collection_system_transfer_input_dir = os.path.join(
                    gearman_worker.cruise_dir, lowering_base_dir, lowering,
                    collection_system_transfer['destDir'])
                lowering_filelist = build_filelist(
                    collection_system_transfer_input_dir)
                filelist.extend([
                    os.path.join(lowering_base_dir, lowering,
                                 collection_system_transfer['destDir'],
                                 filename) for filename in lowering_filelist
                ])

        logging.debug("FileList: %s", json.dumps(filelist, indent=2))

        file_count = len(filelist)
        file_index = 0
        logging.info("%s file(s) to process", file_count)

        for filename in filelist:

            if gearman_worker.stop:
                break

            logging.info("Processing file: %s", filename)
            json_filename = os.path.splitext(filename)[0] + '.json'
            logging.debug("jsonFileName: %s", json_filename)
            raw_filepath = os.path.join(gearman_worker.cruise_dir, filename)
            logging.debug("rawFilePath: %s", raw_filepath)
            json_filepath = os.path.join(gearman_worker.data_dashboard_dir,
                                         json_filename)
            logging.debug("jsonFilePath: %s", json_filepath)

            if os.stat(raw_filepath).st_size == 0:
                logging.warning("File %s is empty", filename)
                continue

            command = [
                PYTHON_BINARY, processing_script_filename, '--dataType',
                raw_filepath
            ]

            logging.debug("Get Datatype Command: %s", ' '.join(command))

            datatype_proc = subprocess.run(command,
                                           capture_output=True,
                                           text=True,
                                           check=False)

            if datatype_proc.stdout:
                dd_type = datatype_proc.stdout.rstrip('\n')
                logging.debug("Found to be type: %s", dd_type)

                command = [
                    PYTHON_BINARY, processing_script_filename, raw_filepath
                ]

                logging.debug("Processing Command: %s", ' '.join(command))

                data_proc = subprocess.run(command,
                                           capture_output=True,
                                           text=True,
                                           check=False)

                if data_proc.stdout:
                    try:
                        logging.debug("Parsing output")
                        # logging.debug(data_proc.stdout)
                        out_obj = json.loads(data_proc.stdout)
                    except Exception as err:
                        logging.error(str(err))
                        error_title = 'Error parsing output'
                        error_body = 'Invalid JSON output received from processing. Command: ' + ' '.join(command)
                        logging.error("%s: %s", error_title, error_body)
                        gearman_worker.ovdm.send_msg(error_title, error_body)
                        job_results['parts'].append({
                            "partName": "Parsing JSON output " + filename,
                            "result": "Fail",
                            "reason": error_title + ': ' + error_body
                        })
                    else:
                        if out_obj is None:
                            error_title = 'Error processing file'
                            error_body = 'No JSON output received from file. Processing Command: ' + ' '.join(command)
                            logging.error("%s: %s", error_title, error_body)
                            gearman_worker.ovdm.send_msg(error_title, error_body)
                            job_results['parts'].append({
                                "partName": "Parsing JSON output from file " + filename,
                                "result": "Fail",
                                "reason": error_title + ': ' + error_body
                            })

                            if data_proc.stderr:
                                logging.error('err: %s', data_proc.stderr)

                        elif 'error' in out_obj:
                            error_title = 'Error processing file'
                            error_body = out_obj['error']
                            logging.error("%s: %s", error_title, error_body)
                            gearman_worker.ovdm.send_msg(error_title, error_body)
                            job_results['parts'].append({
                                "partName": "Processing Datafile " + filename,
                                "result": "Fail",
                                "reason": error_title + ': ' + error_body
                            })

                        else:
                            #job_results['parts'].append({"partName": "Processing Datafile " + filename, "result": "Pass"})
                            output_results = output_json_data_to_file(json_filepath, out_obj)

                            if output_results['verdict']:
                                job_results['parts'].append({
                                    "partName": "Writing DashboardData file: " + filename,
                                    "result": "Pass"
                                })
                            else:
                                error_title = 'Error writing file'
                                error_body = "Error Writing DashboardData file: " + filename
                                logging.error("%s: %s", error_title, error_body)
                                gearman_worker.ovdm.send_msg(error_title, error_body)

                                job_results['parts'].append({
                                    "partName": "Writing Dashboard file: " + filename,
                                    "result": "Fail",
                                    "reason": output_results['reason']
                                })

                            base_dir = gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/'
                            new_manifest_entries.append({
                                "type": dd_type,
                                "dd_json": json_filepath.replace(base_dir, ''),
                                "raw_data": raw_filepath.replace(base_dir, '')
                            })
                else:
                    error_title = 'Error processing file'
                    error_body = 'No JSON output received from file. Processing Command: ' + ' '.join(command)
                    logging.error("%s: %s", error_title, error_body)
                    gearman_worker.ovdm.send_msg(error_title, error_body)
                    job_results['parts'].append({
                        "partName": "Parsing JSON output from file " + filename,
                        "result": "Fail",
                        "reason": error_title + ': ' + error_body
                    })

                    if data_proc.stderr:
                        logging.error('err: %s', data_proc.stderr)

            else:
                logging.warning("File is of unknown datatype, moving on")

                if datatype_proc.stderr:
                    logging.error('err: %s', datatype_proc.stderr)

            gearman_worker.send_job_status(
                gearman_job,
                int(10 + 70 * float(file_index) / float(file_count)), 100)
            file_index += 1

        collection_system_transfer_index += 1

    gearman_worker.send_job_status(gearman_job, 90, 100)

    logging.info("Update Dashboard Manifest file")
    output_results = output_json_data_to_file(
        gearman_worker.data_dashboard_manifest_file_path, new_manifest_entries)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Updating manifest file",
            "result": "Pass"
        })
    else:
        logging.error("Error updating manifest file %s",
                      gearman_worker.data_dashboard_manifest_file_path)
        job_results['parts'].append({
            "partName": "Updating manifest file",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 95, 100)

    logging.info("Setting file ownership/permissions")
    output_results = set_owner_group_permissions(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
        gearman_worker.data_dashboard_dir)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Setting file/directory ownership",
            "result": "Pass"
        })
    else:
        logging.error("Error Setting file/directory ownership/permissions")
        job_results['parts'].append({
            "partName": "Setting file/directory ownership",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 99, 100)

    data_dashboard_dest_dir = gearman_worker.ovdm.get_required_extra_directory_by_name('Dashboard_Data')['destDir']
    job_results['files']['updated'] = [
        os.path.join(data_dashboard_dest_dir, filepath)
        for filepath in build_filelist(gearman_worker.data_dashboard_dir)
    ]  # might need to remove cruise_dir from beginning of filepaths

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
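
output_json_data_to_file() is another external helper; based on its call sites, a minimal sketch (assumed, and omitting any parent-directory handling the real helper may do) is:

import json
import logging

def output_json_data_to_file(filepath, data):
    """Hypothetical sketch: serialize `data` as JSON to `filepath` and
    return the verdict/reason dict the callers above check."""
    try:
        with open(filepath, 'w') as json_file:
            json.dump(data, json_file, indent=4)
        return {'verdict': True}
    except IOError as err:
        reason = "Unable to write JSON file: {} ({})".format(filepath, err)
        logging.error(reason)
        return {'verdict': False, 'reason': reason}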
Example #8
def task_update_data_dashboard(gearman_worker, gearman_job):  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    """
    Update the existing dashboard files with new/updated raw data
    """
    job_results = {'parts': [], 'files': {'new': [], 'updated': []}}

    payload_obj = json.loads(gearman_job.data)
    logging.debug('Payload: %s', json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 5, 100)

    logging.info('Collection System Transfer: %s',
                 gearman_worker.collection_system_transfer['name'])

    new_manifest_entries = []
    remove_manifest_entries = []

    #check for processing file
    processing_script_filename = os.path.join(
        gearman_worker.ovdm.get_plugin_dir(),
        gearman_worker.collection_system_transfer['name'].lower() +
        gearman_worker.ovdm.get_plugin_suffix())
    logging.debug("Processing Script Filename: %s", processing_script_filename)

    if os.path.isfile(processing_script_filename):
        job_results['parts'].append({
            "partName": "Dashboard Processing File Located",
            "result": "Pass"
        })
    else:
        logging.warning("Processing script not found: %s",
                        processing_script_filename)
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 100)

    #build filelist
    filelist = []

    if payload_obj['files']['new'] or payload_obj['files']['updated']:
        filelist = payload_obj['files']['new']
        filelist += payload_obj['files']['updated']
        logging.debug('File List: %s', json.dumps(filelist, indent=2))
        job_results['parts'].append({
            "partName": "Retrieve Filelist",
            "result": "Pass"
        })

    else:
        logging.warning("No new or updated files to process")
        job_results['parts'].append({
            "partName": "Retrieve Filelist",
            "result": "Pass"
        })
        return json.dumps(job_results)

    file_count = len(filelist)
    file_index = 0
    for filename in filelist:  # pylint: disable=too-many-nested-blocks

        if gearman_worker.stop:
            break

        logging.info("Processing file: %s", filename)
        json_filename = os.path.splitext(filename)[0] + '.json'
        raw_filepath = os.path.join(gearman_worker.cruise_dir, filename)
        json_filepath = os.path.join(gearman_worker.data_dashboard_dir,
                                     json_filename)

        if not os.path.isfile(raw_filepath):
            job_results['parts'].append({
                "partName": "Verify data file exists",
                "result": "Fail",
                "reason": "Unable to find data file: " + filename
            })
            logging.warning("File not found %s, skipping", filename)
            continue

        if os.stat(raw_filepath).st_size == 0:
            logging.warning("File is empty %s, skipping", filename)
            continue

        command = [
            PYTHON_BINARY, processing_script_filename, '--dataType',
            raw_filepath
        ]

        logging.debug("DataType Retrieval Command: %s", ' '.join(command))

        datatype_proc = subprocess.run(command,
                                       capture_output=True,
                                       text=True,
                                       check=False)

        if datatype_proc.stdout:
            dd_type = datatype_proc.stdout.rstrip('\n')
            logging.debug("DataType found to be: %s", dd_type)

            command = [PYTHON_BINARY, processing_script_filename, raw_filepath]

            logging.debug("Data Processing Command: %s", ' '.join(command))

            data_proc = subprocess.run(command,
                                       capture_output=True,
                                       text=True,
                                       check=False)

            if data_proc.stdout:
                try:
                    logging.debug("Verifying output")
                    out_obj = json.loads(data_proc.stdout)
                except Exception as err:
                    logging.error("Error parsing JSON output from file: %s", filename)
                    logging.debug(str(err))
                    job_results['parts'].append({
                        "partName": "Parsing JSON output from file " + filename,
                        "result": "Fail",
                        "reason": "Error parsing JSON output from file: " + filename
                    })
                    continue
                else:
                    if not out_obj:
                        error_title = 'Datafile Parsing error'
                        error_body = "Parser returned no output. Parsing command: " + ' '.join(command)
                        logging.error("%s: %s", error_title, error_body)
                        gearman_worker.ovdm.send_msg(error_title, error_body)
                    elif 'error' in out_obj:
                        error_title = 'Datafile Parsing error'
                        error_body = out_obj['error']
                        logging.error("%s: %s", error_title, error_body)
                        gearman_worker.ovdm.send_msg(error_title, error_body)
                    else:
                        output_results = output_json_data_to_file(json_filepath, out_obj)

                        if output_results['verdict']:
                            job_results['parts'].append({
                                "partName": "Writing DashboardData file: " + filename,
                                "result": "Pass"
                            })
                        else:
                            error_title = 'Data Dashboard Processing failed'
                            error_body = ("Error Writing DashboardData file: " + filename +
                                          ". Reason: " + output_results['reason'])
                            logging.error("%s: %s", error_title, error_body)
                            gearman_worker.ovdm.send_msg(error_title, error_body)
                            job_results['parts'].append({
                                "partName": "Writing Dashboard file: " + filename,
                                "result": "Fail",
                                "reason": output_results['reason']
                            })

                        base_dir = gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/'
                        new_manifest_entries.append({
                            "type": dd_type,
                            "dd_json": json_filepath.replace(base_dir, ''),
                            "raw_data": raw_filepath.replace(base_dir, '')
                        })
            else:
                error_title = 'Data Dashboard Processing failed'
                error_body = 'No JSON output received from file. Parsing Command: ' + ' '.join(command)
                logging.error("%s: %s", error_title, error_body)
                gearman_worker.ovdm.send_msg(error_title, error_body)
                base_dir = gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/'
                remove_manifest_entries.append({
                    "dd_json": json_filepath.replace(base_dir, ''),
                    "raw_data": raw_filepath.replace(base_dir, '')
                })

                #job_results['parts'].append({"partName": "Parsing JSON output from file " + filename, "result": "Fail"})
                if data_proc.stderr:
                    logging.error("Err: %s", data_proc.stderr)
        else:
            logging.warning("File is of unknown datatype: %s", raw_filepath)
            base_dir = gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/'
            remove_manifest_entries.append({
                "dd_json": json_filepath.replace(base_dir, ''),
                "raw_data": raw_filepath.replace(base_dir, '')
            })

            if datatype_proc.stderr:
                logging.error("Err: %s", datatype_proc.stderr)

        gearman_worker.send_job_status(
            gearman_job, int(10 + 70 * float(file_index) / float(file_count)),
            100)
        file_index += 1

    gearman_worker.send_job_status(gearman_job, 8, 10)

    if len(new_manifest_entries) > 0:
        logging.info("Updating Manifest file: %s",
                     gearman_worker.data_dashboard_manifest_file_path)

        rows_removed = 0

        existing_manifest_entries = []

        try:
            with open(gearman_worker.data_dashboard_manifest_file_path, 'r') as dashboard_manifest_file:
                existing_manifest_entries = json.load(dashboard_manifest_file)

            job_results['parts'].append({
                "partName": "Reading pre-existing Dashboard manifest file",
                "result": "Pass"
            })

        except IOError:
            logging.warning("Error Reading Dashboard Manifest file %s",
                            gearman_worker.data_dashboard_manifest_file_path)
        except Exception as err:
            logging.error(str(err))
            job_results['parts'].append({
                "partName": "Reading pre-existing Dashboard manifest file",
                "result": "Fail",
                "reason": "Error reading dashboard manifest file: " +
                          gearman_worker.data_dashboard_manifest_file_path
            })
            return json.dumps(job_results)

        logging.debug("Entries to remove: %s",
                      json.dumps(remove_manifest_entries, indent=2))
        for remove_entry in remove_manifest_entries:
            for idx, existing_entry in enumerate(existing_manifest_entries):
                if remove_entry['raw_data'] == existing_entry['raw_data']:
                    del existing_manifest_entries[idx]
                    rows_removed += 1

                    dd_json_path = os.path.join(
                        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'],
                        remove_entry['dd_json'])

                    if os.path.isfile(dd_json_path):
                        logging.info("Deleting orphaned dd_json file %s", dd_json_path)
                        os.remove(dd_json_path)
                    break

        logging.debug("Entries to add/update: %s",
                      json.dumps(new_manifest_entries, indent=2))
        for new_entry in new_manifest_entries:
            updated = False
            for existing_entry in existing_manifest_entries:
                if new_entry['raw_data'] == existing_entry['raw_data']:
                    updated = True
                    job_results['files']['updated'].append(
                        new_entry['dd_json'].replace(
                            gearman_worker.cruise_id + '/', ''))
                    break

            if not updated:  #added
                job_results['files']['new'].append(
                    new_entry['dd_json'].replace(
                        gearman_worker.cruise_id + '/', ''))
                existing_manifest_entries.append(new_entry)

        if len(job_results['files']['new']) > 0:
            logging.info("%s row(s) added", len(job_results['files']['new']))
        if len(job_results['files']['updated']) > 0:
            logging.info("%s row(s) updated",
                         len(job_results['files']['updated']))
        if rows_removed:
            logging.info("%s row(s) removed", rows_removed)

        output_results = output_json_data_to_file(
            gearman_worker.data_dashboard_manifest_file_path,
            existing_manifest_entries)

        if not output_results['verdict']:
            logging.error("Error Writing Dashboard manifest file: %s",
                          gearman_worker.data_dashboard_manifest_file_path)
            job_results['parts'].append({
                "partName": "Writing Dashboard manifest file",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

        job_results['parts'].append({
            "partName": "Writing Dashboard manifest file",
            "result": "Pass"
        })
        job_results['files']['updated'].append(
            os.path.join(
                gearman_worker.ovdm.get_required_extra_directory_by_name(
                    'Dashboard_Data')['destDir'],
                DEFAULT_DATA_DASHBOARD_MANIFEST_FN))

        gearman_worker.send_job_status(gearman_job, 9, 10)

        logging.info("Setting file ownership/permissions")
        output_results = set_owner_group_permissions(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
            gearman_worker.data_dashboard_dir)

        if output_results['verdict']:
            job_results['parts'].append({
                "partName": "Set file/directory ownership",
                "result": "Pass"
            })
        else:
            job_results['parts'].append({
                "partName": "Set file/directory ownership",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
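
For reference, each manifest entry built above pairs a raw data file with its derived dashboard JSON, both stored as paths relative to shipboardDataWarehouseBaseDir; the values below are illustrative only:

# Illustrative manifest entries (hypothetical paths and datatype).
new_manifest_entries = [
    {
        "type": "gga",  # datatype reported by the plugin's --dataType mode
        "dd_json": "CRUISE_ID/OpenVDM/DashboardData/gnss/POSMV-GGA_240101.json",
        "raw_data": "CRUISE_ID/gnss/POSMV-GGA_240101.Raw",
    },
]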
Example #9
def task_rsync_publicdata_to_cruise_data(gearman_worker, gearman_job):
    """
    Sync the contents of the PublicData share to the from_PublicData Extra Directory
    """
    job_results = {'parts':[]}

    publicdata_dir = gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehousePublicDataDir']
    from_publicdata_dir = os.path.join(gearman_worker.cruise_dir, gearman_worker.ovdm.get_required_extra_directory_by_name('From_PublicData')['destDir'])

    gearman_worker.send_job_status(gearman_job, 1, 10)

    if os.path.exists(from_publicdata_dir):
        job_results['parts'].append({"partName": "Verify From_PublicData directory exists", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Verify From_PublicData directory exists", "result": "Fail", "reason": "Unable to locate the From_PublicData directory: " + from_publicdata_dir})
        return json.dumps(job_results)

    if os.path.exists(publicdata_dir):
        job_results['parts'].append({"partName": "Verify PublicData directory exists", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Verify PublicData directory exists", "result": "Fail", "reason": "Unable to locate the PublicData directory: " + publicdata_dir})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 5, 10)

    logging.info("Transferring files from PublicData to the cruise data directory")
    output_results = transfer_publicdata_dir(gearman_worker, gearman_job)

    if not output_results['verdict']:
        job_results['parts'].append({"partName": "Transfer files", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Transfer files", "result": "Pass"})

    files = output_results['files']

    logging.debug("Files Transferred: %s",json.dumps(files, indent=2))

    gearman_worker.send_job_status(gearman_job, 8, 10)

    if len(files['new']) > 0 or len(files['updated']) > 0:

        logging.info("Setting file permissions")
        output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], from_publicdata_dir)

        if output_results['verdict']:
            job_results['parts'].append({"partName": "Set file/directory ownership/permissions", "result": "Pass"})
        else:
            job_results['parts'].append({"partName": "Set file/directory ownership/permissions", "result": "Fail", "reason": output_results['reason']})
            return json.dumps(job_results)

        gearman_worker.send_job_status(gearman_job, 9, 10)

        logging.info("Initiating MD5 Summary Task")

        gm_client = python3_gearman.GearmanClient([gearman_worker.ovdm.get_gearman_server()])
        gm_data = {}
        gm_data['cruiseID'] = gearman_worker.cruise_id
        gm_data['files'] = files
        gm_data['files']['new'] = [os.path.join(from_publicdata_dir,filename) for filename in gm_data['files']['new']]
        gm_data['files']['updated'] = [os.path.join(from_publicdata_dir,filename) for filename in gm_data['files']['updated']]

        gm_client.submit_job("updateMD5Summary", json.dumps(gm_data))

        logging.info("MD5 Summary Task Complete")

    # need to verify update MD5 completed successfully

    gearman_worker.send_job_status(gearman_job, 10, 10)
    return json.dumps(job_results)
Example #10
def task_finalize_current_lowering(gearman_worker, gearman_job):
    """
    Finalize the current lowering
    """
    job_results = {'parts':[]}

    gearman_worker.send_job_status(gearman_job, 1, 10)

    lowering_config_filepath = os.path.join(gearman_worker.lowering_dir, DEFAULT_LOWERING_CONFIG_FN)

    if os.path.exists(gearman_worker.lowering_dir) and (gearman_worker.lowering_id != ''):
        job_results['parts'].append({"partName": "Verify Lowering Directory exists", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Verify Lowering Directory exists", "result": "Fail", "reason": "Lowering directory: " + gearman_worker.lowering_dir + " could not be found"})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 3, 10)
    logging.info("Queuing Collection System Transfers")

    gm_client = python3_gearman.GearmanClient([gearman_worker.ovdm.get_gearman_server()])

    gm_data = {
        'loweringID': gearman_worker.lowering_id,
        'loweringStartDate': gearman_worker.lowering_start_date,
        'systemStatus': "On",
        'collectionSystemTransfer': {}
    }

    collection_system_transfer_jobs = []

    collection_system_transfers = gearman_worker.ovdm.get_active_collection_system_transfers(cruise=False)

    for collection_system_transfer in collection_system_transfers:

        logging.debug("Queuing runCollectionSystemTransfer job for %s", collection_system_transfer['name'])
        gm_data['collectionSystemTransfer']['collectionSystemTransferID'] = collection_system_transfer['collectionSystemTransferID']

        collection_system_transfer_jobs.append( {"task": "runCollectionSystemTransfer", "data": json.dumps(gm_data)} )

    gearman_worker.send_job_status(gearman_job, 5, 10)

    if len(collection_system_transfer_jobs) > 0:
        logging.info("Submitting runCollectionSystemTransfer jobs")
        submitted_job_request = gm_client.submit_multiple_jobs(collection_system_transfer_jobs, background=False, wait_until_complete=False)

        gearman_worker.send_job_status(gearman_job, 7, 10)

        time.sleep(1)
        gm_client.wait_until_jobs_completed(submitted_job_request)
        logging.info("Completed runCollectionSystemTransfers jobs")

    gearman_worker.send_job_status(gearman_job, 9, 10)

    #build Lowering Config file
    logging.info("Exporting Lowering Configuration")
    output_results = export_lowering_config(gearman_worker, lowering_config_filepath, finalize=True)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Export Lowering config data to file", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Export Lowering config data to file", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], lowering_config_filepath)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Set Lowering config file ownership/permissions", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Set Lowering config file ownership/permissions", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    # need to add code for updating MD5

    gearman_worker.send_job_status(gearman_job, 10, 10)
    return json.dumps(job_results)
Example #11
def task_setup_new_cruise(gearman_worker, gearman_job): # pylint: disable=too-many-return-statements,too-many-statements
    """
    Setup a new cruise
    """
    job_results = {'parts':[]}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    ovdm_config_file_path = os.path.join(gearman_worker.cruise_dir, DEFAULT_CRUISE_CONFIG_FN)

    gearman_worker.send_job_status(gearman_job, 1, 10)

    gm_client = python3_gearman.GearmanClient([gearman_worker.ovdm.get_gearman_server()])

    logging.info("Set ownership/permissions for the CruiseData directory")
    completed_job_request = gm_client.submit_job("setCruiseDataDirectoryPermissions", gearman_job.data)

    result_obj = json.loads(completed_job_request.result)

    if result_obj['parts'][-1]['result'] != "Pass": # Final Verdict
        logging.error("Failed to lockdown the CruiseData directory")
        job_results['parts'].append({"partName": "Set ownership/permissions for CruiseData directory", "result": "Fail", "reason": result_obj['parts'][-1]['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Set ownership/permissions for CruiseData directory", "result": "Pass"})

    logging.info("Creating cruise data directory")
    completed_job_request = gm_client.submit_job("createCruiseDirectory", gearman_job.data)

    result_obj = json.loads(completed_job_request.result)

    if result_obj['parts'][-1]['result'] != "Pass": # Final Verdict
        logging.error("Failed to create cruise data directory")
        job_results['parts'].append({"partName": "Create cruise data directory structure", "result": "Fail", "reason": result_obj['parts'][-1]['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Create cruise data directory structure", "result": "Pass"})

    gearman_worker.send_job_status(gearman_job, 5, 10)

    #build OpenVDM Config file
    logging.info("Exporting Cruise Configuration")
    output_results = export_ovdm_config(gearman_worker, ovdm_config_file_path)

    if not output_results['verdict']:
        job_results['parts'].append({"partName": "Export OpenVDM config data to file", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Export OpenVDM config data to file", "result": "Pass"})

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], ovdm_config_file_path)

    if not output_results['verdict']:
        job_results['parts'].append({"partName": "Set OpenVDM config file ownership/permissions", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 7, 10)

    logging.info("Creating MD5 summary files")
    completed_job_request = gm_client.submit_job("rebuildMD5Summary", gearman_job.data)

    result_obj = json.loads(completed_job_request.result)

    if result_obj['parts'][-1]['result'] != "Pass": # Final Verdict
        logging.error("Failed to create MD5 summary files")
        job_results['parts'].append({"partName": "Create MD5 summary files", "result": "Fail", "reason": result_obj['parts'][-1]['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Create MD5 summary files", "result": "Pass"})

    gearman_worker.send_job_status(gearman_job, 8, 10)

    logging.info("Creating data dashboard directory structure and manifest file")
    completed_job_request = gm_client.submit_job("rebuildDataDashboard", gearman_job.data)

    result_obj = json.loads(completed_job_request.result)

    if result_obj['parts'][-1]['result'] != "Pass": # Final Verdict
        logging.error("Failed to create data dashboard directory structure and/or manifest file")
        job_results['parts'].append({"partName": "Create data dashboard directory structure and manifest file", "result": "Fail", "reason": result_obj['parts'][-1]['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Create data dashboard directory structure and manifest file", "result": "Pass"})

    gearman_worker.send_job_status(gearman_job, 9, 10)

    logging.info("Updating Cruise Size")
    cruise_size_proc = subprocess.run(['du','-sb', gearman_worker.cruise_dir], capture_output=True, text=True, check=False)
    if cruise_size_proc.returncode == 0:
        logging.info("Cruise Size: %s", cruise_size_proc.stdout.split()[0])
        gearman_worker.ovdm.set_cruise_size(cruise_size_proc.stdout.split()[0])
    else:
        gearman_worker.ovdm.set_cruise_size("0")

    gearman_worker.ovdm.set_lowering_size("0")

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
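
A note on the cruise-size update at the end of this task: GNU `du -sb` prints the total apparent size in bytes followed by the path, so `stdout.split()[0]` isolates the byte count. A rough standard-library equivalent, shown here only as a sketch (du also charges directory inodes, so the numbers will not match exactly):

import os

def directory_size_bytes(path):
    """Sum the apparent sizes of all regular files under path."""
    total = 0
    for dirpath, _dirnames, filenames in os.walk(path):
        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            if not os.path.islink(filepath):  # du -sb does not follow symlinks
                total += os.path.getsize(filepath)
    return total
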
Example #12
def task_run_ship_to_shore_transfer(gearman_worker, current_job):  # pylint: disable=too-many-statements
    """
    Perform the ship-to-shore transfer
    """

    time.sleep(randint(0, 2))

    job_results = {
        'parts': [{
            "partName": "Transfer In-Progress",
            "result": "Pass"
        }, {
            "partName": "Transfer Enabled",
            "result": "Pass"
        }],
        'files': {}
    }

    logging.debug("Setting transfer status to 'Running'")
    gearman_worker.ovdm.set_running_cruise_data_transfer(
        gearman_worker.cruise_data_transfer['cruiseDataTransferID'],
        os.getpid(), current_job.handle)

    logging.info("Testing configuration")
    gearman_worker.send_job_status(current_job, 1, 10)

    gm_client = python3_gearman.GearmanClient(
        [gearman_worker.ovdm.get_gearman_server()])

    gm_data = {
        'cruiseDataTransfer': gearman_worker.cruise_data_transfer,
        'cruiseID': gearman_worker.cruise_id
    }

    completed_job_request = gm_client.submit_job("testCruiseDataTransfer",
                                                 json.dumps(gm_data))
    results_obj = json.loads(completed_job_request.result)

    logging.debug('Connection Test Results: %s',
                  json.dumps(results_obj, indent=2))

    if results_obj['parts'][-1]['result'] == "Pass":  # Final Verdict
        logging.debug("Connection test passed")
        job_results['parts'].append({
            "partName": "Connection Test",
            "result": "Pass"
        })
    else:
        logging.warning("Connection test failed, quitting job")
        job_results['parts'].append({
            "partName": "Connection Test",
            "result": "Fail",
            "reason": results_obj['parts'][-1]['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(current_job, 2, 10)

    logging.info("Transferring files")
    output_results = None
    if gearman_worker.cruise_data_transfer['transferType'] == "4":  # SSH Server
        output_results = transfer_ssh_dest_dir(gearman_worker, current_job)
    else:
        logging.error("Unknown Transfer Type")
        job_results['parts'].append({
            "partName": "Transfer Files",
            "result": "Fail",
            "reason": "Unknown transfer type"
        })
        return json.dumps(job_results)

    if not output_results['verdict']:
        logging.error("Transfer of remote files failed: %s",
                      output_results['reason'])
        job_results['parts'].append({
            "partName": "Transfer Files",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    logging.debug("Transfer completed successfully")
    job_results['files'] = output_results['files']
    job_results['parts'].append({
        "partName": "Transfer Files",
        "result": "Pass"
    })

    if len(job_results['files']['new']) > 0:
        logging.debug("%s file(s) added", len(job_results['files']['new']))
    if len(job_results['files']['updated']) > 0:
        logging.debug("%s file(s) updated",
                      len(job_results['files']['updated']))
    if len(job_results['files']['exclude']) > 0:
        logging.debug("%s file(s) intentionally skipped",
                      len(job_results['files']['exclude']))

    gearman_worker.send_job_status(current_job, 9, 10)

    if job_results['files']['new'] or job_results['files']['updated']:

        logging.debug("Building logfiles")

        logfile_filename = gearman_worker.cruise_data_transfer['name'] + '_' + gearman_worker.transfer_start_date + '.log'

        log_contents = {
            'files': {
                'new': job_results['files']['new'],
                'updated': job_results['files']['updated']
            }
        }

        output_results = output_json_data_to_file(
            os.path.join(build_logfile_dirpath(gearman_worker),
                         logfile_filename), log_contents['files'])

        if output_results['verdict']:
            job_results['parts'].append({
                "partName": "Write transfer logfile",
                "result": "Pass"
            })
        else:
            logging.error("Error writing transfer logfile: %s",
                          logfile_filename)
            job_results['parts'].append({
                "partName": "Write transfer logfile",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

        output_results = set_owner_group_permissions(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
            os.path.join(build_logfile_dirpath(gearman_worker), logfile_filename))

        if not output_results['verdict']:
            job_results['parts'].append({
                "partName": "Set OpenVDM config file ownership/permissions",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

    gearman_worker.send_job_status(current_job, 10, 10)

    time.sleep(2)

    return json.dumps(job_results)
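
The `output_json_data_to_file()` helper used above is defined elsewhere in OpenVDM; from its call sites it takes a destination path and a JSON-serializable object and returns the same `{'verdict', 'reason'}` structure the other helpers use. A hypothetical sketch of that contract:

import json

def output_json_data_to_file(filepath, data):
    """Write data to filepath as JSON, reporting success via a verdict dict."""
    try:
        with open(filepath, 'w') as json_file:
            json.dump(data, json_file, indent=4)
    except IOError as err:
        return {'verdict': False, 'reason': "Unable to write JSON file: %s (%s)" % (filepath, err)}
    return {'verdict': True}
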
Example #13
def task_create_cruise_directory(gearman_worker, gearman_job):
    """
    Setup the cruise directory for the specified cruise ID
    """

    job_results = {'parts': []}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 1, 10)

    if os.path.exists(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir']):
        job_results['parts'].append({
            "partName": "Verify Base Directory exists",
            "result": "Pass"
        })
    else:
        logging.error("Failed to find base directory: %s",
                      gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'])
        job_results['parts'].append({
            "partName": "Verify Base Directory exists",
            "result": "Fail",
            "reason": "Failed to find base directory: " + gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir']
        })
        return json.dumps(job_results)

    if not os.path.exists(gearman_worker.cruise_dir):
        job_results['parts'].append({
            "partName": "Verify Cruise Directory does not exists",
            "result": "Pass"
        })
    else:
        logging.error("Cruise directory already exists: %s",
                      gearman_worker.cruise_dir)
        job_results['parts'].append({
            "partName": "Verify Cruise Directory does not exist",
            "result": "Fail",
            "reason": "Cruise directory " + gearman_worker.cruise_dir + " already exists"
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 2, 10)

    directorylist = build_directorylist(gearman_worker)
    logging.debug("Directory List: %s", json.dumps(directorylist, indent=2))

    if len(directorylist) > 0:
        job_results['parts'].append({
            "partName": "Build Directory List",
            "result": "Pass"
        })
    else:
        logging.warning("Directory list is empty")
        job_results['parts'].append({
            "partName": "Build Directory List",
            "result": "Fail",
            "reason": "Empty list of directories to create"
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 5, 10)

    output_results = create_directories(directorylist)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Create Directories",
            "result": "Pass"
        })
    else:
        logging.error(
            "Failed to create any/all of the cruise data directory structure")
        job_results['parts'].append({
            "partName": "Create Directories",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 7, 10)

    if gearman_worker.ovdm.show_only_current_cruise_dir() is True:
        logging.info("Clear read permissions for all cruise directories")
        lockdown_directory(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'],
            gearman_worker.cruise_dir)

        job_results['parts'].append({
            "partName": "Clear CruiseData Directory Read Permissions",
            "result": "Pass"
        })

    gearman_worker.send_job_status(gearman_job, 8, 10)

    output_results = set_owner_group_permissions(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
        gearman_worker.cruise_dir)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Set cruise directory ownership/permissions",
            "result": "Pass"
        })
    else:
        job_results['parts'].append({
            "partName": "Set cruise directory ownership/permissions",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
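
`create_directories()` is another external helper; it consumes the list produced by `build_directorylist()` and reports the same verdict/reason pair. A minimal sketch under those assumptions:

import os

def create_directories(directorylist):
    """Create every directory in the list, tolerating ones that already exist."""
    for directory in directorylist:
        try:
            os.makedirs(directory, exist_ok=True)
        except OSError as err:
            return {'verdict': False, 'reason': "Unable to create directory: %s (%s)" % (directory, err)}
    return {'verdict': True}
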
Example #14
def task_rebuild_md5_summary(gearman_worker, gearman_job): # pylint: disable=too-many-statements
    """
    Rebuild the existing MD5 summary files
    """

    job_results = {'parts':[]}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 1, 10)

    if os.path.exists(gearman_worker.cruise_dir):
        job_results['parts'].append({"partName": "Verify Cruise Directory exists", "result": "Pass"})
    else:
        logging.error("Cruise directory not found")
        job_results['parts'].append({"partName": "Verify Cruise Directory exists", "result": "Fail", "reason": "Unable to locate the cruise directory: " + gearman_worker.cruise_dir})
        return json.dumps(job_results)

    logging.info("Building filelist")
    filelist = build_filelist(gearman_worker.cruise_dir)
    logging.debug('Filelist: %s', json.dumps(filelist, indent=2))

    job_results['parts'].append({"partName": "Retrieve Filelist", "result": "Pass"})

    gearman_worker.send_job_status(gearman_job, 2, 10)

    logging.info("Building hashes")
    new_hashes = build_md5_hashes(gearman_worker, gearman_job, filelist)
    logging.debug("Hashes: %s", json.dumps(new_hashes, indent=2))

    if gearman_worker.stop:
        job_results['parts'].append({"partName": "Calculate Hashes", "result": "Fail", "reason": "Job was stopped by user"})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Calculate Hashes", "result": "Pass"})

    gearman_worker.send_job_status(gearman_job, 85, 100)

    logging.debug("Sorting Hashes")
    sorted_hashes = sorted(new_hashes, key=lambda hashes: hashes['filename'])

    gearman_worker.send_job_status(gearman_job, 9, 10)

    logging.info("Building MD5 Summary file")
    try:
        #logging.debug("Saving new MD5 Summary file")
        with open(gearman_worker.md5_summary_filepath, 'w') as md5_summary_file:

            for filehash in sorted_hashes:
                md5_summary_file.write(filehash['hash'] + ' ' + filehash['filename'] + '\n')

        job_results['parts'].append({"partName": "Writing MD5 Summary file", "result": "Pass"})

    except IOError:
        logging.error("Error saving MD5 Summary file: %s", gearman_worker.md5_summary_filepath)
        job_results['parts'].append({"partName": "Writing MD5 Summary file", "result": "Fail", "reason": "Error saving MD5 Summary file: " + gearman_worker.md5_summary_filepath})
        return json.dumps(job_results)

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], gearman_worker.md5_summary_filepath)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Set MD5 Summary file ownership/permissions", "result": "Pass"})
    else:
        logging.error("Failed to set directory ownership")
        job_results['parts'].append({"partName": "Set MD5 Summary file ownership/permissions", "result": "Fail", "reason": output_results['reason']})

    gearman_worker.send_job_status(gearman_job, 95, 100)

    logging.info("Building MD5 Summary MD5 file")

    output_results = build_md5_summary_md5(gearman_worker)
    if output_results['verdict']:
        job_results['parts'].append({"partName": "Writing MD5 Summary MD5 file", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Writing MD5 Summary MD5 file", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], gearman_worker.md5_summary_md5_filepath)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Set MD5 Summary MD5 file ownership/permissions", "result": "Pass"})
    else:
        logging.error("Failed to set directory ownership")
        job_results['parts'].append({"partName": "Set MD5 Summary MD5 file ownership/permissions", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 10)
    return json.dumps(job_results)
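
`build_md5_hashes()` receives the worker and job so it can push progress updates and honor the worker's `stop` flag mid-run, and it must return a list of `{'filename', 'hash'}` dicts for the sort above to work. A simplified sketch, assuming the filelist holds paths relative to the cruise directory (chunked reads keep memory use flat on large files):

import hashlib
import os

def build_md5_hashes(gearman_worker, gearman_job, filelist):
    """MD5 each file in filelist, reporting progress after every file."""
    hashes = []
    for index, filename in enumerate(filelist):
        if gearman_worker.stop:  # the user aborted the job
            break
        md5 = hashlib.md5()
        with open(os.path.join(gearman_worker.cruise_dir, filename), 'rb') as hash_file:
            for chunk in iter(lambda: hash_file.read(65536), b''):
                md5.update(chunk)
        hashes.append({'filename': filename, 'hash': md5.hexdigest()})
        gearman_worker.send_job_status(gearman_job, 2 + int(6 * (index + 1) / len(filelist)), 10)
    return hashes
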
Example #15
def task_update_md5_summary(gearman_worker, gearman_job): # pylint: disable=too-many-branches,too-many-statements,too-many-locals
    """
    Update the existing MD5 summary files
    """

    job_results = {'parts':[]}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 1, 10)

    logging.debug("Building filelist")
    filelist = []

    job_results['parts'].append({"partName": "Retrieve Filelist", "result": "Pass"})

    if payload_obj['files']['new'] or payload_obj['files']['updated']:
        filelist.extend(payload_obj['files']['new'])
        filelist.extend(payload_obj['files']['updated'])
    else:
        return json.dumps(job_results)

    #filelist = [os.path.join(gearman_worker.cruiseID, filename) for filename in filelist]
    logging.debug('Filelist: %s', json.dumps(filelist, indent=2))

    gearman_worker.send_job_status(gearman_job, 2, 10)

    logging.debug("Building hashes")
    new_hashes = build_md5_hashes(gearman_worker, gearman_job, filelist)
    logging.debug('Hashes: %s', json.dumps(new_hashes, indent=2))

    gearman_worker.send_job_status(gearman_job, 8, 10)

    if gearman_worker.stop:
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Calculate Hashes", "result": "Pass"})

    existing_hashes = []

    logging.debug("Processing existing MD5 summary file")

    try:
        with open(gearman_worker.md5_summary_filepath, 'r') as md5_summary_file:

            for line in md5_summary_file:
                (md5_hash, filename) = line.split(' ', 1)
                existing_hashes.append({'hash': md5_hash, 'filename': filename.rstrip('\n')})

    except IOError:
        logging.error("Error Reading pre-existing MD5 Summary file: %s", gearman_worker.md5_summary_filepath)
        job_results['parts'].append({"partName": "Reading pre-existing MD5 Summary file", "result": "Fail", "reason": "Error Reading pre-existing MD5 Summary file: " + gearman_worker.md5_summary_filepath})
        return json.dumps(job_results)

    #logging.debug('Existing Hashes:', json.dumps(existing_hashes, indent=2))
    job_results['parts'].append({"partName": "Reading pre-existing MD5 Summary file", "result": "Pass"})

    row_added = 0
    row_updated = 0

    for new_hash in new_hashes:
        updated = False
        for existing_hash in existing_hashes:
            if new_hash['filename'] == existing_hash['filename']:
                existing_hash['hash'] = new_hash['hash']
                updated = True
                row_updated += 1
                break

        if not updated:
            existing_hashes.append({'hash': new_hash['hash'], 'filename': new_hash['filename']})
            row_added += 1

    if row_added > 0:
        logging.debug("%s row(s) added", row_added)
    if row_updated > 0:
        logging.debug("%s row(s) updated", row_updated)

    gearman_worker.send_job_status(gearman_job, 85, 100)

    #logging.debug("Sorting hashes")
    sorted_hashes = sorted(existing_hashes, key=lambda hashes: hashes['filename'])

    logging.debug("Building MD5 Summary file")
    try:
        with open(gearman_worker.md5_summary_filepath, 'w') as md5_summary_file:

            for filehash in sorted_hashes:
                md5_summary_file.write(filehash['hash'] + ' ' + filehash['filename'] + '\n')

        job_results['parts'].append({"partName": "Writing MD5 Summary file", "result": "Pass"})

    except IOError:
        logging.error("Error updating MD5 Summary file: %s", gearman_worker.md5_summary_filepath)
        job_results['parts'].append({"partName": "Writing MD5 Summary file", "result": "Fail", "reason": "Error updating MD5 Summary file: " + gearman_worker.md5_summary_filepath})
        return json.dumps(job_results)

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], gearman_worker.md5_summary_filepath)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Set MD5 Summary file ownership/permissions", "result": "Pass"})
    else:
        logging.error("Failed to set directory ownership")
        job_results['parts'].append({"partName": "Set MD5 Summary file ownership/permissions", "result": "Fail", "reason": output_results['reason']})

    gearman_worker.send_job_status(gearman_job, 9, 10)

    logging.debug("Building MD5 Summary MD5 file")

    output_results = build_md5_summary_md5(gearman_worker)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Writing MD5 Summary MD5 file", "result": "Pass"})
    else:
        job_results['parts'].append({"partName": "Writing MD5 Summary MD5 file", "result": "Fail", "reason": output_results['reason']})

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], gearman_worker.md5_summary_md5_filepath)

    if output_results['verdict']:
        job_results['parts'].append({"partName": "Set MD5 Summary MD5 file ownership/permissions", "result": "Pass"})
    else:
        logging.error("Failed to set directory ownership")
        job_results['parts'].append({"partName": "Set MD5 Summary MD5 file ownership/permissions", "result": "Fail", "reason": output_results['reason']})

    gearman_worker.send_job_status(gearman_job, 10, 10)
    return json.dumps(job_results)
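
A design note on the merge loop in this task: matching every new hash against the existing list is O(n·m). Because filenames are unique, a dict-keyed merge produces the same sorted output in linear time (the `row_added`/`row_updated` counters would need a membership check before the update), for example:

# Overlay new hashes onto the existing set, keyed by filename.
merged = {entry['filename']: entry['hash'] for entry in existing_hashes}
merged.update((entry['filename'], entry['hash']) for entry in new_hashes)

sorted_hashes = [{'filename': filename, 'hash': md5_hash}
                 for filename, md5_hash in sorted(merged.items())]
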
Example #16
def task_rebuild_lowering_directory(gearman_worker, gearman_job):
    """
    Verify, and create if necessary, all of the lowering sub-directories
    """

    job_results = {'parts': []}

    payload_obj = json.loads(gearman_job.data)
    logging.debug("Payload: %s", json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 1, 10)

    if not os.path.exists(gearman_worker.lowering_dir):
        logging.error("Lowering directory not found")
        job_results['parts'].append({
            "partName": "Verify Lowering Directory exists",
            "result": "Fail",
            "reason": "Unable to find lowering directory: " + gearman_worker.lowering_dir
        })
        return json.dumps(job_results)

    job_results['parts'].append({
        "partName": "Verify Lowering Directory exists",
        "result": "Pass"
    })

    gearman_worker.send_job_status(gearman_job, 2, 10)

    logging.info("Build directory list")
    directorylist = build_directorylist(gearman_worker)
    logging.debug("Directory List: %s", json.dumps(directorylist, indent=2))

    if len(directorylist) == 0:
        logging.error("Directory list is empty")
        job_results['parts'].append({
            "partName": "Build Directory List",
            "result": "Fail",
            "reason": "Empty list of directories to create"
        })
        return json.dumps(job_results)

    job_results['parts'].append({
        "partName": "Build Directory List",
        "result": "Pass"
    })

    gearman_worker.send_job_status(gearman_job, 5, 10)

    logging.info("Create directories")

    output_results = create_directories(directorylist)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Create Directories",
            "result": "Pass"
        })
    else:
        logging.error(
            "Unable to create any/all of the lowering data directory structure"
        )
        job_results['parts'].append({
            "partName": "Create Directories",
            "result": "Fail",
            "reason": output_results['reason']
        })

    gearman_worker.send_job_status(gearman_job, 7, 10)

    logging.info("Set directory ownership/permissions")

    output_results = set_owner_group_permissions(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
        gearman_worker.lowering_dir)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Set Directory ownership/permissions",
            "result": "Pass"
        })
    else:
        logging.error("Failed to set directory ownership")
        job_results['parts'].append({
            "partName": "Set Directory ownership/permissions",
            "result": "Fail",
            "reason": output_results['reason']
        })

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
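
`set_owner_group_permissions()` appears throughout these tasks with a `(username, path)` signature and the usual verdict/reason return. A hypothetical sketch that recursively re-owns a path to that user (the group mapping and the decision to skip mode bits are assumptions):

import os
import pwd

def set_owner_group_permissions(username, path):
    """Recursively chown path to username's uid/gid; a guess at the real helper."""
    try:
        user_record = pwd.getpwnam(username)
        uid, gid = user_record.pw_uid, user_record.pw_gid
        os.chown(path, uid, gid)
        if os.path.isdir(path):
            for dirpath, dirnames, filenames in os.walk(path):
                for name in dirnames + filenames:
                    os.chown(os.path.join(dirpath, name), uid, gid)
    except (KeyError, OSError) as err:
        return {'verdict': False, 'reason': "Unable to set ownership of %s (%s)" % (path, err)}
    return {'verdict': True}
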
Example #17
def task_finalize_current_cruise(gearman_worker, gearman_job): # pylint: disable=too-many-return-statements,too-many-statements
    """
    Finalize the current cruise
    """
    job_results = {'parts':[]}

    gearman_worker.send_job_status(gearman_job, 1, 10)

    publicdata_dir = gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehousePublicDataDir']
    from_publicdata_dir = os.path.join(gearman_worker.cruise_dir, gearman_worker.ovdm.get_required_extra_directory_by_name('From_PublicData')['destDir'])

    ovdm_config_file_path = os.path.join(gearman_worker.cruise_dir, DEFAULT_CRUISE_CONFIG_FN)

    if not os.path.exists(gearman_worker.cruise_dir):
        job_results['parts'].append({"partName": "Verify cruise directory exists", "result": "Fail", "reason": "Cruise directory: " + gearman_worker.cruise_dir + " could not be found"})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Verify cruise directory exists", "result": "Pass"})

    gearman_worker.send_job_status(gearman_job, 2, 10)
    logging.info("Queuing Collection System Transfers")

    gm_client = python3_gearman.GearmanClient([gearman_worker.ovdm.get_gearman_server()])

    gm_data = {
        'cruiseID': gearman_worker.cruise_id,
        'cruiseStartDate': gearman_worker.cruise_start_date,
        'systemStatus': "On",
        'collectionSystemTransfer': {}
    }

    collection_system_transfer_jobs = []

    collection_system_transfers = gearman_worker.ovdm.get_active_collection_system_transfers(lowering=False)

    for collection_system_transfer in collection_system_transfers:

        logging.debug("Queuing runCollectionSystemTransfer job for %s", collection_system_transfer['name'])
        gm_data['collectionSystemTransfer']['collectionSystemTransferID'] = collection_system_transfer['collectionSystemTransferID']

        collection_system_transfer_jobs.append( {"task": "runCollectionSystemTransfer", "data": json.dumps(gm_data)} )

    gearman_worker.send_job_status(gearman_job, 3, 10)

    if len(collection_system_transfer_jobs) > 0:
        logging.info("Submitting runCollectionSystemTransfer jobs")
        submitted_job_request = gm_client.submit_multiple_jobs(collection_system_transfer_jobs, background=False, wait_until_complete=False)

        gearman_worker.send_job_status(gearman_job, 4, 10)

        time.sleep(1)
        gm_client.wait_until_jobs_completed(submitted_job_request)
        logging.info("Completed runCollectionSystemTransfers jobs")

    gearman_worker.send_job_status(gearman_job, 5, 10)

    if gearman_worker.ovdm.get_transfer_public_data():

        logging.info("Transferring files from PublicData to the cruise data directory")

        logging.debug("Verify From_PublicData directory exists within the cruise data directory")
        if not os.path.exists(from_publicdata_dir):
            job_results['parts'].append({"partName": "Verify From_PublicData directory exists", "result": "Fail", "reason": "From_PublicData directory: " + from_publicdata_dir + " could not be found"})
            return json.dumps(job_results)

        job_results['parts'].append({"partName": "Verify From_PublicData directory exists", "result": "Pass"})

        logging.debug("Verify PublicData Directory exists")
        if not os.path.exists(publicdata_dir):
            job_results['parts'].append({"partName": "Verify PublicData directory exists", "result": "Fail", "reason": "PublicData directory: " + publicdata_dir + " could not be found"})
            return json.dumps(job_results)

        job_results['parts'].append({"partName": "Verify PublicData directory exists", "result": "Pass"})

        logging.debug("Transferring files")
        output_results = transfer_publicdata_dir(gearman_worker, gearman_job)
        logging.debug("Transfer Complete")

        if not output_results['verdict']:
            job_results['parts'].append({"partName": "Transfer PublicData files", "result": "Fail", "reason": output_results['reason']})
            return json.dumps(job_results)

        job_results['parts'].append({"partName": "Transfer PublicData files", "result": "Pass"})

        files = output_results['files']

        logging.debug("PublicData Files Transferred: %s", json.dumps(files, indent=2))

        logging.info("Clearing files from PublicData")
        output_results = clear_directory(publicdata_dir)
        logging.debug("Clearing Complete")

        if not output_results['verdict']:
            job_results['parts'].append({"partName": "Clear out PublicData files", "result": "Fail", "reason": output_results['reason']})
            return json.dumps(job_results)

        job_results['parts'].append({"partName": "Clear out PublicData files", "result": "Pass"})

        gearman_worker.send_job_status(gearman_job, 9, 10)

        if len(files['new']) > 0 or len(files['updated']) > 0:

            output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], from_publicdata_dir)

            if not output_results['verdict']:
                job_results['parts'].append({"partName": "Set file/directory ownership/permissions", "result": "Fail", "reason": output_results['reason']})
                return json.dumps(job_results)

            job_results['parts'].append({"partName": "Set file/directory ownership/permissions", "result": "Pass"})
    else:
        files = {'include': [], 'exclude': [], 'new': [], 'updated': []}

    gearman_worker.send_job_status(gearman_job, 95, 100)

    #build OpenVDM Config file
    logging.info("Exporting OpenVDM Configuration")
    output_results = export_ovdm_config(gearman_worker, ovdm_config_file_path, finalize=True)

    if not output_results['verdict']:
        job_results['parts'].append({"partName": "Export OpenVDM config data to file", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Export OpenVDM config data to file", "result": "Pass"})

    output_results = set_owner_group_permissions(gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'], ovdm_config_file_path)

    if not output_results['verdict']:
        job_results['parts'].append({"partName": "Set OpenVDM config file ownership/permissions", "result": "Fail", "reason": output_results['reason']})
        return json.dumps(job_results)

    job_results['parts'].append({"partName": "Set OpenVDM config file ownership/permissions", "result": "Pass"})

    logging.info("Initiating MD5 Summary Task")

    gm_data = {
        'cruiseID': gearman_worker.cruise_id,
        'files': files
    }

    # gm_data['files']['new'] = [from_publicdata_dir.replace(gearman_worker.cruise_dir, '') + '/' + filename for filename in gm_data['files']['new']]
    # gm_data['files']['updated'] = [from_publicdata_dir.replace(gearman_worker.cruise_dir, '') + '/' + filename for filename in gm_data['files']['updated']]
    gm_data['files']['updated'].append(DEFAULT_CRUISE_CONFIG_FN)

    gm_client.submit_job("updateMD5Summary", json.dumps(gm_data))

    logging.debug("MD5 Summary Task Complete")

    # need to add code for cruise data transfers

    gearman_worker.send_job_status(gearman_job, 10, 10)
    return json.dumps(job_results)
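
None of these task functions run on their own; they are callbacks registered with a Gearman worker under the job names the examples submit ("createCruiseDirectory", "rebuildMD5Summary", "updateMD5Summary", and so on). A minimal registration sketch using the same `python3_gearman` library; the server address is an assumption, as the real workers read it from the OpenVDM configuration:

import python3_gearman

# The real OpenVDM workers subclass GearmanWorker to carry the cruise/lowering
# state (cruise_dir, ovdm, shipboard_data_warehouse_config, ...) used above.
new_worker = python3_gearman.GearmanWorker(['localhost:4730'])
new_worker.register_task("createCruiseDirectory", task_create_cruise_directory)
new_worker.register_task("rebuildMD5Summary", task_rebuild_md5_summary)
new_worker.register_task("updateMD5Summary", task_update_md5_summary)
new_worker.work()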