Example 1
def export_ovdm_config(gearman_worker, ovdm_config_file_path, finalize=False):
    """
    Export the current OpenVDM configuration to the specified filepath
    """

    ovdm_config = gearman_worker.ovdm.get_ovdm_config()

    if finalize:
        ovdm_config['cruiseFinalizedOn'] = ovdm_config['configCreatedOn']
    elif os.path.isfile(ovdm_config_file_path):
        logging.info("Reading existing configuration file")
        try:
            with open(ovdm_config_file_path) as json_file:
                data = json.load(json_file)
                if "cruiseFinalizedOn" in data:
                    ovdm_config['cruiseFinalizedOn'] = data['cruiseFinalizedOn']

        except OSError as err:
            logging.debug(str(err))
            return {'verdict': False, 'reason': "Unable to read existing configuration file"}

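    # Redact stored credentials so they never land in the exported config file.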
    for transfer in ovdm_config['cruiseDataTransfersConfig']:
        del transfer['sshPass']
        del transfer['rsyncPass']
        del transfer['smbPass']

    for transfer in ovdm_config['collectionSystemTransfersConfig']:
        del transfer['sshPass']
        del transfer['rsyncPass']
        del transfer['smbPass']

    return output_json_data_to_file(ovdm_config_file_path, ovdm_config)
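
# A minimal usage sketch (worker object and path are hypothetical; OpenVDM
# calls this again with finalize=True when a cruise is closed out):
#
#   result = export_ovdm_config(worker, '/vault/CruiseData/CS2301/ovdmConfig.json')
#   if not result['verdict']:
#       logging.error(result['reason'])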
Example 2
def task_rebuild_data_dashboard(gearman_worker, gearman_job):  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    """
    Rebuild the existing dashboard files
    """

    job_results = {'parts': [], 'files': {'new': [], 'updated': []}}

    payload_obj = json.loads(gearman_job.data)
    logging.debug('Payload: %s', json.dumps(payload_obj, indent=2))

    if os.path.exists(gearman_worker.data_dashboard_dir):
        job_results['parts'].append({
            "partName": "Verify Data Dashboard Directory exists",
            "result": "Pass"
        })
    else:
        logging.error("Data dashboard directory not found: %s",
                      gearman_worker.data_dashboard_dir)
        job_results['parts'].append({
            "partName": "Verify Data Dashboard Directory exists",
            "result": "Fail",
            "reason": "Unable to locate the data dashboard directory: " + gearman_worker.data_dashboard_dir
        })
        return json.dumps(job_results)

    collection_system_transfers = gearman_worker.ovdm.get_active_collection_system_transfers()

    gearman_worker.send_job_status(gearman_job, 1, 100)

    new_manifest_entries = []

    collection_system_transfer_count = len(collection_system_transfers)
    collection_system_transfer_index = 0
    for collection_system_transfer in collection_system_transfers:  # pylint: disable=too-many-nested-blocks

        logging.info('Processing data from: %s',
                     collection_system_transfer['name'])

        processing_script_filename = os.path.join(
            gearman_worker.ovdm.get_plugin_dir(),
            collection_system_transfer['name'].lower() +
            gearman_worker.ovdm.get_plugin_suffix())
        logging.debug("Processing Script Filename: %s",
                      processing_script_filename)

        if not os.path.isfile(processing_script_filename):
            logging.warning(
                "Processing script for collection system %s not found, moving on.",
                collection_system_transfer['name'])
            gearman_worker.send_job_status(
                gearman_job,
                int(10 + (80 * float(collection_system_transfer_index) /
                          float(collection_system_transfer_count))), 100)
            collection_system_transfer_index += 1
            continue

        # collection_system_transferOutputDir = os.path.join(gearman_worker.data_dashboard_dir, collection_system_transfer['destDir'])

        # Build the file list
        filelist = []
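        # cruiseOrLowering == "0" appears to mean the transfer stores
        # cruise-level data; any other value means per-lowering data
        # (inferred from the else branch below).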
        if collection_system_transfer['cruiseOrLowering'] == "0":
            collection_system_transfer_input_dir = os.path.join(
                gearman_worker.cruise_dir,
                collection_system_transfer['destDir'])
            filelist.extend(
                build_filelist(collection_system_transfer_input_dir))
            filelist = [
                os.path.join(collection_system_transfer['destDir'], filename)
                for filename in filelist
            ]

        else:
            lowerings = gearman_worker.ovdm.get_lowerings()
            lowering_base_dir = gearman_worker.shipboard_data_warehouse_config[
                'loweringDataBaseDir']

            for lowering in lowerings:
                collection_system_transfer_input_dir = os.path.join(
                    gearman_worker.cruise_dir, lowering_base_dir, lowering,
                    collection_system_transfer['destDir'])
                lowering_filelist = build_filelist(
                    collection_system_transfer_input_dir)
                filelist.extend([
                    os.path.join(lowering_base_dir, lowering,
                                 collection_system_transfer['destDir'],
                                 filename) for filename in lowering_filelist
                ])

        logging.debug("FileList: %s", json.dumps(filelist, indent=2))

        file_count = len(filelist)
        file_index = 0
        logging.info("%s file(s) to process", file_count)

        for filename in filelist:

            if gearman_worker.stop:
                break

            logging.info("Processing file: %s", filename)
            json_filename = os.path.splitext(filename)[0] + '.json'
            logging.debug("jsonFileName: %s", json_filename)
            raw_filepath = os.path.join(gearman_worker.cruise_dir, filename)
            logging.debug("rawFilePath: %s", raw_filepath)
            json_filepath = os.path.join(gearman_worker.data_dashboard_dir,
                                         json_filename)
            logging.debug("jsonFilePath: %s", json_filepath)

            if os.stat(raw_filepath).st_size == 0:
                logging.warning("File %s is empty", filename)
                continue

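            # The dashboard plugin is invoked twice per file: first with
            # --dataType to identify the dashboard data type, then again on
            # the raw file itself to produce the dashboard JSON.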
            command = [
                PYTHON_BINARY, processing_script_filename, '--dataType',
                raw_filepath
            ]

            logging.debug("Get Datatype Command: %s", ' '.join(command))

            datatype_proc = subprocess.run(command,
                                           capture_output=True,
                                           text=True,
                                           check=False)

            if datatype_proc.stdout:
                dd_type = datatype_proc.stdout.rstrip('\n')
                logging.debug("Found to be type: %s", dd_type)

                command = [
                    PYTHON_BINARY, processing_script_filename, raw_filepath
                ]

                logging.debug("Processing Command: %s", ' '.join(command))

                data_proc = subprocess.run(command,
                                           capture_output=True,
                                           text=True,
                                           check=False)

                if data_proc.stdout:
                    try:
                        logging.debug("Parsing output")
                        # logging.debug(data_proc.stdout)
                        out_obj = json.loads(data_proc.stdout)
                    except Exception as err:
                        logging.error(str(err))
                        error_title = 'Error parsing output'
                        error_body = 'Invalid JSON output received from processing. Command: ' + ' '.join(command)
                        logging.error("%s: %s", error_title, error_body)
                        gearman_worker.ovdm.send_msg(error_title, error_body)
                        job_results['parts'].append({
                            "partName": "Parsing JSON output " + filename,
                            "result": "Fail",
                            "reason": error_title + ': ' + error_body
                        })
                    else:
                        if out_obj is None:
                            error_title = 'Error processing file'
                            error_body = 'No JSON output received from file. Processing Command: ' + ' '.join(command)
                            logging.error("%s: %s", error_title, error_body)
                            gearman_worker.ovdm.send_msg(error_title, error_body)
                            job_results['parts'].append({
                                "partName": "Parsing JSON output from file " + filename,
                                "result": "Fail",
                                "reason": error_title + ': ' + error_body
                            })

                            if data_proc.stderr:
                                logging.error('err: %s', data_proc.stderr)

                        elif 'error' in out_obj:
                            error_title = 'Error processing file'
                            error_body = out_obj['error']
                            logging.error("%s: %s", error_title, error_body)
                            gearman_worker.ovdm.send_msg(error_title, error_body)
                            job_results['parts'].append({
                                "partName": "Processing Datafile " + filename,
                                "result": "Fail",
                                "reason": error_title + ': ' + error_body
                            })

                        else:
                            #job_results['parts'].append({"partName": "Processing Datafile " + filename, "result": "Pass"})
                            output_results = output_json_data_to_file(
                                json_filepath, out_obj)

                            if output_results['verdict']:
                                job_results['parts'].append({
                                    "partName": "Writing DashboardData file: " + filename,
                                    "result": "Pass"
                                })
                            else:
                                error_title = 'Error writing file'
                                error_body = "Error Writing DashboardData file: " + filename
                                logging.error("%s: %s", error_title,
                                              error_body)
                                gearman_worker.ovdm.send_msg(
                                    error_title, error_body)

                                job_results['parts'].append({
                                    "partName": "Writing Dashboard file: " + filename,
                                    "result": "Fail",
                                    "reason": output_results['reason']
                                })

                            new_manifest_entries.append({
                                "type": dd_type,
                                "dd_json": json_filepath.replace(
                                    gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', ''),
                                "raw_data": raw_filepath.replace(
                                    gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', '')
                            })
                else:
                    error_title = 'Error processing file'
                    error_body = 'No JSON output received from file. Processing Command: ' + ' '.join(command)
                    logging.error("%s: %s", error_title, error_body)
                    gearman_worker.ovdm.send_msg(error_title, error_body)
                    job_results['parts'].append({
                        "partName": "Parsing JSON output from file " + filename,
                        "result": "Fail",
                        "reason": error_title + ': ' + error_body
                    })

                    if data_proc.stderr:
                        logging.error('err: %s', data_proc.stderr)

            else:
                logging.warning("File is of unknown datatype, moving on")

                if datatype_proc.stderr:
                    logging.error('err: %s', datatype_proc.stderr)

            gearman_worker.send_job_status(
                gearman_job,
                int(10 + 70 * float(file_index) / float(file_count)), 100)
            file_index += 1

        collection_system_transfer_index += 1

    gearman_worker.send_job_status(gearman_job, 90, 100)

    logging.info("Update Dashboard Manifest file")
    output_results = output_json_data_to_file(
        gearman_worker.data_dashboard_manifest_file_path, new_manifest_entries)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Updating manifest file",
            "result": "Pass"
        })
    else:
        logging.error("Error updating manifest file %s",
                      gearman_worker.data_dashboard_manifest_file_path)
        job_results['parts'].append({
            "partName": "Updating manifest file",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 95, 100)

    logging.info("Setting file ownership/permissions")
    output_results = set_owner_group_permissions(
        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
        gearman_worker.data_dashboard_dir)

    if output_results['verdict']:
        job_results['parts'].append({
            "partName": "Setting file/directory ownership",
            "result": "Pass"
        })
    else:
        logging.error("Error Setting file/directory ownership/permissions")
        job_results['parts'].append({
            "partName": "Setting file/directory ownership",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 99, 100)

    data_dashboard_dest_dir = gearman_worker.ovdm.get_required_extra_directory_by_name('Dashboard_Data')['destDir']
    job_results['files']['updated'] = [
        os.path.join(data_dashboard_dest_dir, filepath)
        for filepath in build_filelist(gearman_worker.data_dashboard_dir)
    ]  # might need to remove cruise_dir from beginning of filepaths

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
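
# A minimal registration sketch (server address and task name are assumptions;
# OpenVDM workers are built on python3_gearman):
#
#   worker = python3_gearman.GearmanWorker(['localhost:4730'])
#   worker.register_task('rebuildDataDashboard', task_rebuild_data_dashboard)
#   worker.work()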
Example 3
def task_update_data_dashboard(gearman_worker, gearman_job):  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    """
    Update the existing dashboard files with new/updated raw data
    """
    job_results = {'parts': [], 'files': {'new': [], 'updated': []}}

    payload_obj = json.loads(gearman_job.data)
    logging.debug('Payload: %s', json.dumps(payload_obj, indent=2))

    gearman_worker.send_job_status(gearman_job, 5, 100)

    logging.info('Collection System Transfer: %s',
                 gearman_worker.collection_system_transfer['name'])

    new_manifest_entries = []
    remove_manifest_entries = []
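    # Files that parse cleanly are queued for addition/update in the dashboard
    # manifest; files that no longer yield usable output are queued for removal.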

    # Check for the processing script
    processing_script_filename = os.path.join(
        gearman_worker.ovdm.get_plugin_dir(),
        gearman_worker.collection_system_transfer['name'].lower() +
        gearman_worker.ovdm.get_plugin_suffix())
    logging.debug("Processing Script Filename: %s", processing_script_filename)

    if os.path.isfile(processing_script_filename):
        job_results['parts'].append({
            "partName": "Dashboard Processing File Located",
            "result": "Pass"
        })
    else:
        logging.warning("Processing script not found: %s",
                        processing_script_filename)
        return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 100)

    # Build the file list
    filelist = []

    if payload_obj['files']['new'] or payload_obj['files']['updated']:
        filelist = payload_obj['files']['new']
        filelist += payload_obj['files']['updated']
        logging.debug('File List: %s', json.dumps(filelist, indent=2))
        job_results['parts'].append({
            "partName": "Retrieve Filelist",
            "result": "Pass"
        })

    else:
        logging.warning("No new or updated files to process")
        job_results['parts'].append({
            "partName": "Retrieve Filelist",
            "result": "Pass"
        })
        return json.dumps(job_results)

    file_count = len(filelist)
    file_index = 0
    for filename in filelist:  # pylint: disable=too-many-nested-blocks

        if gearman_worker.stop:
            break

        logging.info("Processing file: %s", filename)
        json_filename = os.path.splitext(filename)[0] + '.json'
        raw_filepath = os.path.join(gearman_worker.cruise_dir, filename)
        json_filepath = os.path.join(gearman_worker.data_dashboard_dir,
                                     json_filename)

        if not os.path.isfile(raw_filepath):
            job_results['parts'].append({
                "partName": "Verify data file exists",
                "result": "Fail",
                "reason": "Unable to find data file: " + filename
            })
            logging.warning("File not found %s, skipping", filename)
            continue

        if os.stat(raw_filepath).st_size == 0:
            logging.warning("File is empty %s, skipping", filename)
            continue

        command = [
            PYTHON_BINARY, processing_script_filename, '--dataType',
            raw_filepath
        ]

        logging.debug("DataType Retrieval Command: %s", ' '.join(command))

        datatype_proc = subprocess.run(command,
                                       capture_output=True,
                                       text=True,
                                       check=False)

        if datatype_proc.stdout:
            dd_type = datatype_proc.stdout.rstrip('\n')
            logging.debug("DataType found to be: %s", dd_type)

            command = [PYTHON_BINARY, processing_script_filename, raw_filepath]

            logging.debug("Data Processing Command: %s", ' '.join(command))

            data_proc = subprocess.run(command,
                                       capture_output=True,
                                       text=True,
                                       check=False)

            if data_proc.stdout:
                try:
                    logging.debug("Verifying output")
                    out_obj = json.loads(data_proc.stdout)
                except Exception as err:
                    logging.error("Error parsing JSON output from file: %s",
                                  filename)
                    logging.debug(str(err))
                    job_results['parts'].append({
                        "partName": "Parsing JSON output from file " + filename,
                        "result": "Fail",
                        "reason": "Error parsing JSON output from file: " + filename
                    })
                    continue
                else:
                    if not out_obj:
                        error_title = 'Datafile Parsing error'
                        error_body = "Parser returned no output. Parsing command: " + ' '.join(command)
                        logging.error("%s: %s", error_title, error_body)
                        gearman_worker.ovdm.send_msg(error_title, error_body)
                    elif 'error' in out_obj:
                        error_title = 'Datafile Parsing error'
                        error_body = out_obj['error']
                        logging.error("%s: %s", error_title, error_body)
                        gearman_worker.ovdm.send_msg(error_title, error_body)
                    else:
                        output_results = output_json_data_to_file(
                            json_filepath, out_obj)

                        if output_results['verdict']:
                            job_results['parts'].append({
                                "partName": "Writing DashboardData file: " + filename,
                                "result": "Pass"
                            })
                        else:
                            error_title = 'Data Dashboard Processing failed'
                            error_body = "Error Writing DashboardData file: " + filename + ". Reason: " + output_results['reason']
                            logging.error("%s: %s", error_title, error_body)
                            gearman_worker.ovdm.send_msg(error_title, error_body)
                            job_results['parts'].append({
                                "partName": "Writing Dashboard file: " + filename,
                                "result": "Fail",
                                "reason": output_results['reason']
                            })

                        new_manifest_entries.append({
                            "type": dd_type,
                            "dd_json": json_filepath.replace(
                                gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', ''),
                            "raw_data": raw_filepath.replace(
                                gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', '')
                        })
            else:
                error_title = 'Data Dashboard Processing failed'
                error_body = 'No JSON output received from file. Parsing Command: ' + ' '.join(command)
                logging.error("%s: %s", error_title, error_body)
                gearman_worker.ovdm.send_msg(error_title, error_body)
                remove_manifest_entries.append({
                    "dd_json": json_filepath.replace(
                        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', ''),
                    "raw_data": raw_filepath.replace(
                        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', '')
                })

                #job_results['parts'].append({"partName": "Parsing JSON output from file " + filename, "result": "Fail"})
                if data_proc.stderr:
                    logging.error("Err: %s", data_proc.stderr)
        else:
            logging.warning("File is of unknown datatype: %s", raw_filepath)
            remove_manifest_entries.append({
                "dd_json": json_filepath.replace(
                    gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', ''),
                "raw_data": raw_filepath.replace(
                    gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'] + '/', '')
            })

            if datatype_proc.stderr:
                logging.error("Err: %s", datatype_proc.stderr)

        gearman_worker.send_job_status(
            gearman_job, int(10 + 70 * float(file_index) / float(file_count)),
            100)
        file_index += 1

    gearman_worker.send_job_status(gearman_job, 8, 10)

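    # Merge this run's results into the manifest already on disk: drop removed
    # entries (and their orphaned dd_json files), then add or update the rest.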
    if len(new_manifest_entries) > 0:
        logging.info("Updating Manifest file: %s",
                     gearman_worker.data_dashboard_manifest_file_path)

        rows_removed = 0

        existing_manifest_entries = []

        try:
            with open(gearman_worker.data_dashboard_manifest_file_path, 'r') as dashboard_manifest_file:
                existing_manifest_entries = json.load(dashboard_manifest_file)

            job_results['parts'].append({
                "partName": "Reading pre-existing Dashboard manifest file",
                "result": "Pass"
            })

        except IOError:
            logging.warning("Error Reading Dashboard Manifest file %s",
                            gearman_worker.data_dashboard_manifest_file_path)
        except Exception as err:
            logging.error(str(err))
            job_results['parts'].append({
                "partName": "Reading pre-existing Dashboard manifest file",
                "result": "Fail",
                "reason": "Error reading dashboard manifest file: " + gearman_worker.data_dashboard_manifest_file_path
            })
            return json.dumps(job_results)

        logging.debug("Entries to remove: %s",
                      json.dumps(remove_manifest_entries, indent=2))
        for remove_entry in remove_manifest_entries:
            for idx, existing_entry in enumerate(existing_manifest_entries):
                if remove_entry['raw_data'] == existing_entry['raw_data']:
                    del existing_manifest_entries[idx]
                    rows_removed += 1

                    dd_json_filepath = os.path.join(
                        gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseBaseDir'],
                        remove_entry['dd_json'])

                    if os.path.isfile(dd_json_filepath):
                        logging.info("Deleting orphaned dd_json file %s", dd_json_filepath)
                        os.remove(dd_json_filepath)
                    break

        logging.debug("Entries to add/update: %s",
                      json.dumps(new_manifest_entries, indent=2))
        for new_entry in new_manifest_entries:
            updated = False
            for existing_entry in existing_manifest_entries:
                if new_entry['raw_data'] == existing_entry['raw_data']:
                    updated = True
                    job_results['files']['updated'].append(
                        new_entry['dd_json'].replace(
                            gearman_worker.cruise_id + '/', ''))
                    break

            if not updated:  #added
                job_results['files']['new'].append(
                    new_entry['dd_json'].replace(
                        gearman_worker.cruise_id + '/', ''))
                existing_manifest_entries.append(new_entry)

        if len(job_results['files']['new']) > 0:
            logging.info("%s row(s) added", len(job_results['files']['new']))
        if len(job_results['files']['updated']) > 0:
            logging.info("%s row(s) updated",
                         len(job_results['files']['updated']))
        if rows_removed:
            logging.info("%s row(s) removed", rows_removed)

        output_results = output_json_data_to_file(
            gearman_worker.data_dashboard_manifest_file_path,
            existing_manifest_entries)

        if not output_results['verdict']:
            logging.error("Error Writing Dashboard manifest file: %s",
                          gearman_worker.data_dashboard_manifest_file_path)
            job_results['parts'].append({
                "partName": "Writing Dashboard manifest file",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

        job_results['parts'].append({
            "partName": "Writing Dashboard manifest file",
            "result": "Pass"
        })
        job_results['files']['updated'].append(
            os.path.join(
                gearman_worker.ovdm.get_required_extra_directory_by_name(
                    'Dashboard_Data')['destDir'],
                DEFAULT_DATA_DASHBOARD_MANIFEST_FN))

        gearman_worker.send_job_status(gearman_job, 9, 10)

        logging.info("Setting file ownership/permissions")
        output_results = set_owner_group_permissions(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
            gearman_worker.data_dashboard_dir)

        if output_results['verdict']:
            job_results['parts'].append({
                "partName": "Set file/directory ownership",
                "result": "Pass"
            })
        else:
            job_results['parts'].append({
                "partName": "Set file/directory ownership",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

    gearman_worker.send_job_status(gearman_job, 10, 10)

    return json.dumps(job_results)
def task_run_ship_to_shore_transfer(gearman_worker, current_job):  # pylint: disable=too-many-statements
    """
    Perform the ship-to-shore transfer
    """

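    # Short random delay, presumably to stagger transfers that start together.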
    time.sleep(randint(0, 2))

    job_results = {
        'parts': [{
            "partName": "Transfer In-Progress",
            "result": "Pass"
        }, {
            "partName": "Transfer Enabled",
            "result": "Pass"
        }],
        'files': {}
    }

    logging.debug("Setting transfer status to 'Running'")
    gearman_worker.ovdm.set_running_cruise_data_transfer(
        gearman_worker.cruise_data_transfer['cruiseDataTransferID'],
        os.getpid(), current_job.handle)

    logging.info("Testing configuration")
    gearman_worker.send_job_status(current_job, 1, 10)

    gm_client = python3_gearman.GearmanClient(
        [gearman_worker.ovdm.get_gearman_server()])

    gm_data = {
        'cruiseDataTransfer': gearman_worker.cruise_data_transfer,
        'cruiseID': gearman_worker.cruise_id
    }

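    # Run the connection test synchronously as a separate Gearman job before
    # attempting the actual transfer.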
    completed_job_request = gm_client.submit_job("testCruiseDataTransfer",
                                                 json.dumps(gm_data))
    results_obj = json.loads(completed_job_request.result)

    logging.debug('Connection Test Results: %s',
                  json.dumps(results_obj, indent=2))

    if results_obj['parts'][-1]['result'] == "Pass":  # Final Verdict
        logging.debug("Connection test passed")
        job_results['parts'].append({
            "partName": "Connection Test",
            "result": "Pass"
        })
    else:
        logging.warning("Connection test failed, quitting job")
        job_results['parts'].append({
            "partName": "Connection Test",
            "result": "Fail",
            "reason": results_obj['parts'][-1]['reason']
        })
        return json.dumps(job_results)

    gearman_worker.send_job_status(current_job, 2, 10)

    logging.info("Transferring files")
    output_results = None
    if gearman_worker.cruise_data_transfer['transferType'] == "4":  # SSH Server
        output_results = transfer_ssh_dest_dir(gearman_worker, current_job)
    else:
        logging.error("Unknown Transfer Type")
        job_results['parts'].append({
            "partName": "Transfer Files",
            "result": "Fail",
            "reason": "Unknown transfer type"
        })
        return json.dumps(job_results)

    if not output_results['verdict']:
        logging.error("Transfer of remote files failed: %s",
                      output_results['reason'])
        job_results['parts'].append({
            "partName": "Transfer Files",
            "result": "Fail",
            "reason": output_results['reason']
        })
        return json.dumps(job_results)

    logging.debug("Transfer completed successfully")
    job_results['files'] = output_results['files']
    job_results['parts'].append({
        "partName": "Transfer Files",
        "result": "Pass"
    })

    if len(job_results['files']['new']) > 0:
        logging.debug("%s file(s) added", len(job_results['files']['new']))
    if len(job_results['files']['updated']) > 0:
        logging.debug("%s file(s) updated",
                      len(job_results['files']['updated']))
    if len(job_results['files']['exclude']) > 0:
        logging.debug("%s file(s) intentionally skipped",
                      len(job_results['files']['exclude']))

    gearman_worker.send_job_status(current_job, 9, 10)

    if job_results['files']['new'] or job_results['files']['updated']:

        logging.debug("Building logfiles")

        logfile_filename = gearman_worker.cruise_data_transfer[
            'name'] + '_' + gearman_worker.transfer_start_date + '.log'

        log_contents = {
            'files': {
                'new': job_results['files']['new'],
                'updated': job_results['files']['updated']
            }
        }

        output_results = output_json_data_to_file(
            os.path.join(build_logfile_dirpath(gearman_worker),
                         logfile_filename), log_contents['files'])

        if output_results['verdict']:
            job_results['parts'].append({
                "partName": "Write transfer logfile",
                "result": "Pass"
            })
        else:
            logging.error("Error writing transfer logfile: %s",
                          logfile_filename)
            job_results['parts'].append({
                "partName": "Write transfer logfile",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

        output_results = set_owner_group_permissions(
            gearman_worker.shipboard_data_warehouse_config['shipboardDataWarehouseUsername'],
            os.path.join(build_logfile_dirpath(gearman_worker), logfile_filename))

        if not output_results['verdict']:
            job_results['parts'].append({
                "partName": "Set transfer logfile ownership/permissions",
                "result": "Fail",
                "reason": output_results['reason']
            })
            return json.dumps(job_results)

    gearman_worker.send_job_status(current_job, 10, 10)

    time.sleep(2)

    return json.dumps(job_results)
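
# A minimal client-side sketch (task name and payload shape are assumptions,
# mirroring the gm_client.submit_job() call inside the task itself):
#
#   gm_client = python3_gearman.GearmanClient(['localhost:4730'])
#   payload = {'cruiseDataTransfer': {...}, 'cruiseID': 'CS2301'}
#   request = gm_client.submit_job('runShipToShoreTransfer', json.dumps(payload))
#   results = json.loads(request.result)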