Example #1
def generate_result_file(algorithm_info, to_detect, file_type, temp=False):
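    """Builds the path of the result file for a detection run.

    The file name combines the algorithm UUID with the input image or
    directory name, an optional '-temp' suffix, and the requested file type.
    """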
    output_file_name = algorithm_info[lookup.uuid_descriptor]
    cmd = lookup.get_cmd(algorithm_info)

    # hacky fix for piping output
    if (algorithm_info[lookup.COMMAND_TYPE] == lookup.DOCKER
            and lookup.PIPE_OUTPUT not in algorithm_info):
        output_directory = lookup.result_dir
    else:
        output_directory = abspath(
            lookup.get_algo_asset_dirs()[lookup.detector])

    if lookup.INPUT_IMAGE_PATH in cmd:
        output_file_name += '_' + fs.get_filename(
            to_detect[lookup.INPUT_IMAGE_PATH], extension=False)

    elif lookup.INPUT_IMAGE_DIRECTORY in cmd:
        output_file_name += '_' + fs.get_filename(
            to_detect[lookup.INPUT_IMAGE_DIRECTORY])

    if temp:
        output_file_name += '-temp'

    output_file_name += '.' + file_type
    return join(output_directory, output_file_name)
Example #2
def preprocess_docker(algorithm_info, to_verify_list):
    """starts docker command and updates parameters appropriately"""
    image_name = algorithm_info[lookup.DOCKER_IMAGE]
    cmd = lookup.get_verify_cmd(algorithm_info)
    volumes = {}

    if lookup.VERIFY_TXT_FILE in cmd:
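        # the verify command references a .txt file: mount the embeddor asset
        # dir into the container and precompute each image's verify file path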
        asset_directory = abspath(
            lookup.get_algo_asset_dirs()[lookup.embeddor])
        volumes[asset_directory] = {'bind': lookup.asset_dir, 'mode': 'rw'}
        for to_verify in to_verify_list:
            to_verify[lookup.VERIFY_TXT_FILE] = generator.generate_verify_file(
                algorithm_info, to_verify)

    for to_verify in to_verify_list:
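        # mount this image's directory into the container and rewrite the
        # image path to its in-container location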
        assert (lookup.INPUT_IMAGE_PATH in to_verify)
        original_input_path = to_verify[lookup.INPUT_IMAGE_PATH]
        original_input_path = abspath(original_input_path)

        local_input_dir = fs.get_directory(original_input_path)
        volumes[local_input_dir] = {'bind': lookup.input_dir, 'mode': 'rw'}

        input_filename = fs.get_filename(original_input_path)
        new_input_path = join(lookup.input_dir, input_filename)
        to_verify[lookup.INPUT_IMAGE_PATH] = new_input_path

    container_id = runner.start_docker(image_name, volumes=volumes)
    for to_verify in to_verify_list:
        to_verify[lookup.container_id] = container_id

    return [], to_verify_list
Example #3
def termination_docker(algorithm_info, verified_list):
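    """Builds the termination commands: stop every docker container used
    during verification and remove each per-image verification asset file."""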
    termination_cmds = []
    cmd = lookup.get_verify_cmd(algorithm_info)

    docker_containers = list(
        {verified[lookup.container_id] for verified in verified_list})
    for container_id in docker_containers:
        termination_cmds.append({
            lookup.COMMAND_TYPE: lookup.END_DOCKER,
            lookup.COMMAND: [container_id]
        })

    for verified in verified_list:
        asset_file_name = fs.get_filename(
            generator.generate_verify_file(algorithm_info, verified))
        asset_directory = lookup.get_algo_asset_dirs()[algorithm_info[
            lookup.ALGORITHM_TYPE]]

        old_asset_file_path = join(asset_directory, asset_file_name)
        removal_cmd = ' '.join([lookup.removal_prefix, old_asset_file_path])

        termination_cmds.append({
            lookup.COMMAND_TYPE: lookup.NATIVE,
            lookup.COMMAND: [removal_cmd]
        })

    return termination_cmds
Example #4
def compile_txt_results(algorithm_info, source_db):
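	"""Reads each per-image .txt result file written by the detector and
	returns, per image, its result file path, label, and parsed result
	(stego/cover for binary detectors, a float score otherwise)."""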
	algorithm_uuid = algorithm_info[lookup.uuid_descriptor]
	asset_dir = abspath(lookup.get_algo_asset_dirs()[lookup.detector])
	
	image_files = lookup.get_image_list(source_db)

	result_file_func = lambda file: join(
		asset_dir,
		algorithm_uuid + '_' +
		fs.get_filename(file[lookup.file_path], extension=False) + '.txt')
	result_files = [{
		lookup.file_path: result_file_func(file),
		lookup.label: file[lookup.label]
	} for file in image_files]

	results = []

	for result_file_info in result_files:
		file_result = fs.read_txt_file(result_file_info[lookup.file_path])
		file_result = ''.join(file_result)

		result = None
		if algorithm_info[lookup.DETECTOR_TYPE] == lookup.binary_detector:
			yes_filter = algorithm_info[lookup.regex_filter_yes]
			no_filter = algorithm_info[lookup.regex_filter_no]

			stego = re.search(yes_filter, file_result)
			cover = re.search(no_filter, file_result)
			# exactly one of the stego/cover patterns should match
			assert (stego or cover) and not (stego and cover)

			if stego: 
				result = lookup.stego
			else:
				result = lookup.cover
		else:
			result = float(file_result)

		result_file_info.update({lookup.result: result})
		results.append(result_file_info)

	return results
Example #5
def get_set_files(algorithm_type: str):
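    """Lists the set files for the given algorithm type as
    {uuid, filepath} descriptors, one per file in the set directory."""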
    set_file_directory = lookup.get_algo_set_dirs()[algorithm_type]
    sets = [{
        lookup.uuid_descriptor: fs.get_filename(file, extension=False),
        lookup.filepath_descriptor: abspath(join(set_file_directory, file))
    } for file in listdir(set_file_directory)]

    return sets
Example #6
	def get_image_info(file_name):
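		"""Returns the single entry of image_list whose path (or file name,
		when the algorithm writes output under the input image name) matches
		file_name."""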
		transform = lambda img: img[lookup.file_path]
		if lookup.OUTPUT_FILE in algorithm_info:
			if algorithm_info[lookup.OUTPUT_FILE] == lookup.INPUT_IMAGE_NAME:
				transform = lambda img: fs.get_filename(img[lookup.file_path])

		filtered_list = list(filter(lambda img: transform(img) == file_name, image_list))
		assert(len(filtered_list) == 1)
		return filtered_list[0]
Example #7
def compile_csv_results(algorithm_info, source_db):
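	"""Compiles per-image .csv detector results (not yet implemented)."""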
	algorithm_uuid = algorithm_info[lookup.uuid_descriptor]
	asset_dir = abspath(lookup.get_algo_asset_dirs()[lookup.detector])

	image_files = lookup.get_image_list(source_db)
	image_filepath = [abspath(file[lookup.file_path]) for file in image_files]

	result_files = [algorithm_uuid + '_' + fs.get_filename(file[lookup.file_path], extension=False) + '.csv' for file in image_files]
	result_files = [join(asset_dir, result_file) for result_file in result_files]

	raise NotImplementedError
Example #8
def generate_verify_file(algorithm_info, to_verify):
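    """Builds the path of the .txt verification file for one input image:
    the container asset dir is used for docker commands, the local embeddor
    asset dir otherwise."""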
    command_type = algorithm_info[lookup.COMMAND_TYPE]
    if command_type == lookup.DOCKER:
        file_dir = lookup.asset_dir
    else:
        file_dir = abspath(lookup.get_algo_asset_dirs()[lookup.embeddor])

    file_name = algorithm_info[lookup.uuid_descriptor] + '_' + fs.get_filename(
        to_verify[lookup.INPUT_IMAGE_PATH], extension=False) + '.txt'
    file_path = join(file_dir, file_name)

    return file_path
Example #9
def compile_csv_directory(algorithm_info, source_db):
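	"""Reads the single .csv result file the detector wrote for the images'
	shared directory and returns one parsed result entry per image."""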
	algorithm_uuid = algorithm_info[lookup.uuid_descriptor]
	asset_dir = abspath(lookup.get_algo_asset_dirs()[lookup.detector])

	image_list = lookup.get_image_list(source_db)

	directory = list(set([fs.get_directory(image[lookup.file_path]) for image in image_list]))
	assert(len(directory) == 1)
	directory = directory[0]
	result_csv_file = algorithm_uuid + '_' + fs.get_filename(directory) + '.csv'
	result_csv_file = join(asset_dir, result_csv_file)

	data = fs.read_csv_file(result_csv_file)
	results = []

	def get_image_info(file_name):
		transform = lambda img: img[lookup.file_path]
		if lookup.OUTPUT_FILE in algorithm_info:
			if algorithm_info[lookup.OUTPUT_FILE] == lookup.INPUT_IMAGE_NAME:
				transform = lambda img: fs.get_filename(img[lookup.file_path])

		filtered_list = list(filter(lambda img: transform(img) == file_name, image_list))
		assert(len(filtered_list) == 1)
		return filtered_list[0]

	for row in data:
		result_info = get_image_info(row[0])
		file_result = row[1]

		if algorithm_info[lookup.DETECTOR_TYPE] == lookup.binary_detector:
			yes_filter = algorithm_info[lookup.regex_filter_yes]
			no_filter = algorithm_info[lookup.regex_filter_no]

			stego = re.search(yes_filter, file_result)
			cover = re.search(no_filter, file_result)
			# exactly one of the stego/cover patterns should match
			assert (stego or cover) and not (stego and cover)

			if stego: 
				result = lookup.stego
			else:
				result = lookup.cover
		else:
			result = float(file_result)
		
		result_info.update({lookup.result: result})
		results.append(result_info)

	return results
Example #10
def verify_embedding(verify_db, embeddors):
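	"""Checks each embedded image's verification .txt file and records, per
	embeddor, whether the extracted text has the expected secret length."""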
	embeddor_results = defaultdict(list)
	image_files = lookup.get_image_list(verify_db)

	for image_file in image_files:
		image_file[lookup.INPUT_IMAGE_PATH] = image_file[lookup.file_path]
		embeddor_uuid = image_file[lookup.uuid_descriptor]
		verify_txt_file = generator.generate_verify_file(embeddors[embeddor_uuid], image_file)

		asset_file_name = fs.get_filename(verify_txt_file)
		asset_directory = lookup.get_algo_asset_dirs()[lookup.embeddor]
		verify_file_path = abspath(join(asset_directory, asset_file_name))

		data = fs.read_txt_file(verify_file_path)

		verification_result = len(data[0]) == int(image_file[lookup.secret_txt_length])

		embeddor_results[embeddor_uuid].append({
			lookup.INPUT_IMAGE_PATH: image_file[lookup.INPUT_IMAGE_PATH],
			lookup.result: verification_result
		})

	return embeddor_results
Example #11
def preprocess_docker(algorithm_info, to_detect_list):
    """starts docker command and updates parameters appropriately"""
    image_name = algorithm_info[lookup.DOCKER_IMAGE]
    cmd = lookup.get_cmd(algorithm_info)
    volumes = {}

    if lookup.INPUT_IMAGE_DIRECTORY in cmd:
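        # the command works on whole directories: mount each input directory
        # under a fresh container-side path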
        updated_detect_list = generator.get_directories(to_detect_list)
        for updated_detect in updated_detect_list:
            docker_directory = '/' + fs.get_uuid()
            volumes[updated_detect[lookup.INPUT_IMAGE_DIRECTORY]] = {
                'bind': docker_directory,
                'mode': 'rw'
            }
            updated_detect[lookup.INPUT_IMAGE_DIRECTORY] = docker_directory
    elif lookup.INPUT_IMAGE_PATH in cmd:
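        # the command works on single images: mount each image's directory at
        # the shared input dir and rewrite the image path accordingly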
        for to_detect in to_detect_list:
            original_input_path = to_detect[lookup.INPUT_IMAGE_PATH]
            original_input_path = abspath(original_input_path)

            local_input_dir = fs.get_directory(original_input_path)
            volumes[local_input_dir] = {'bind': lookup.input_dir, 'mode': 'rw'}

            input_filename = fs.get_filename(original_input_path)
            new_input_path = join(lookup.input_dir, input_filename)
            to_detect[lookup.INPUT_IMAGE_PATH] = new_input_path

    result_directory = abspath(lookup.get_algo_asset_dirs()[lookup.detector])
    assert (fs.dir_exists(result_directory))

    volumes[result_directory] = {'bind': lookup.result_dir, 'mode': 'rw'}

    container_id = runner.start_docker(image_name, volumes=volumes)
    for to_detect in to_detect_list:
        to_detect[lookup.container_id] = container_id

    return [], to_detect_list
Example #12
def generate_output_list(algorithm_info, output_directory: str,
                         input_list: list):
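    """Builds one output file path per input file, using the algorithm's
    OUTPUT_FILE template when present and a fresh UUID otherwise."""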
    target_directory = output_directory

    output_list = []
    for file in input_list:
        file_type = file[lookup.image_type]

        if lookup.OUTPUT_FILE in algorithm_info:
            replacements = {
                lookup.INPUT_IMAGE_NAME:
                fs.get_filename(abspath(file[lookup.file_path]),
                                extension=False)
            }
            output_file_name = replace(algorithm_info[lookup.OUTPUT_FILE],
                                       replacements)
        else:
            output_file_name = fs.get_uuid()

        output_file = fs.create_name_from_uuid(output_file_name, file_type)
        output_file_path = join(target_directory, output_file)
        output_list.append(output_file_path)

    return output_list