Example #1
def _load_submission(contest_path, submission, ext='.py'):
    global language_list
    if ext not in ALLOWED_EXT:
        return None
    submission_path = '%s/%s%s' % (contest_path, submission, ext)
    contest = _context_name(contest_path)
    submission_module = None
    if language_list is not None and ext[1:] not in language_list:
        return None
    if ext == '.py':
        # Dynamically import the submission module and return an instance of
        # the first user-defined Submission subclass it defines.
        submission_module = imp.load_source(
            'submission_%s_%s' % (contest, submission), submission_path)
        classes = inspect.getmembers(submission_module, inspect.isclass)
        for _, cls_submission in classes:
            if issubclass(cls_submission,
                          Submission) and cls_submission not in (
                              Submission, SubmissionOld, SubmissionWrapper):
                return cls_submission()
    # For other languages, only build a wrapper when the required toolchain is
    # available on the PATH (or when forced_mode overrides the check).
    elif ext == '.c' and (forced_mode or is_tool('gcc')):
        return SubmissionC(submission_path)
    elif ext == '.cpp' and (forced_mode or is_tool('g++')):
        return SubmissionCpp(submission_path)
    elif ext == '.go' and (forced_mode or is_tool('go')):
        return SubmissionGo(submission_path)
    elif ext == '.js' and (forced_mode or is_tool('node')):
        return SubmissionJs(submission_path)
    elif ext == '.rb' and (forced_mode or is_tool('ruby')):
        return SubmissionRb(submission_path)
    return None
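
Every example in this listing gates optional functionality on is_tool, a helper imported from utils whose body never appears in the snippets. A minimal sketch of what such a check could look like, assuming it only needs to report whether an executable is on the PATH:

# Hypothetical sketch of the is_tool helper used throughout these examples.
# The real utils.is_tool is not shown here; this version only reports whether
# an executable can be found on the PATH.
import shutil


def is_tool(name):
    # shutil.which returns the full path to the executable, or None.
    return shutil.which(name) is not None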
Example #2
def encode_to_hevc(fn, out):
    param_line = "crf=18.0:limit-sao=1:bframes=8:aq-mode=3:psy-rd=1.0"

    detail_menu = TerminalMenu([
        "(Recommended if you dont know) One Setting to rule them all",
        "(e.g Your Name) Flat, slow anime (slice of life, everything is well lit)",
        "(e.g Kimetsu no Yaiba) Some dark scene, some battle scene (shonen, historical, etc.)",
        "(Rarely used) [TV Series] Movie-tier dark scene, complex grain/detail",
        "(Rarely used) [Movie] Movie-tier dark scene, complex grain/detail",
    ],
                               title="Choose the encode options")

    choice = detail_menu.show()
    # Choice 0 keeps the default param_line defined at the top of the function.
    # Flat, slow anime (slice of life, everything is well lit)
    if choice == 1:
        param_line = "crf=19.0:bframes=8:aq-mode=3:psy-rd=1:aq-strength=0.8:deblock=1,1"
    # Some dark scenes, some battle scenes (shonen, historical, etc.)
    elif choice == 2:
        param_line = "crf=18.0:bframes=8:aq-mode=3:psy-rd=1.5:psy-rdoq=2"
    # [TV Series] Movie-tier dark scenes, complex grain/detail
    elif choice == 3:
        param_line = "crf=18.0:limit-sao=1:bframes=8:aq-mode=3:psy-rd=1.5:psy-rdoq=3.5"
    # [Movie] Movie-tier dark scenes, complex grain/detail
    elif choice == 4:
        param_line = "crf=16.0:limit-sao=1:bframes=8:aq-mode=3:psy-rd=1.5:psy-rdoq=3.5"

    if is_tool("ffmpeg-bar"):
        binary = "ffmpeg-bar"
    else:
        binary = "ffmpeg"

    files = []
    if os.path.isdir(fn):
        for file in glob.glob(os.path.join(fn, "*.mkv")):
            files.append(file)
    if len(files) == 0:
        cmd = [
            binary, "-hide_banner", "-i", fn, "-c:v", "libx265", "-profile:v",
            "main10", "-pix_fmt", "yuv420p10le", "-preset", "slow",
            "-x265-params", param_line, "-map", "0:v:0", "-f", "matroska",
            '-vf', 'scale=out_color_matrix=bt709', '-color_primaries', 'bt709',
            '-color_trc', 'bt709', '-colorspace', 'bt709', out
        ]
        subprocess.call(cmd)
    else:
        for f in files:
            clear()
            name = os.path.basename(f)
            cmd = [
                binary, "-hide_banner", "-i", f, "-c:v", "libx265",
                "-profile:v", "main10", "-pix_fmt", "yuv420p10le", "-preset",
                "slow", "-x265-params", param_line, "-map", "0:v:0", "-f",
                "matroska", '-vf', 'scale=out_color_matrix=bt709',
                '-color_primaries', 'bt709', '-color_trc', 'bt709',
                '-colorspace', 'bt709',
                os.path.join(out, name)
            ]
            subprocess.call(cmd)
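
A brief usage sketch (the file and directory names below are placeholders, not from the source): passing a single file produces one output, while passing a directory batch-encodes every .mkv it contains into the output directory, keeping the original file names.

# Hypothetical invocation; the paths below are illustrative placeholders.
encode_to_hevc("episode01.mkv", "episode01.hevc.mkv")  # single file
encode_to_hevc("season01", "encoded")                   # every *.mkv in season01/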
Example #3
import sys, os
import argparse
from utils import is_tool, credz, str2bool
from extract_audio import extract_audio
from extract_subs import extract_subs
from mux import mux
from shader import shader
from encode import encode_to_hevc
from splitter import split_by_seconds, get_video_length

credz()

if not is_tool("mkvextract"):
    print("mkvnixtool not installed. Please install it")
    sys.exit(-3)
if not is_tool("ffmpeg"):
    print("ffmpeg is not installed. Please install")
    sys.exit(-3)

if not is_tool("mpv"):
    print("mpv is not installed. Please install a new version")
    sys.exit(-3)

parser = argparse.ArgumentParser(
    description='Upscale anime to 4K with shaders, automagically.')
parser.add_argument(
    "-m",
    "--mode",
    required=True,
    help="Mode: choose from audio, subs, shader, or mux, split")
parser.add_argument("-ew",
Example #4
# utils
from tabulate import tabulate
from utils import is_tool, tool_for_lang

# Runtime filters and flags, typically overridden from the command line.
show_debug = True
author_list = None
language_list = None
except_list = None
forced_mode = False
restricted_mode = False

DAY_PATH_PATTERN = 'day-[0-9]*'
CONTEST_PATH_PATTERN = 'part-[0-9]*'
ALLOWED_EXT = ['.c', '.cpp', '.go', '.js', '.py', '.rb', '.rs', '.sh']
SUPPORTED_LANGUAGES = [
    ext[1:] for ext in ALLOWED_EXT if is_tool(tool_for_lang(ext[1:]))
]


# To print colors in terminal
class bcolors:
    RED = '\033[91m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    BLUE = '\033[94m'
    MAGENTA = '\033[95m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
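
SUPPORTED_LANGUAGES is built by probing, through is_tool, the command that tool_for_lang names for each extension. tool_for_lang itself is not shown in these snippets; a plausible sketch, assuming a simple extension-to-command table (the commands the project actually checks may differ):

# Hypothetical mapping from language (extension without the dot) to the
# command probed by is_tool; the project's real tool_for_lang may differ.
_LANG_TOOLS = {
    'c': 'gcc',
    'cpp': 'g++',
    'go': 'go',
    'js': 'node',
    'py': 'python3',
    'rb': 'ruby',
    'rs': 'rustc',
    'sh': 'sh',
}


def tool_for_lang(lang):
    # Fall back to the language name itself for anything not in the table.
    return _LANG_TOOLS.get(lang, lang)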

Example #5
# To print colors in terminal
class bcolors:
    RED = '\033[91m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    BLUE = '\033[94m'
    MAGENTA = '\033[95m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


DAY_PATH_PATTERN = 'day-[0-9]*'
CONTEST_PATH_PATTERN = 'part-[0-9]*'
ALLOWED_EXT = ['.c', '.cpp', '.go', '.js', '.py', '.rb', '.rs']
SUPPORTED_LANGUAGES = [
    ext[1:] for ext in ALLOWED_EXT if is_tool(tool_for_lang(ext[1:]))
]

class DifferentAnswersException(Exception):
    pass

def _context_name(context_path):
    return context_path.replace('/', '_').replace('-', '_')

# Return the list of day directories,
# i.e. the directories matching day-<number>, sorted numerically.
def _get_days():
    return sorted(glob.glob(DAY_PATH_PATTERN), key=lambda x: abs(int(x[-2:])))

# Return the list of contest paths for the given day path, sorted numerically.
def _get_contests_path_for_day(day_path):
    return sorted(glob.glob(day_path + '/' + CONTEST_PATH_PATTERN),
                  key=lambda x: abs(int(x[-1:])))
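
A short usage sketch of the traversal helpers above, assuming a directory layout such as day-01/part-1 (the names below are illustrative):

# Illustrative walk over the day/part layout; directory names are examples.
for day_path in _get_days():  # e.g. 'day-01', 'day-02', ...
    for contest_path in _get_contests_path_for_day(day_path):
        # 'day-01/part-1' becomes 'day_01_part_1', suitable as a module suffix.
        print(_context_name(contest_path))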
Example #6
def savnet_main(args):

    if args.grc == True:
        logger.warning("--grc argument is deprecated and ignored.")


    ##########
    # read sample conf
    sconf = sample_conf.Sample_conf()
    sconf.parse_file(args.sample_list_file, args.sv)

    ##########
    # check if the executables exist
    is_tool("bedtools")
    is_tool("tabix")
    is_tool("bgzip")
    if len(sconf.SJ_files) > 0: is_tool("junc_utils")
    if len(sconf.IR_files) > 0: is_tool("intron_retention_utils")
    if len(sconf.chimera_files) > 0: is_tool("chimera_utils")
    
    ##########
    output_prefix_dir = os.path.dirname(args.output_prefix)
    if output_prefix_dir != "" and not os.path.exists(output_prefix_dir):
       os.makedirs(output_prefix_dir)

    ##########
    logger.info("Merging mutation data.")
    if args.sv == False:
        preprocess.merge_mut2(sconf.mut_files, args.output_prefix + ".mut_merged.vcf", args.reference)
    else:
        preprocess.merge_sv(sconf.sv_files, args.output_prefix + ".sv_merged.txt")

    ##########
    # splicing_junction
    logger.info("Merging splicing junction data.")
    preprocess.merge_SJ2(sconf.SJ_files, args.output_prefix + ".SJ_merged.txt", args.SJ_pooled_control_file, args.SJ_num_thres, args.keep_annotated)

    logger.info("Adding annotation to splicing junction data.")
    annotate_commands = ["junc_utils", "annotate", args.output_prefix + ".SJ_merged.txt", args.output_prefix + ".SJ_merged.annot.txt",
                         "--genome_id", args.genome_id]
    # if args.grc: annotate_commands.append("--grc")
    subprocess.call(annotate_commands)

    logger.info("Checking association betweeen mutation and splicing junction data.")
    if args.sv == False:
        associate_commands = ["junc_utils", "associate", args.output_prefix + ".SJ_merged.annot.txt", args.output_prefix + ".mut_merged.vcf",
                              args.output_prefix + ".SJ_merged.associate.txt",
                              "--donor_size", args.donor_size, "--acceptor_size", args.acceptor_size,
                              "--genome_id", args.genome_id]
        # if args.branchpoint: associate_commands.append("--branchpoint")
        # if args.grc: associate_commands.append("--grc")

    else:
        associate_commands = ["junc_utils", "associate", args.output_prefix + ".SJ_merged.annot.txt", args.output_prefix + ".sv_merged.txt",
                              args.output_prefix + ".SJ_merged.associate.txt", "--sv"]

    subprocess.check_call(associate_commands)
    ##########

    ##########
    # intron_retention
    logger.info("Merging intron retention data.")
    preprocess.merge_intron_retention(sconf.IR_files, args.output_prefix + ".IR_merged.txt", 
                                 args.IR_pooled_control_file, args.IR_ratio_thres, args.IR_num_thres)

    logger.info("Checking association betweeen mutation and intron retention data.")
    if args.sv == False:
        associate_commands = ["intron_retention_utils", "associate", args.output_prefix + ".IR_merged.txt",
                              args.output_prefix + ".mut_merged.vcf", args.output_prefix + ".IR_merged.associate.txt",
                              "--donor_size", args.donor_size, "--acceptor_size", args.acceptor_size]
    else:
        associate_commands = ["intron_retention_utils", "associate", args.output_prefix + ".IR_merged.txt",
                              args.output_prefix + ".sv_merged.txt", args.output_prefix + ".IR_merged.associate.txt", "--sv"]

    subprocess.check_call(associate_commands)
    #########

    #########
    # chimera
    if args.sv:
        logger.info("Merging chimeric junction data.")
        preprocess.merge_chimera(sconf.chimera_files, args.output_prefix + ".chimera_merged.txt", 
                            args.chimera_pooled_control_file, args.chimera_num_thres, args.chimera_overhang_thres)

        logger.info("Checking association betweeen mutation and chimeric junction data.")
        associate_commands = ["chimera_utils", "associate", args.output_prefix + ".chimera_merged.txt",
                              args.output_prefix + ".sv_merged.txt", args.output_prefix + ".chimera_merged.associate.txt", "--genome_id", args.genome_id]
        # if args.grc: associate_commands.append("--grc")

        subprocess.check_call(associate_commands)
    ##########

    ##########
    # organize association
    if args.sv == False:
        logger.info("Organizing splicing association information.")
        preprocess.merge_SJ_IR_files(args.output_prefix + ".SJ_merged.associate.txt", 
                                     args.output_prefix + ".IR_merged.associate.txt",
                                     args.output_prefix + ".splicing.associate.txt")

        logger.info("Creating pickles of splicing association network instances.")
        analysis_network.create_network_list(args.output_prefix + ".splicing.associate.txt",
                                             args.output_prefix + ".splicing_mutation.network.pickles",
                                             args.output_prefix + ".mut_merged.vcf",
                                             sconf.sample_names, sconf.weights)

    else:
        logger.info("Organizing splicing association information.")
        preprocess.merge_SJ_IR_chimera_files_sv(args.output_prefix + ".SJ_merged.associate.txt",
                                                args.output_prefix + ".IR_merged.associate.txt",
                                                args.output_prefix + ".chimera_merged.associate.txt",
                                                args.output_prefix + ".splicing.associate.txt")

        logger.info("Creating pickles of splicing association network instances.")
        analysis_network.create_network_list(args.output_prefix + ".splicing.associate.txt", 
                                             args.output_prefix + ".splicing_mutation.network.pickles",
                                             args.output_prefix + ".sv_merged.txt",
                                             sconf.sample_names, sconf.weights, sv_mode = True)


    logger.info("Extracting splicing associated variants.")
    sav_list_target = analysis_network.extract_sav_list(args.output_prefix + ".splicing_mutation.network.pickles", 
                                                        args.effect_size_thres, 0.5, 0.5, args.log_BF_thres, 1, 
                                                        args.alpha0, args.beta0, args.alpha1, args.beta1, permutation = False)

    logger.info("Extracting of splicing associated variants on permutation pairs to estimate false positive ratios.")
    sav_lists_permutation = []
    for i in range(args.permutation_num):
        temp_sav_list = analysis_network.extract_sav_list(args.output_prefix + ".splicing_mutation.network.pickles", 
                                                          args.effect_size_thres, 0.5, 0.5, args.log_BF_thres, 1,
                                                          args.alpha0, args.beta0, args.alpha1, args.beta1, permutation = True)
        sav_lists_permutation.append(temp_sav_list)

    logger.info("Adding Q-values to splicing associated variants.")
    analysis_network.add_qvalue_to_sav_list(sav_list_target, sav_lists_permutation)

    logger.info("Generating final outputs.")
    with open(args.output_prefix + ".savnet.result.txt", 'w') as hout:
        if args.sv == False:
            print >> hout, Sav.print_header_mut 
        else:
            print >> hout, Sav.print_header_sv
        for sav in sav_list_target:
            print >> hout, '\n'.join(sav.print_records(sv_mode = args.sv, with_fdr = True))

    with open(args.output_prefix + ".splicing_mutation.count_summary.anno.perm_all.txt", 'w') as hout:
        if args.sv == False:
            print >> hout, "Permutation_Num" + '\t' + Sav.print_header_mut
        else:
            print >> hout, "Permutation_Num" + '\t' + Sav.print_header_sv

        for i in range(len(sav_lists_permutation)):
            for sav in sav_lists_permutation[i]:
                print >> hout, '\n'.join([str(i) + '\t' + x for x in sav.print_records(sv_mode = args.sv, with_fdr = False)])

    if args.debug == False:

        subprocess.call(["rm", "-rf", args.output_prefix + ".mut_merged.vcf"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".sv_merged.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".SJ_merged.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".SJ_merged.annot.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".SJ_merged.associate.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".IR_merged.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".IR_merged.associate.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".chimera_merged.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".chimera_merged.associate.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".splicing.associate.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".splicing_mutation.network.pickles"]) 
Example #7
def savnet_main(args):

    if args.grc == True:
        logger.warning("--grc argument is deprecated and ignored.")

    ##########
    # read sample conf
    sconf = sample_conf.Sample_conf()
    sconf.parse_file(args.sample_list_file, args.sv)

    ##########
    # check if the executables exist
    is_tool("bedtools")
    is_tool("tabix")
    is_tool("bgzip")
    if len(sconf.SJ_files) > 0: is_tool("junc_utils")
    if len(sconf.IR_files) > 0: is_tool("intron_retention_utils")
    if len(sconf.chimera_files) > 0: is_tool("chimera_utils")

    ##########
    output_prefix_dir = os.path.dirname(args.output_prefix)
    if output_prefix_dir != "" and not os.path.exists(output_prefix_dir):
        os.makedirs(output_prefix_dir)

    ##########
    logger.info("Merging mutation data.")
    if args.sv == False:
        preprocess.merge_mut2(sconf.mut_files,
                              args.output_prefix + ".mut_merged.vcf",
                              args.reference)
    else:
        preprocess.merge_sv(sconf.sv_files,
                            args.output_prefix + ".sv_merged.txt")

    ##########
    # splicing_junction
    logger.info("Merging splicing junction data.")
    preprocess.merge_SJ2(sconf.SJ_files, args.output_prefix + ".SJ_merged.txt",
                         args.SJ_pooled_control_file, args.SJ_num_thres,
                         args.keep_annotated)

    logger.info("Adding annotation to splicing junction data.")
    annotate_commands = [
        "junc_utils", "annotate", args.output_prefix + ".SJ_merged.txt",
        args.output_prefix + ".SJ_merged.annot.txt", "--genome_id",
        args.genome_id
    ]
    # if args.grc: annotate_commands.append("--grc")
    subprocess.call(annotate_commands)

    logger.info(
        "Checking association betweeen mutation and splicing junction data.")
    if args.sv == False:
        associate_commands = [
            "junc_utils", "associate",
            args.output_prefix + ".SJ_merged.annot.txt",
            args.output_prefix + ".mut_merged.vcf",
            args.output_prefix + ".SJ_merged.associate.txt", "--donor_size",
            args.donor_size, "--acceptor_size", args.acceptor_size,
            "--genome_id", args.genome_id
        ]
        # if args.branchpoint: associate_commands.append("--branchpoint")
        # if args.grc: associate_commands.append("--grc")

    else:
        associate_commands = [
            "junc_utils", "associate",
            args.output_prefix + ".SJ_merged.annot.txt",
            args.output_prefix + ".sv_merged.txt",
            args.output_prefix + ".SJ_merged.associate.txt", "--sv"
        ]

    subprocess.check_call(associate_commands)
    ##########

    ##########
    # intron_retention
    logger.info("Merging intron retention data.")
    preprocess.merge_intron_retention(sconf.IR_files,
                                      args.output_prefix + ".IR_merged.txt",
                                      args.IR_pooled_control_file,
                                      args.IR_ratio_thres, args.IR_num_thres)

    logger.info(
        "Checking association betweeen mutation and intron retention data.")
    if args.sv == False:
        associate_commands = [
            "intron_retention_utils", "associate",
            args.output_prefix + ".IR_merged.txt",
            args.output_prefix + ".mut_merged.vcf",
            args.output_prefix + ".IR_merged.associate.txt", "--donor_size",
            args.donor_size, "--acceptor_size", args.acceptor_size
        ]
    else:
        associate_commands = [
            "intron_retention_utils", "associate",
            args.output_prefix + ".IR_merged.txt",
            args.output_prefix + ".sv_merged.txt",
            args.output_prefix + ".IR_merged.associate.txt", "--sv"
        ]

    subprocess.check_call(associate_commands)
    #########

    #########
    # chimera
    if args.sv:
        logger.info("Merging chimeric junction data.")
        preprocess.merge_chimera(sconf.chimera_files,
                                 args.output_prefix + ".chimera_merged.txt",
                                 args.chimera_pooled_control_file,
                                 args.chimera_num_thres,
                                 args.chimera_overhang_thres)

        logger.info(
            "Checking association betweeen mutation and chimeric junction data."
        )
        associate_commands = [
            "chimera_utils", "associate",
            args.output_prefix + ".chimera_merged.txt",
            args.output_prefix + ".sv_merged.txt",
            args.output_prefix + ".chimera_merged.associate.txt",
            "--genome_id", args.genome_id
        ]
        # if args.grc: associate_commands.append("--grc")

        subprocess.check_call(associate_commands)
    ##########

    ##########
    # organize association
    if args.sv == False:
        logger.info("Organizing splicing association information.")
        preprocess.merge_SJ_IR_files(
            args.output_prefix + ".SJ_merged.associate.txt",
            args.output_prefix + ".IR_merged.associate.txt",
            args.output_prefix + ".splicing.associate.txt")

        logger.info(
            "Creating pickles of splicing association network instances.")
        analysis_network.create_network_list(
            args.output_prefix + ".splicing.associate.txt",
            args.output_prefix + ".splicing_mutation.network.pickles",
            args.output_prefix + ".mut_merged.vcf", sconf.sample_names,
            sconf.weights)

    else:
        logger.info("Organizing splicing association information.")
        preprocess.merge_SJ_IR_chimera_files_sv(
            args.output_prefix + ".SJ_merged.associate.txt",
            args.output_prefix + ".IR_merged.associate.txt",
            args.output_prefix + ".chimera_merged.associate.txt",
            args.output_prefix + ".splicing.associate.txt")

        logger.info(
            "Creating pickles of splicing association network instances.")
        analysis_network.create_network_list(
            args.output_prefix + ".splicing.associate.txt",
            args.output_prefix + ".splicing_mutation.network.pickles",
            args.output_prefix + ".sv_merged.txt",
            sconf.sample_names,
            sconf.weights,
            sv_mode=True)

    logger.info("Extracting splicing associated variants.")
    sav_list_target = analysis_network.extract_sav_list(
        args.output_prefix + ".splicing_mutation.network.pickles",
        args.effect_size_thres,
        0.5,
        0.5,
        args.log_BF_thres,
        1,
        args.alpha0,
        args.beta0,
        args.alpha1,
        args.beta1,
        permutation=False)

    logger.info(
        "Extracting of splicing associated variants on permutation pairs to estimate false positive ratios."
    )
    sav_lists_permutation = []
    for i in range(args.permutation_num):
        temp_sav_list = analysis_network.extract_sav_list(
            args.output_prefix + ".splicing_mutation.network.pickles",
            args.effect_size_thres,
            0.5,
            0.5,
            args.log_BF_thres,
            1,
            args.alpha0,
            args.beta0,
            args.alpha1,
            args.beta1,
            permutation=True)
        sav_lists_permutation.append(temp_sav_list)

    logger.info("Adding Q-values to splicing associated variants.")
    analysis_network.add_qvalue_to_sav_list(sav_list_target,
                                            sav_lists_permutation)

    logger.info("Generating final outputs.")
    with open(args.output_prefix + ".savnet.result.txt", 'w') as hout:
        if args.sv == False:
            print >> hout, Sav.print_header_mut
        else:
            print >> hout, Sav.print_header_sv
        for sav in sav_list_target:
            print >> hout, '\n'.join(
                sav.print_records(sv_mode=args.sv, with_fdr=True))

    with open(
            args.output_prefix +
            ".splicing_mutation.count_summary.anno.perm_all.txt", 'w') as hout:
        if args.sv == False:
            print >> hout, "Permutation_Num" + '\t' + Sav.print_header_mut
        else:
            print >> hout, "Permutation_Num" + '\t' + Sav.print_header_sv

        for i in range(len(sav_lists_permutation)):
            for sav in sav_lists_permutation[i]:
                print >> hout, '\n'.join([
                    str(i) + '\t' + x
                    for x in sav.print_records(sv_mode=args.sv, with_fdr=False)
                ])

    if args.debug == False:

        subprocess.call(["rm", "-rf", args.output_prefix + ".mut_merged.vcf"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".sv_merged.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".SJ_merged.txt"])
        subprocess.call(
            ["rm", "-rf", args.output_prefix + ".SJ_merged.annot.txt"])
        subprocess.call(
            ["rm", "-rf", args.output_prefix + ".SJ_merged.associate.txt"])
        subprocess.call(["rm", "-rf", args.output_prefix + ".IR_merged.txt"])
        subprocess.call(
            ["rm", "-rf", args.output_prefix + ".IR_merged.associate.txt"])
        subprocess.call(
            ["rm", "-rf", args.output_prefix + ".chimera_merged.txt"])
        subprocess.call([
            "rm", "-rf", args.output_prefix + ".chimera_merged.associate.txt"
        ])
        subprocess.call(
            ["rm", "-rf", args.output_prefix + ".splicing.associate.txt"])
        subprocess.call([
            "rm", "-rf",
            args.output_prefix + ".splicing_mutation.network.pickles"
        ])