Example #1
import os
import re
from tempfile import NamedTemporaryFile


def upload_images(cli, images, managed_dir, target_dataset, remote_conn):
    uploaded_image_ids = []
    uploaded_image_id = ''
    for image in images:
        print("Processing image: ID %s: %s" % (image.id, image.getName()))
        desc = image.getDescription()
        print "Description: ", desc
        temp_file = NamedTemporaryFile().name
        # TODO: haven't tested an image with multiple files - see fileset.
        for f in image.getImportedImageFiles():
            file_loc = os.path.join(managed_dir, f.path, f.name)
            # This temp_file is a workaround to get hold of the id of uploaded
            # images from stdout.
            with open(temp_file, 'w+') as tf, stdout_redirected(tf):
                if target_dataset:
                    cli.onecmd([
                        "import", file_loc, '-T', target_dataset,
                        '--description', desc
                    ])
                else:
                    cli.onecmd(["import", file_loc, '--description', desc])

            with open(temp_file, 'r') as tf:
                txt = tf.readline()
                # assert txt.startswith("Image:")
                uploaded_image_id = re.findall(r'\d+', txt)[0]
        uploaded_image_ids.append(uploaded_image_id)

        # TODO check what happens when an image has multiple files.
        remote_image = remote_conn.getObject("Image", uploaded_image_id)
        add_attachments(image, remote_image, remote_conn)

    print "ids are: ", uploaded_image_ids
    return uploaded_image_ids
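All of these examples lean on a stdout_redirected helper that is not shown on this page. A minimal sketch of what such a context manager could look like, assuming the OMERO CLI writes its output through Python's sys.stdout (the real helper may redirect at the file-descriptor level instead):

import sys
from contextlib import contextmanager


@contextmanager
def stdout_redirected(new_target):
    # Assumption: redirecting sys.stdout is enough to capture the CLI output.
    old_target = sys.stdout
    sys.stdout = new_target
    try:
        yield new_target
    finally:
        # Always restore the original stdout, even if the body raises.
        sys.stdout = old_target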
def create_tag(cli, tag_name, tag_desc):
    label_cmd = 'textValue=' + tag_name
    desc_cmd = "description=" + tag_desc

    temp_file = NamedTemporaryFile().name
    # This temp_file is a workaround to get hold of the id of uploaded
    # datasets from stdout.
    with open(temp_file, 'w+') as tf, stdout_redirected(tf):
        cli.onecmd(["obj", "new", "TagAnnotation", label_cmd, desc_cmd])

    with open(temp_file, 'r') as tf:
        txt = tf.readline()
        tag_id = re.findall(r'\d+', txt)[0]
    print(":".join(["uploaded tag ", tag_id]))

    return tag_id
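The returned tag_id can then be linked to an object with the same "omero obj new" pattern. A short sketch, where the dataset ID is illustrative:

# Sketch: attach the new tag to an existing dataset (Dataset:123 is illustrative).
tag_id = create_tag(cli, "my-tag", "a tag created from a script")
cli.onecmd(["obj", "new", "DatasetAnnotationLink",
            "parent=Dataset:123", "child=TagAnnotation:%s" % tag_id])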
def create_dataset(cli, dataset_name, dataset_desc):
    name_cmd = 'name=' + dataset_name
    desc_cmd = "description=" + dataset_desc

    temp_file = NamedTemporaryFile().name
    # This temp_file is a workaround to get hold of the id of uploaded
    # datasets from stdout.
    with open(temp_file, 'w+') as tf, stdout_redirected(tf):
        cli.onecmd(["obj", "new", "Dataset", name_cmd, desc_cmd])

    with open(temp_file, 'r') as tf:
        txt = tf.readline()
        dataset_id = re.findall(r'\d+', txt)[0]
    print(":".join(["uploaded dataset ", dataset_id]))

    return dataset_id
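A usage sketch tying create_dataset to an import, assuming cli is a connected omero.cli.CLI instance; the file path is illustrative:

# Sketch: create a dataset, then import an image file into it. The
# "Dataset:id:<id>" target spec is the same one Example #6 builds below.
dataset_id = create_dataset(cli, "my-dataset", "created from a script")
cli.onecmd(["import", "/path/to/image.tif",
            "-T", "Dataset:id:%s" % dataset_id])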
Example #4
import re
from tempfile import NamedTemporaryFile


def upload_dataset(cli, ds, remote_conn, local_conn):
    temp_file = NamedTemporaryFile().name
    # This temp_file is a workaround to get hold of the id of uploaded
    # datasets from stdout.

    name_cmd = 'name=' + ds.getName()
    desc_cmd = "description=" + ds.getDescription()
    with open(temp_file, 'w+') as tf, stdout_redirected(tf):
        # bin/omero obj new Dataset name='new_dataset'
        cli.onecmd(["obj", "new", "Dataset", name_cmd, desc_cmd])

    with open(temp_file, 'r') as tf:
        txt = tf.readline()
        uploaded_dataset_id = re.findall(r'\d+', txt)[0]
    print("uploaded dataset ", uploaded_dataset_id)
    remote_ds = remote_conn.getObject("Dataset", uploaded_dataset_id)
    # TODO add description and tags for dataset
    add_attachments(ds, remote_ds, remote_conn, local_conn)
    return uploaded_dataset_id
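Grabbing the first run of digits works, but anchoring the regex on the "Dataset:" prefix that "omero obj new" prints is slightly more defensive. A sketch of the parsing step, assuming the first line of output has the form "Dataset:<id>":

import re

# Sketch: fail loudly if the CLI output is not the expected "Dataset:<id>".
match = re.match(r'Dataset:(\d+)', txt)
if match is None:
    raise ValueError("unexpected CLI output: %r" % txt)
uploaded_dataset_id = match.group(1)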
Example #5
import os
import re
from tempfile import NamedTemporaryFile


def upload_images(cli,
                  images,
                  managed_dir,
                  target_dataset,
                  remote_conn,
                  local_conn,
                  remote_ds=None,
                  image_format="Hypercube"):
    uploaded_image_ids = []
    uploaded_image_id = ''
    # pixels IDs already handled, so the same planes are not exported twice
    ids = []

    for image in images:
        print(("Processing image: ID %s: %s" % (image.id, image.getName())))
        desc = image.getDescription()
        print("Description: ", desc)
        temp_file = NamedTemporaryFile().name

        if len(list(image.getImportedImageFiles())) > 0:
            # TODO: haven't tested an image with multiple files - see fileset.
            for f in image.getImportedImageFiles():
                file_loc = os.path.join(managed_dir, f.path, f.name)
                # This temp_file is a workaround to get hold of the id of uploaded
                # images from stdout.
                with open(temp_file, 'w+') as tf, stdout_redirected(tf):
                    if target_dataset:
                        cli.onecmd([
                            "import", file_loc, '-T', target_dataset,
                            '--description', desc, '--no-upgrade-check'
                        ])
                    else:
                        cli.onecmd([
                            "import", file_loc, '--description', desc,
                            '--no-upgrade-check'
                        ])

                with open(temp_file, 'r') as tf:
                    txt = tf.readline()
                    # assert txt.startswith("Image:")
                    uploaded_image_id = re.findall(r'\d+', txt)[0]
        else:
            # no imported images, so must generate the planes/channels instead
            pixels = image.getPrimaryPixels()
            if pixels.getId() in ids:
                continue
            ids.append(pixels.getId())

            if image_format == 'Hypercube':
                uploaded_image_id = handle_pixels(local_conn, remote_conn,
                                                  images, image, managed_dir,
                                                  pixels, remote_ds,
                                                  image_format)
            else:
                file_loc = handle_pixels(local_conn, remote_conn, images,
                                         image, managed_dir, pixels, remote_ds,
                                         image_format)

                # write log for exported images (not needed for ome-tiff);
                # log_strings below is assumed to be populated elsewhere in
                # the script (e.g. by handle_pixels)
                name = 'Batch_Image_Export.txt'
                with open(os.path.join(managed_dir, name), 'w') as log_file:
                    for s in log_strings:
                        log_file.write(s)
                        log_file.write("\n")

                # This temp_file is a workaround to get hold of the id of uploaded
                # images from stdout.
                with open(temp_file, 'w+') as tf, stdout_redirected(tf):
                    if target_dataset:
                        cli.onecmd([
                            "import", file_loc, '-T', target_dataset,
                            '--description', desc, '--no-upgrade-check'
                        ])
                    else:
                        cli.onecmd([
                            "import", file_loc, '--description', desc,
                            '--no-upgrade-check'
                        ])

                with open(temp_file, 'r') as tf:
                    txt = tf.readline()
                    # assert txt.startswith("Image:")
                    uploaded_image_id = re.findall(r'\d+', txt)[0]

        uploaded_image_ids.append(uploaded_image_id)

        # TODO check what happens when an image has multiple files.
        remote_image = remote_conn.getObject("Image", uploaded_image_id)
        add_channel_labels(image, remote_image, remote_conn)
        add_attachments(image, remote_image, remote_conn, local_conn,
                        remote_ds)

    print("ids are: ", uploaded_image_ids)
    return uploaded_image_ids
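The TODOs above note that multi-file images are untested; an import of a fileset can report several IDs on one line (e.g. "Image:101,102,103"), in which case taking only the first match drops IDs. A hedged sketch of a parsing step that keeps them all:

import re

# Sketch: collect every ID on the line, not just the first, so a
# fileset import that prints "Image:101,102,103" is handled.
with open(temp_file, 'r') as tf:
    txt = tf.readline()
    fileset_image_ids = re.findall(r'\d+', txt)
uploaded_image_ids.extend(fileset_image_ids)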
Example #6
import logging
import os
import re
from subprocess import call
from tempfile import NamedTemporaryFile

from omero.rtypes import rlong


def do_upload():

    call(["ls", "-l", DATA_PATH])

    dataset_id, full_dataset_name = None, None
    cur_subdir = None

    for subdir, dirs, files in os.walk(DATA_PATH):

        if subdir != cur_subdir:
            cur_subdir = subdir
            # it's a new subdirectory, therefore new dataset
            dataset_id, full_dataset_name = None, None

            # Check if the current subdirectory has already been uploaded successfully
            upload_status = check_file_path_status(cur_subdir)

            if upload_status == UploadStatus.SUCCESS:
                print("skipping")
                continue

        for file in files:
            if file.endswith(tuple(PERMITTED_FILE_EXTS)):
                print(file)
                filepath = os.path.join(subdir, file)

                upload_status = check_file_path_status(filepath)

                if upload_status == UploadStatus.SUCCESS:
                    print("Status: SUCCESS!")
                    print(upload_status)
                    continue
                elif upload_status == UploadStatus.NOT_PRESENT:
                    # Set the status to 'FAILED' by default
                    print(upload_status)
                    update_status(filepath, subdir, None, None)

                print(filepath)
                path_parts = subdir.split(os.sep)
                strain = path_parts[-1]
                dataset_name = path_parts[-2]
                figure = path_parts[-3]

                full_dataset_name = "_".join([figure, dataset_name,
                                              strain]).replace(" ", "")
                logging.debug(full_dataset_name)

                if dataset_id is None:
                    try:
                        # Connect to remote omero
                        c, cli, remote_conn = connect_to_remote(
                            OMERO_PASSWORD, OMERO_USER)

                        dataset_desc = "A dataset"
                        full_dataset_name = "_".join(
                            [figure, dataset_name, strain]).replace(" ", "")
                        logging.debug(full_dataset_name)

                        name_cmd = 'name=' + full_dataset_name
                        desc_cmd = "description=" + dataset_desc

                        temp_file = NamedTemporaryFile().name
                        # This temp_file is a workaround to get hold of the id of uploaded
                        # datasets from stdout.
                        with open(temp_file,
                                  'w+') as tf, stdout_redirected(tf):
                            cli.onecmd(
                                ["obj", "new", "Dataset", name_cmd, desc_cmd])

                        with open(temp_file, 'r') as tf:
                            txt = tf.readline()
                            dataset_id = re.findall(r'\d+', txt)[0]
                        logging.info(":".join(
                            ["uploaded dataset ", dataset_id]))
                        remote_ds = remote_conn.getObject(
                            "Dataset", rlong(dataset_id))
                        logging.debug(remote_ds.getId())
                    finally:
                        close_remote_connection(c, cli, remote_conn)

                    logging.debug(file)
                    filepath = os.path.join(subdir, file)
                    logging.debug(filepath)

                if dataset_id is not None:
                    image_ids = None
                    try:
                        # Connect to remote omero
                        c, cli, remote_conn = connect_to_remote(
                            OMERO_PASSWORD, OMERO_USER)

                        # This temp_file is a workaround to get hold of the id of uploaded
                        # images from stdout.
                        image_desc = "an image"
                        target_dataset = ":".join(
                            ["Dataset", "id", dataset_id])
                        # target_dataset = ":".join(["Dataset", "name", full_dataset_name])
                        if filepath:
                            #cli.onecmd(["import", filepath, '-T', target_dataset, "-g", OMERO_GROUP,
                            #            '--description', image_desc, '--no-upgrade-check'])
                            try:
                                image_ids = import_image(
                                    filepath, remote_conn, dataset_id,
                                    c.getSessionId())
                            except Exception as e:
                                print(cur_subdir, e)
                            logging.debug(image_ids)

                            update_status(filepath, subdir, image_ids,
                                          remote_conn)
                    except Exception as e:
                        print(e)
                    finally:
                        close_remote_connection(c, cli, remote_conn)
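check_file_path_status, update_status, and UploadStatus are project-specific helpers that are not shown on this page. A minimal sketch of the shape they would need, with an in-memory dict standing in for whatever the real script persists (the names and storage here are assumptions):

from enum import Enum


class UploadStatus(Enum):
    NOT_PRESENT = 0
    FAILED = 1
    SUCCESS = 2


# Assumption: the real script persists this mapping (file or database);
# a module-level dict stands in for it here.
_status = {}


def check_file_path_status(filepath):
    # Unknown paths report NOT_PRESENT so do_upload() initialises them.
    return _status.get(filepath, UploadStatus.NOT_PRESENT)


def update_status(filepath, subdir, image_ids, remote_conn):
    # FAILED until the import hands back image IDs, then SUCCESS.
    _status[filepath] = (UploadStatus.SUCCESS if image_ids
                         else UploadStatus.FAILED)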