Example no. 1
0
 def test_async_fail(self, mock_makedirs, mock_copytree, mock_check_async):
     """Upload should report failure (return code 4) when the async check fails."""
     upload_kwargs = {
         "ss_url": SS_URL,
         "ss_user": SS_USER_NAME,
         "ss_api_key": SS_API_KEY,
         "pipeline_uuid": PIPELINE_UUID,
         "cp_location_uuid": CP_LOCATION_UUID,
         "ds_location_uuid": DS_LOCATION_UUID,
         "shared_directory": SHARED_DIRECTORY,
         "dip_path": DIP_PATH,
         "aip_uuid": AIP_UUID,
         "delete_local_copy": True,
     }
     assert storage_service_upload.main(**upload_kwargs) == 4
Example no. 2
0
 def test_success(self, mock_makedirs, mock_copytree, mock_rmtree):
     """A successful upload returns 0 and removes both local DIP copies."""
     upload_kwargs = {
         "ss_url": SS_URL,
         "ss_user": SS_USER_NAME,
         "ss_api_key": SS_API_KEY,
         "pipeline_uuid": PIPELINE_UUID,
         "cp_location_uuid": CP_LOCATION_UUID,
         "ds_location_uuid": DS_LOCATION_UUID,
         "shared_directory": SHARED_DIRECTORY,
         "dip_path": DIP_PATH,
         "aip_uuid": AIP_UUID,
         "delete_local_copy": True,
     }
     assert storage_service_upload.main(**upload_kwargs) == 0
     # The DIP is staged under the pipeline's watched directory before upload.
     staged_dip = os.path.join(
         SHARED_DIRECTORY,
         "watchedDirectories",
         "automationToolsDIPs",
         os.path.basename(DIP_PATH),
     )
     # Both the staged copy and the original local DIP must be deleted.
     expected_removals = [mock.call(staged_dip), mock.call(DIP_PATH)]
     mock_rmtree.assert_has_calls(expected_removals)
Example no. 3
0
def main(
    ss_url,
    ss_user,
    ss_api_key,
    location_uuid,
    tmp_dir,
    output_dir,
    database_file,
    delete_local_copy,
    upload_type,
    pipeline_uuid,
    cp_location_uuid,
    ds_location_uuid,
    shared_directory,
    atom_url,
    atom_email,
    atom_password,
    atom_slug,
    rsync_target,
):
    """Create DIPs for new AIPs in a Storage Service location and upload them.

    Fetches UPLOADED/VERIFIED AIPs from the Storage Service, skips AIPs
    already recorded in the local tracking database, creates a DIP for each
    new AIP, and uploads the DIP either back to the Storage Service
    (``upload_type == "ss-upload"``) or to AtoM (``upload_type ==
    "atom-upload"``).

    Returns 1 when the tracking database cannot be initialized and 2 when
    the Storage Service request fails; otherwise processes every new AIP,
    logging (but not aborting on) per-AIP DIP-creation failures.
    """
    LOGGER.info("Processing AIPs in SS location: %s", location_uuid)

    # Idempotently create database and Aip table and create session
    try:
        session = models.init(database_file)
    except IOError:
        LOGGER.error("Could not create database in: %s", database_file)
        return 1

    # Get UPLOADED and VERIFIED AIPs from the SS
    try:
        am_client = amclient.AMClient(
            ss_url=ss_url, ss_user_name=ss_user, ss_api_key=ss_api_key
        )
        # There is an issue in the SS API that avoids
        # filtering the results by location. See:
        # https://github.com/artefactual/archivematica-storage-service/issues/298
        aips = am_client.aips({"status__in": "UPLOADED,VERIFIED"})
    except Exception as e:
        # Top-level boundary: any client/HTTP failure aborts the run with
        # code 2 after being logged.
        LOGGER.error(e)
        return 2

    # Get only AIPs from the specified location
    aip_uuids = filter_aips(aips, location_uuid)

    # Create DIPs for those AIPs
    for uuid in aip_uuids:
        try:
            # To avoid race conditions while checking for an existing AIP
            # and saving it, create the row directly and check for an
            # integrity error exception (the uuid is a unique column)
            db_aip = models.Aip(uuid=uuid)
            session.add(db_aip)
            session.commit()
        except exc.IntegrityError:
            session.rollback()
            LOGGER.debug("Skipping AIP (already processed/processing): %s", uuid)
            continue

        # The METS variant depends on the upload target.
        mets_type = "atom"
        if upload_type == "ss-upload":
            mets_type = "storage-service"

        dip_path = create_dip.main(
            ss_url=ss_url,
            ss_user=ss_user,
            ss_api_key=ss_api_key,
            aip_uuid=uuid,
            tmp_dir=tmp_dir,
            output_dir=output_dir,
            mets_type=mets_type,
        )

        # Do not try upload on creation error. create_dip.main signals
        # failure with an int error code (a path otherwise); use isinstance
        # rather than an exact type(...) == int comparison.
        if isinstance(dip_path, int):
            LOGGER.error("Could not create DIP from AIP: %s", uuid)
            continue

        if upload_type == "ss-upload":
            storage_service_upload.main(
                ss_url=ss_url,
                ss_user=ss_user,
                ss_api_key=ss_api_key,
                pipeline_uuid=pipeline_uuid,
                cp_location_uuid=cp_location_uuid,
                ds_location_uuid=ds_location_uuid,
                shared_directory=shared_directory,
                dip_path=dip_path,
                aip_uuid=uuid,
                delete_local_copy=delete_local_copy,
            )
        elif upload_type == "atom-upload":
            atom_upload.main(
                atom_url=atom_url,
                atom_email=atom_email,
                atom_password=atom_password,
                atom_slug=atom_slug,
                rsync_target=rsync_target,
                dip_path=dip_path,
                delete_local_copy=delete_local_copy,
            )

    LOGGER.info("All AIPs have been processed")