Example no. 1
    def handle(self, *args, **options):
        if options["file"] is None:
            logger.error("You must specify a file.")
            return "1"

        if "s3://" in options["file"]:
            bucket, key = parse_s3_url(options["file"])
            s3 = boto3.resource("s3")
            try:
                filepath = "/tmp/input_" + str(uuid.uuid4()) + ".txt"
                s3.Bucket(bucket).download_file(key, filepath)
            except botocore.exceptions.ClientError as e:
                if e.response["Error"]["Code"] == "404":
                    logger.error("The remote file does not exist.")
                    raise
                else:
                    raise
        else:
            filepath = options["file"]

        with open(filepath) as accession_file:
            for i, accession in enumerate(accession_file):
                if i < options["offset"]:
                    continue
                accession = accession.strip()
                try:
                    queue_surveyor_for_accession(accession)
                except Exception as e:
                    logger.exception(e)
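Every snippet in this listing calls a parse_s3_url helper that is not included in the excerpts. A minimal sketch of what such a helper could look like, assuming it only has to split an s3://bucket/key URL into its bucket and key parts (the project's real helper may differ):

from urllib.parse import urlparse

def parse_s3_url(url):
    # Assumed behavior: "s3://my-bucket/some/key.txt" -> ("my-bucket", "some/key.txt")
    parsed = urlparse(url)
    return parsed.netloc, parsed.path.lstrip("/")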
Example no. 2
    def handle(self, *args, **options):
        if options["file"] is None:
            logger.error("You must specify a file.")
            return "1"

        if "s3://" in options["file"]:
            bucket, key = parse_s3_url(options["file"])
            s3 = boto3.resource("s3")
            try:
                filepath = "/tmp/input_" + str(uuid.uuid4()) + ".txt"
                s3.Bucket(bucket).download_file(key, filepath)
            except botocore.exceptions.ClientError as e:
                if e.response["Error"]["Code"] == "404":
                    logger.error("The remote file does not exist.")
                    raise
                else:
                    raise
        else:
            filepath = options["file"]

        with open(filepath) as accession_file:
            for i, accession in enumerate(accession_file):
                if i < options["offset"]:
                    continue
                accession = accession.strip()
                try:
                    logger.info(f"Queuing surveyor job for {accession}.")
                    queue_surveyor_for_accession(accession)

                    # Sleep for 30 seconds so all surveyor jobs don't
                    # start at the exact same time and overload the
                    # database or ENA.
                    time.sleep(30)
                except Exception as e:
                    logger.exception(e)
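Both snippets above are handle methods of Django management commands; the class around them, which declares the options that handle reads, is not part of the excerpt. A rough sketch of that boilerplate, with the --file and --offset option names inferred from the options lookups (the real command class may declare more arguments):

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Queue a surveyor job for every accession listed in a file."

    def add_arguments(self, parser):
        # Option names inferred from options["file"] / options["offset"] above.
        parser.add_argument("--file", type=str, help="Local path or s3:// URL of the accession list.")
        parser.add_argument("--offset", type=int, default=0, help="Number of leading accessions to skip.")

    def handle(self, *args, **options):
        ...  # body as in the snippets above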
Example no. 3
    def handle(self, *args, **options):
        if options["accession"] is None and options['file'] is None:
            logger.error("You must specify an experiment accession or file.")
            sys.exit(1)

        if not options["force"]:
            print(
                '-------------------------------------------------------------------------------'
            )
            print(
                'This will delete all objects in the database related to these accessions.'
                ' Are you sure you want to do this?')
            answer = input(
                'You must type "yes", all other input will be ignored: ')

            if answer != "yes":
                print('Not unsurveying because confirmation was denied.')
                sys.exit(1)

        accessions = []
        if options["file"]:
            if 's3://' in options["file"]:
                bucket, key = parse_s3_url(options["file"])
                s3 = boto3.resource('s3')
                try:
                    filepath = "/tmp/input_" + str(uuid.uuid4()) + ".txt"
                    s3.Bucket(bucket).download_file(key, filepath)
                except botocore.exceptions.ClientError as e:
                    if e.response['Error']['Code'] == "404":
                        logger.error("The remote file does not exist.")
                        raise
                    else:
                        raise
            else:
                filepath = options["file"]

            with open(filepath) as file:
                for accession in file:
                    accessions.append(accession.strip())
        else:
            accessions.append(options["accession"])

        for accession in accessions:
            logger.info("Purging Experiment with accession: %s", accession)
            try:
                purge_experiment(accession)
            except Exception:
                logger.exception(
                    "Exception caught while purging experiment with accession: %s",
                    accession,
                )
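For reference, a management command like this one can be driven programmatically with Django's call_command as well as from manage.py. The command name and accession below are placeholders, since neither appears in the excerpt:

from django.core.management import call_command

# Hypothetical command name and accession, shown only to illustrate the options.
call_command("unsurvey_experiment", accession="GSE12345", force=True)
call_command("unsurvey_experiment", file="s3://my-bucket/accessions_to_purge.txt", force=True)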
Example no. 4
    def handle(self, *args, **options):
        if options["file"] is None and options[
                "accession"] is None and options["job_id"] is None:
            logger.error("You must specify an accession or file or job ID.")
            return "1"

        if options["file"]:
            if "s3://" in options["file"]:
                bucket, key = parse_s3_url(options["file"])
                s3 = boto3.resource("s3")
                try:
                    filepath = "/tmp/input_" + str(uuid.uuid4()) + ".txt"
                    s3.Bucket(bucket).download_file(key, filepath)
                except botocore.exceptions.ClientError as e:
                    if e.response["Error"]["Code"] == "404":
                        logger.error("The remote file does not exist.")
                        raise
                    else:
                        raise
            else:
                filepath = options["file"]
            with open(filepath) as accession_file:
                for i, accession in enumerate(accession_file):
                    if i < options["offset"]:
                        continue
                    accession = accession.strip()
                    try:
                        run_surveyor_for_accession(accession)
                    except Exception as e:
                        logger.exception(e)

        if options["accession"]:
            accession = options["accession"]
            try:
                run_surveyor_for_accession(accession)
            except Exception as e:
                logger.exception(e)

        if options["job_id"]:
            job_id = options["job_id"]
            try:
                survey_job = SurveyJob.objects.get(id=job_id)
                surveyor.run_job(survey_job)
            except Exception as e:
                logger.exception(e)
Example no. 5
    def handle(self, *args, **options):
        okay = True
        if options["file"] is None:
            logger.error("You must specify a file to import metadata from")
            okay = False
        if options["source_name"] is None:
            logger.error("You must specify a source name")
            okay = False
        if options["methods_url"] is None:
            logger.error("You must specify a methods url")
            okay = False
        if not okay:
            sys.exit(1)

        if "s3://" in options["file"]:
            bucket, key = parse_s3_url(options["file"])
            s3 = boto3.resource("s3")
            try:
                filepath = "/tmp/keyword_" + str(uuid.uuid4()) + ".json"
                s3.Bucket(bucket).download_file(key, filepath)
            except botocore.exceptions.ClientError as e:
                if e.response["Error"]["Code"] == "404":
                    logger.error("The remote file does not exist.")
                    raise
                else:
                    raise
        else:
            filepath = options["file"]

        with open(filepath) as file:
            keywords = json.load(file)

        if not isinstance(keywords, dict):
            logger.error(
                "The provided keywords file is not a dict with accession code keys"
            )
            sys.exit(1)

        source, _ = Contribution.objects.get_or_create(
            source_name=options["source_name"],
            methods_url=options["methods_url"])

        import_keywords(keywords, source)
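The snippet only constrains the top level of the keywords file: after json.load it must be a dict keyed by accession code, which is then handed to import_keywords together with the Contribution row. An illustrative example of data that would pass that check; the accession codes and the value shape are guesses, not taken from the source:

# Illustrative only -- the snippet requires a dict keyed by accession code,
# but says nothing about the shape of the values.
keywords = {
    "GSE12345": ["liver", "rna-seq"],
    "SRP000001": ["blood", "time series"],
}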