def find_experiment_export(app_id):
    """Locate a zipped export of the experiment identified by ``app_id``.

    Returns the filesystem path to the archive, or ``None`` when no export
    can be found anywhere.

    Search order:

    1. local "data" subdirectory
    2. user S3 bucket
    3. Dallinger S3 bucket
    """
    archive_name = '{}-data.zip'.format(app_id)

    # Prefer a copy already present under ./data, but only if it is readable
    # as a Data archive; a corrupt local file falls through to the remote
    # lookup instead of being returned.
    local_candidate = os.path.join(os.getcwd(), "data", archive_name)
    if os.path.exists(local_candidate):
        try:
            Data(local_candidate)
        except IOError:
            from dallinger import logger
            logger.exception(
                "Error reading local data file {}, checking remote.".format(
                    local_candidate))
        else:
            return local_candidate

    # Fall back to S3 (boto2 API): try each bucket in turn, downloading
    # into a fresh temporary directory, and return the first hit.
    download_target = os.path.join(tempfile.mkdtemp(), archive_name)
    for bucket in (user_s3_bucket(), dallinger_s3_bucket()):
        key = Key(bucket)
        key.key = archive_name
        try:
            key.get_contents_to_filename(download_target)
        except boto.exception.S3ResponseError:
            # Treat any S3 error for this bucket as "not here"; keep looking.
            pass
        else:
            return download_target
def find_experiment_export(app_id):
    """Locate a zipped export of the experiment identified by ``app_id``.

    Returns the filesystem path to the archive, or ``None`` when no export
    can be found anywhere.

    Search order:

    1. local "data" subdirectory
    2. user S3 bucket
    3. Dallinger S3 bucket
    """
    archive_name = "{}-data.zip".format(app_id)

    # Prefer a copy already present under ./data, but only if it is readable
    # as a Data archive; a corrupt local file falls through to the remote
    # lookup instead of being returned.
    local_candidate = os.path.join(os.getcwd(), "data", archive_name)
    if os.path.exists(local_candidate):
        try:
            Data(local_candidate)
        except IOError:
            from dallinger import logger
            logger.exception(
                "Error reading local data file {}, checking remote.".format(
                    local_candidate))
        else:
            return local_candidate

    # Fall back to S3 (boto3 API): skip buckets that could not be resolved,
    # download into a fresh temporary directory, return the first hit.
    download_target = os.path.join(tempfile.mkdtemp(), archive_name)
    for bucket in (user_s3_bucket(), dallinger_s3_bucket()):
        if bucket is None:
            continue
        try:
            bucket.download_file(archive_name, download_target)
        except botocore.exceptions.ClientError:
            # NOTE(review): every client error (auth failure as well as a
            # 404) is treated as "not in this bucket" — presumably a
            # deliberate best-effort lookup; confirm before tightening.
            pass
        else:
            return download_target