def test_S3_communication(self):
    'test downloading/uploading from/to S3'
    from utils import s3_upload_file
    from utils import s3_download_file
    # Make sure the backup file still exists
    current_dir = os.path.dirname(os.path.realpath(__file__))
    backup_file = '%s/adsabs_consul_kv.2015-10-21.json' % current_dir
    backup_copy = '%s/test_backup.json' % current_dir
    self.assertTrue(os.path.exists(backup_file))
    # make a copy to test with
    shutil.copyfile(backup_file, backup_copy)
    with mock_s3():
        # Create the mocked S3 session object
        s3 = boto3.resource('s3')
        # See to it that the expected S3 bucket exists
        s3.create_bucket(Bucket=S3_bucket)
        # Upload the backup file to the mocked S3
        s3_upload_file(s3, backup_copy, S3_bucket)
        # Is the file in the bucket?
        bucket_contents = [o.key for o in s3.Bucket(S3_bucket).objects.all()]
        # Is it what we expect?
        expected_contents = [os.path.basename(backup_copy)]
        self.assertEqual(bucket_contents, expected_contents)
        # Now check if we can download the file
        os.remove(backup_copy)
        # It really is no longer there
        self.assertFalse(os.path.exists(backup_copy))
        # Download the file from mocked S3
        s3_download_file(s3, backup_copy, S3_bucket)
        # The file should be back
        self.assertTrue(os.path.exists(backup_copy))
        # and be the same as the original
        self.assertTrue(filecmp.cmp(backup_file, backup_copy, shallow=False))
        # Finally, remove the copy
        os.remove(backup_copy)
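# The s3_upload_file/s3_download_file helpers imported from utils are not
# shown in this snippet. A minimal sketch of what they might look like
# (an assumption, not the project's actual code), using boto3's managed
# transfer methods and keying each object by its basename, which is what
# the bucket-listing assertion above expects:

import os

def s3_upload_file(s3, local_file, bucket_name):
    # Store local_file in the bucket under its basename
    s3.Bucket(bucket_name).upload_file(local_file, os.path.basename(local_file))

def s3_download_file(s3, local_file, bucket_name):
    # Fetch the object named after local_file's basename back to local_file
    s3.Bucket(bucket_name).download_file(os.path.basename(local_file), local_file)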
    try:
        records = get_records_from_consul(session)
    except Exception as e:
        logging.error('Unable to retrieve records from Consul store: %s' % e)
        sys.exit(2)
    # Write the records to the backup file
    try:
        save_records(records, backup_file)
    except Exception as e:
        logging.error('Unable to write to backup file: %s (%s)' % (backup_file, e))
        sys.exit(2)
    logging.info('Backup was written to: %s' % backup_file)
    # Now copy the backup to S3
    s3 = get_s3_resource()
    try:
        s3_upload_file(s3, backup_file, backup_folder)
    except Exception as e:
        logging.error('Unable to move backup to S3: %s' % e)
    # Finally, remove the local copy
    logging.info('Removing local copy of backup file: %s' % backup_file)
    os.remove(backup_file)
elif action == 'restore':
    # Construct the name of the backup file to retrieve
    fname = os.environ.get('BACKUP_FILE', 'adsabs_consul_kv')
    backup_file = '%s/%s.%s.json' % (tmp_dir, fname, restore_id)
    # Get the file from S3
    s3 = get_s3_resource()
    try:
        s3_download_file(s3, backup_file, backup_folder)
    except Exception as e:
        logging.error('Unable to get backup file %s from S3: %s' % (backup_file, e))
        sys.exit(2)
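# get_s3_resource() is defined elsewhere in this script. A minimal sketch of
# a plausible implementation (an assumption, not the script's actual code),
# consistent with the boto3.resource('s3') object the test above passes to
# the same helpers:

import boto3

def get_s3_resource():
    # boto3 resolves credentials from env vars, ~/.aws config, or an IAM role
    return boto3.resource('s3')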
            'python3',
            os.path.join(thispath, 'trim_transcript.py'),
            tmpdir + fielddat['transcript'],
            '-ss', str(args.trimstart),
            '-te', str(args.trimend)
        ])
        out_file = fielddat['transcript'][:-len('.json')] + '_trimmed.json'
        out_path = f'{tmpdir}/{out_file}'
        assert os.path.isfile(out_path), out_path
        getorpostcontent('transcript', out_file)
        full_key = f'{univ2lect_key}/{out_file}'
        s3_upload_file(bucket, full_key, out_path)
    else:
        print("====================================== warning: no transcript for "
              + str(univ2lect))
else:
    print('skipping transcript')

if TRIM_NOTES is True:
    if len(fielddat['time_block']) > 0:
        assert fielddat['time_block'].count('/') == 1, str(fielddat['time_block'])
        # split "TB1/meta.json" --> "TB1", "meta.json"
        fielddat['time_block'], timeblockmetajson = fielddat['time_block'].split('/')
        in_folder = fielddat['time_block']
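# Note that this codebase calls the helper as s3_upload_file(bucket, key, path),
# unlike the (resource, local_path, bucket_name) signature in the snippets
# above. A hypothetical one-liner consistent with that call, assuming `bucket`
# is a boto3 Bucket object:

def s3_upload_file(bucket, key, local_path):
    # Upload local_path to the given key within the bucket
    bucket.upload_file(local_path, key)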