def run(self):
    """Promote staging data to the live bucket, mirror it locally, then drop a flag file."""
    # Two hops: staging bucket -> live bucket, then live bucket -> local dir.
    for src, dst in ((self.staging_bucket, self.bucket), (self.bucket, self.local_dir)):
        common.quiet_cmd([
            'aws', '--cli-read-timeout=3600', '--profile=' + config.aws_profile(),
            's3', 'sync', src, dst
        ])
    # An empty flag file marks this task as complete.
    with open(self.flag_file(), 'w') as flag:
        flag.write('')
def _run(self):
    """Ensure the serology sync directory exists, then pull the bucket down into it."""
    common.cmd(['mkdir', '-p', SEROLOGY_TEST_SYNC_DIR])
    sync_args = [
        'aws',
        '--profile=' + config.aws_profile(),
        's3',
        'sync',
        SEROLOGY_TEST_BUCKET,
        SEROLOGY_TEST_SYNC_DIR,
    ]
    common.cmd(sync_args)
def _run(self):
    """Create the CloudFront access-logs directory and sync the logs bucket into it."""
    common.cmd(['mkdir', '-p', CF_ACCESS_LOGS_DIR])
    sync_args = [
        'aws',
        '--profile=' + config.aws_profile(),
        's3',
        'sync',
        CF_ACCESS_LOGS_BUCKET,
        CF_ACCESS_LOGS_DIR,
    ]
    common.cmd(sync_args)
def run(self):
    """Mirror the remote bucket into the local directory, then write an empty flag file."""
    profile_arg = '--profile=' + config.aws_profile()
    common.cmd(['aws', profile_arg, 's3', 'sync', self.bucket, self.local_dir])
    # Touch the flag file so downstream tasks see this one as done.
    with open(self.flag_file(), 'w') as flag:
        flag.write('')
def _run(self):
    """Create the local directory if needed and sync the bucket's contents down into it."""
    target = self.local_dir
    common.cmd(['mkdir', '-p', target])
    common.cmd(['aws', '--profile=' + config.aws_profile(), 's3', 'sync', self.bucket, target])
def run(self):
    """Sync the bucket locally using the configured aws binary, then flag completion."""
    # self.aws holds the path to the aws CLI executable for this task.
    sync_args = [self.aws, '--profile=' + config.aws_profile(), 's3', 'sync',
                 self.bucket, self.local_dir]
    common.cmd(sync_args)
    with open(self.flag_file(), 'w') as flag:
        flag.write('')
def run(self):
    """Sync the bucket locally with an extended CLI read timeout, then flag completion."""
    # 3600s read timeout guards against large-object transfers stalling the CLI.
    sync_args = ['aws', '--cli-read-timeout=3600', '--profile=' + config.aws_profile(),
                 's3', 'sync', self.bucket, self.local_dir]
    common.cmd(sync_args)
    with open(self.flag_file(), 'w') as flag:
        flag.write('')
def _run(self):
    """Quietly upload zip archives and schema files from FILES_DIR to the dated bucket path."""
    destination = 's3://%s/%s/' % (self.download_bucket, self.date_str)
    # Exclude everything, then re-include only the artifacts we publish.
    filters = '--exclude "*" --include "*.zip" --include "*schema.json"'
    command = ' '.join(['aws', '--profile', config.aws_profile(), 's3', 'sync',
                        FILES_DIR, destination, filters])
    common.shell_cmd_quiet(command)
def run(self):
    """Upload the dated download directory to S3, then touch the output marker file."""
    source_dir = join(BASE_DIR, self.date_str)
    destination = 's3://%s/%s/' % (self.download_bucket, self.date_str)
    # Exclude everything, then re-include only zips and schema files.
    filters = '--exclude "*" --include "*.zip" --include "*schema.json"'
    command = ' '.join(['aws', '--profile', config.aws_profile(), 's3', 'sync',
                        source_dir, destination, filters])
    common.shell_cmd(command)
    common.shell_cmd('touch %s', self.output().path)
def _run(self):
    """Download S3 access logs, then prune any logs dated after the CloudFront cutoff."""
    common.cmd(['mkdir', '-p', S3_ACCESS_LOGS_DIR])
    common.cmd([
        'aws', '--profile=' + config.aws_profile(), 's3', 'sync',
        S3_ACCESS_LOGS_BUCKET, S3_ACCESS_LOGS_DIR
    ])
    # CF logs supersede S3 access logs past the cutoff, so drop anything newer.
    for log_path in glob.glob(join(S3_ACCESS_LOGS_DIR, '*')):
        # First 10 chars of the filename are parsed as a date — presumably
        # YYYY-MM-DD; verify against the actual log naming scheme.
        log_date = arrow.get(os.path.basename(log_path)[:10])
        if log_date > S3_ACCESS_LOGS_CUTOFF:
            os.remove(log_path)
def run(self):
    """Sync the dated directory to S3 once, then touch every output marker file.

    Fix: the original re-ran the identical ``aws s3 sync`` command once per
    item of ``self.output()`` even though the command does not depend on the
    loop variable. The sync is loop-invariant, so it is hoisted out and runs
    exactly once; only the per-output ``touch`` remains in the loop.
    """
    sync_path = join(BASE_DIR, self.date_str)
    target_bucket = S3_BASE_BUCKET + '%s/' % self.date_str
    # Exclude everything, then re-include only zips and schema files.
    s3_cmd = [
        'aws', '--profile', config.aws_profile(), 's3', 'sync',
        sync_path, target_bucket,
        '--exclude "*"', '--include "*.zip"', '--include "*schema.json"'
    ]
    common.shell_cmd(' '.join(s3_cmd))
    # Mark each declared output as complete.
    for data_path in self.output():
        common.shell_cmd('touch %s', data_path.path)
def run(self):
    """Push the dated download directory up to the download bucket and touch the marker."""
    source_dir = join(BASE_DIR, self.date_str)
    destination = 's3://%s/%s/' % (self.download_bucket, self.date_str)
    # Only .zip and *schema.json files are uploaded; everything else is excluded.
    filters = '--exclude "*" --include "*.zip" --include "*schema.json"'
    command = ' '.join(['aws', '--profile', config.aws_profile(), 's3', 'sync',
                        source_dir, destination, filters])
    common.shell_cmd(command)
    common.shell_cmd('touch %s', self.output().path)
def _run(self):
    """Make sure the local directory exists, then mirror the bucket into it."""
    common.cmd(['mkdir', '-p', self.local_dir])
    sync_args = ['aws', '--profile=' + config.aws_profile(), 's3', 'sync',
                 self.bucket, self.local_dir]
    common.cmd(sync_args)
def run(self):
    """Quietly sync the bucket to the local directory using the configured aws binary."""
    # self.aws holds the path to the aws CLI executable for this task.
    sync_args = [self.aws, '--profile=' + config.aws_profile(), 's3', 'sync',
                 self.bucket, self.local_dir]
    common.quiet_cmd(sync_args)
def run(self):
    """Sync the remote bucket to the local directory and write an empty completion flag."""
    sync_args = ["aws", "--profile=" + config.aws_profile(), "s3", "sync",
                 self.bucket, self.local_dir]
    common.cmd(sync_args)
    # Empty flag file signals downstream tasks that the sync finished.
    with open(self.flag_file(), "w") as flag:
        flag.write("")