        for file_name in filenames:
            if self._matches_exclude(file_name):
                self.info("Excluding {} from virus scan".format(file_name))
            else:
                self.info('Copying {} to {}'.format(file_name, self.dest_dir))
                self.copyfile(os.path.join(dirs['abs_work_dir'], file_name),
                              os.path.join(self.dest_dir, file_name))
        self._scan_files()
        self.info('Emptying {}'.format(self.dest_dir))
        self.rmtree(self.dest_dir)

    def _scan_files(self):
        """Scan the files we've collected. We do the download and scan
        concurrently to make it easier to have a coherent log afterwards.

        Uses the venv python."""
        external_tools_path = os.path.join(
            os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
            'external_tools')
        self.run_command([self.query_python_path(),
                          os.path.join(external_tools_path, 'extract_and_run_command.py'),
                          '-j{}'.format(self.config['scan_parallelization']),
                          'clamscan', '--no-summary', '--',
                          self.dest_dir])

    def _matches_exclude(self, keyname):
        return any(re.search(exclude, keyname) for exclude in self.excludes)

    def mime_fix(self):
        """ Add mimetypes for custom extensions """
        mimetypes.init()
        # Register each custom extension with its MIME type; a plain loop
        # avoids the Python-2-only tuple-unpacking lambda with map(), which
        # would also be a no-op under Python 3's lazy map().
        for ext, mime_type in MIME_MAP.items():
            mimetypes.add_type(mime_type, ext)


if __name__ == '__main__':
    beet_mover = BeetMover(pop_aws_auth_from_env())
    beet_mover.run_and_exit()
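# --- Example (not part of the script above): how the regex-based exclusion in
# _matches_exclude() behaves. A minimal, runnable sketch; the patterns below
# are hypothetical, not the excludes BeetMover is actually configured with.
import re

def matches_exclude(keyname, excludes):
    # A name is excluded if any pattern matches anywhere within it.
    return any(re.search(exclude, keyname) for exclude in excludes)

excludes = [r"\.asc$", r"\.checksums$"]  # assumed example patterns
print(matches_exclude("firefox-99.0.en-US.linux-x86_64.tar.bz2", excludes))      # False: copied and scanned
print(matches_exclude("firefox-99.0.en-US.linux-x86_64.tar.bz2.asc", excludes))  # True: skipped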
        def worker(item):
            source, destination = item
            self.info("Copying {} to {}".format(source, destination))
            return retry(bucket.copy_key,
                         args=(destination, self.config["bucket_name"], source),
                         sleeptime=5, max_sleeptime=60,
                         retry_exceptions=(S3CopyError, S3ResponseError))

        def find_release_files():
            candidates_prefix = self._get_candidates_prefix()
            release_prefix = self._get_releases_prefix()
            self.info("Getting key names from candidates")
            for key in bucket.list(prefix=candidates_prefix):
                keyname = key.name
                if self._matches_exclude(keyname):
                    self.debug("Excluding {}".format(keyname))
                else:
                    destination = keyname.replace(candidates_prefix, release_prefix)
                    yield (keyname, destination)

        pool = ThreadPool(self.config["parallelization"])
        pool.map(worker, find_release_files())


if __name__ == "__main__":
    myScript = ReleasePusher(pop_aws_auth_from_env())
    myScript.run_and_exit()
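# --- Example (not part of the script above): the fan-out pattern used there,
# where a generator yields (source, destination) pairs and ThreadPool.map hands
# them to workers wrapped in retry. A self-contained sketch with a stand-in
# retry(); all names below are illustrative, not the script's own.
import time
from multiprocessing.pool import ThreadPool

def retry(action, attempts=3, sleeptime=1, max_sleeptime=4,
          retry_exceptions=(Exception,)):
    # Minimal stand-in for the retry() helper the script imports: re-run
    # `action` with capped exponential backoff when a listed exception is raised.
    for attempt in range(attempts):
        try:
            return action()
        except retry_exceptions:
            if attempt == attempts - 1:
                raise
            time.sleep(min(sleeptime * 2 ** attempt, max_sleeptime))

def find_work():
    # Generator counterpart of find_release_files(): lazily yields work items.
    for n in range(5):
        yield ("candidates/build{}".format(n), "releases/build{}".format(n))

def worker(item):
    source, destination = item
    def copy():
        print("copying {} to {}".format(source, destination))
    return retry(copy, retry_exceptions=(IOError,))

pool = ThreadPool(4)           # parallelization factor, as in the script's config
pool.map(worker, find_work())  # blocks until every item has been processed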
        # Variant of worker() whose copy_key() closure makes the copy
        # idempotent: S3 reports a key's MD5 as its ETag, so matching ETags
        # mean the destination already holds identical content.
        def worker(item):
            source, destination = item

            def copy_key():
                source_key = bucket.get_key(source)
                dest_key = bucket.get_key(destination)
                if dest_key:
                    if dest_key.etag == source_key.etag:
                        self.warning(
                            "{} already exists with the same content ({}), skipping copy".format(
                                destination, dest_key.etag))
                    else:
                        self.fatal(
                            "{} already exists with the different content (src: {}, dest: {}), aborting".format(
                                destination, source_key.etag, dest_key.etag))
                else:
                    self.info("Copying {} to {}".format(source, destination))
                    bucket.copy_key(destination, self.config["bucket_name"], source)

            return retry(copy_key, sleeptime=5, max_sleeptime=60,
                         retry_exceptions=(S3CopyError, S3ResponseError))
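# --- Example (not part of the script above): the idempotency check relies on
# the fact that, for simple (non-multipart) uploads, S3 reports an object's
# ETag as the hex MD5 of its content. A local sketch of the same
# same-content-skip / different-content-abort decision; names are illustrative.
import hashlib

def etag_of(data):
    # What S3 would report as the ETag for a non-multipart upload of `data`.
    return hashlib.md5(data).hexdigest()

def decide_copy(source_data, dest_data):
    if dest_data is None:
        return "copy"   # destination missing: perform the copy
    if etag_of(dest_data) == etag_of(source_data):
        return "skip"   # identical content already present: safe to skip
    return "abort"      # conflicting content at the destination: fatal

print(decide_copy(b"payload", None))        # copy
print(decide_copy(b"payload", b"payload"))  # skip
print(decide_copy(b"payload", b"other"))    # abort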