def test_class_constructor_only_accepts_py_modules_not_pyc(self):
    # Create a module with both *.py and *.pyc.
    self.py_with_pyc('foo.py')
    # Create another with a *.pyc but no *.py behind it.
    os.unlink(self.py_with_pyc('bar.py'))
    # Now: *.py takes precedence over *.pyc ...
    get = lambda name: os.path.basename(imp.find_module(name)[1])
    self.assertTrue(get('foo'), 'foo.py')
    try:
        # ... and while *.pyc is importable ...
        self.assertTrue(get('bar'), 'bar.pyc')
    except ImportError:
        # (except on PyPy)
        # http://doc.pypy.org/en/latest/config/objspace.lonepycfiles.html
        self.assertEqual(platform.python_implementation(), 'PyPy')
    else:
        # ... we refuse it.
        with self.assertRaises(ValueError) as raised:
            PythonPackageArchive('bar')
        msg = raised.exception.args[0]
        self.assertTrue(msg.startswith('We need a *.py source file instead'))
        self.assertTrue(msg.endswith('bar.pyc'))
    # We readily ignore a *.pyc if a *.py exists.
    archive = PythonPackageArchive('foo')
    archive.close()
    self.assertEqual(archive.get_filenames(), ['foo.py'])
    with archive.get_reader() as reader:
        self.assertEqual('42', reader.read('foo.py'))
def test_lambda_cross_account(self):
    self.patch(CrossAccountAccessFilter, "executor_factory", MainThreadExecutor)
    session_factory = self.replay_flight_data("test_cross_account_lambda")
    client = session_factory().client("lambda")
    name = "c7n-cross-check"
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(os.rmdir, tmp_dir)
    archive = PythonPackageArchive()
    archive.add_contents("handler.py", LAMBDA_SRC)
    archive.close()
    func = LambdaFunction(
        {
            "runtime": "python2.7",
            "name": name,
            "description": "",
            "handler": "handler.handler",
            "memory_size": 128,
            "timeout": 5,
            "role": self.role,
        },
        archive,
    )
    manager = LambdaManager(session_factory)
    manager.publish(func)
    self.addCleanup(manager.remove, func)
    client.add_permission(
        FunctionName=name,
        StatementId="oops",
        Principal="*",
        Action="lambda:InvokeFunction",
    )
    p = self.load_policy(
        {
            "name": "lambda-cross",
            "resource": "lambda",
            "filters": ["cross-account"],
        },
        session_factory=session_factory,
    )
    resources = p.run()
    self.assertEqual(len(resources), 1)
    self.assertEqual(resources[0]["FunctionName"], name)
def make_func(self, **kw):
    func_data = dict(
        name='test-foo-bar',
        handler='index.handler',
        memory_size=128,
        timeout=3,
        role=ROLE,
        runtime='python2.7',
        description='test')
    func_data.update(kw)
    archive = PythonPackageArchive()
    archive.add_contents(
        'index.py',
        '''def handler(*a, **kw):\n print("Greetings, program!")''')
    archive.close()
    self.addCleanup(archive.remove)
    return LambdaFunction(func_data, archive)
def get_function(session_factory, options, groups):
    config = dict(
        name='cloud-maid-error-notify',
        handler='logsub.process_log_event',
        runtime='python2.7',
        memory_size=512,
        timeout=15,
        role=options.role,
        description='Maid Error Notify',
        events=[
            CloudWatchLogSubscription(
                session_factory, groups, options.pattern)])

    archive = PythonPackageArchive(
        # Directory to lambda file
        os.path.join(
            os.path.dirname(inspect.getabsfile(c7n)), 'logsub.py'),
        # Don't include virtualenv deps
        lib_filter=lambda x, y, z: ([], []))
    archive.create()
    archive.add_contents(
        'config.json', json.dumps({
            'topic': options.topic,
            'subject': options.subject
        }))
    archive.close()

    return LambdaFunction(config, archive)
def get_function(session_factory, name, role, sns_topic, log_groups,
                 subject="Lambda Error", pattern="Traceback"):
    """Lambda function provisioning.

    Self contained within the component, to allow for easier reuse.
    """
    # Lazy import to avoid runtime dependency
    from c7n.mu import (
        LambdaFunction, PythonPackageArchive, CloudWatchLogSubscription)

    config = dict(
        name=name,
        handler='logsub.process_log_event',
        runtime='python2.7',
        memory_size=512,
        timeout=15,
        role=role,
        description='Custodian Ops Error Notify',
        events=[
            CloudWatchLogSubscription(
                session_factory, log_groups, pattern)])

    archive = PythonPackageArchive()
    archive.add_py_file(__file__)
    archive.add_contents(
        'config.json', json.dumps({
            'topic': sns_topic,
            'subject': subject
        }))
    archive.close()

    return LambdaFunction(config, archive)
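# --- Usage sketch (not part of the original source) ---
# A minimal, hypothetical example of provisioning the error-notify function
# above and publishing it with c7n's LambdaManager, mirroring the
# publish/remove flow used by the tests in this collection. The role ARN,
# SNS topic and log group values are placeholders, and the log_groups format
# is assumed.
import boto3

from c7n.mu import LambdaManager


def session_factory():
    return boto3.Session()


func = get_function(
    session_factory,
    name='custodian-error-notify',
    role='arn:aws:iam::123456789012:role/lambda_basic_execution',  # assumed role ARN
    sns_topic='arn:aws:sns:us-east-1:123456789012:ops-alerts',     # assumed topic
    log_groups=['/aws/lambda/custodian'])                          # assumed format

manager = LambdaManager(session_factory)
manager.publish(func)  # create/update the function and wire up the log subscription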
def get_function(session_factory, name, role, events):
    from c7n.mu import (LambdaFunction, PythonPackageArchive)

    config = dict(
        name=name,
        handler='helloworld.main',
        runtime='python2.7',
        memory_size=512,
        timeout=15,
        role=role,
        description='Hello World',
        events=events)

    archive = PythonPackageArchive()
    archive.add_py_file(__file__)
    archive.close()

    return LambdaFunction(config, archive)
def make_func(self, **kw):
    func_data = dict(
        name="test-foo-bar",
        handler="index.handler",
        memory_size=128,
        timeout=3,
        role=ROLE,
        runtime="python2.7",
        description="test",
    )
    func_data.update(kw)
    archive = PythonPackageArchive()
    archive.add_contents(
        "index.py", """def handler(*a, **kw):\n print("Greetings, program!")"""
    )
    archive.close()
    self.addCleanup(archive.remove)
    return LambdaFunction(func_data, archive)
def __init__(self, name, function_path=None):
    self.log = logging.getLogger('custodian.azure.function_package')
    self.pkg = PythonPackageArchive()
    self.name = name
    self.function_path = function_path or os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'function.py')

    self.enable_ssl_cert = not distutils.util.strtobool(
        os.environ.get(ENV_CUSTODIAN_DISABLE_SSL_CERT_VERIFICATION, 'no'))

    if not self.enable_ssl_cert:
        self.log.warning('SSL Certificate Validation is disabled')
def get_archive(config):
    archive = PythonPackageArchive(
        'c7n_mailer', 'ldap3', 'pyasn1', 'jinja2', 'markupsafe', 'ruamel',
        'redis', 'datadog', 'slackclient', 'requests')

    template_dir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))

    for t in os.listdir(template_dir):
        with open(os.path.join(template_dir, t)) as fh:
            archive.add_contents('msg-templates/%s' % t, fh.read())

    archive.add_contents('config.json', json.dumps(config))
    archive.add_contents('periodic.py', entry_source)

    archive.close()
    return archive
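# --- Usage sketch (not part of the original source) ---
# Build the mailer archive with an assumed minimal config and inspect the
# resulting zip via the PythonPackageArchive accessors exercised elsewhere in
# these snippets (get_filenames, get_bytes, get_reader). Assumes it runs in
# the mailer deploy module context where entry_source and msg-templates exist.
archive = get_archive(
    {'queue_url': 'https://sqs.us-east-1.amazonaws.com/123456789012/mailer'})  # assumed config
print(archive.get_filenames())        # package files plus config.json, periodic.py, msg-templates/*
zip_bytes = archive.get_bytes()       # raw zip payload, e.g. for Code={'ZipFile': zip_bytes}
with archive.get_reader() as reader:  # read individual entries back out of the zip
    print(reader.read('config.json'))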
def test_lambda_cross_account(self):
    self.patch(
        CrossAccountAccessFilter, 'executor_factory', MainThreadExecutor)
    session_factory = self.replay_flight_data('test_cross_account_lambda')
    client = session_factory().client('lambda')
    name = 'c7n-cross-check'
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(os.rmdir, tmp_dir)
    archive = PythonPackageArchive()
    archive.add_contents('handler.py', LAMBDA_SRC)
    archive.close()

    func = LambdaFunction({
        'runtime': 'python2.7',
        'name': name,
        'description': '',
        'handler': 'handler.handler',
        'memory_size': 128,
        'timeout': 5,
        'role': self.role}, archive)

    manager = LambdaManager(session_factory)
    info = manager.publish(func)
    self.addCleanup(manager.remove, func)

    client.add_permission(
        FunctionName=name,
        StatementId='oops',
        Principal='*',
        Action='lambda:InvokeFunction')

    p = self.load_policy(
        {'name': 'lambda-cross',
         'resource': 'lambda',
         'filters': ['cross-account']},
        session_factory=session_factory)
    resources = p.run()
    self.assertEqual(len(resources), 1)
    self.assertEqual(resources[0]['FunctionName'], name)
def test_class_constructor_only_accepts_py_modules_not_pyc(self):
    # Create a module with both *.py and *.pyc.
    self.py_with_pyc("foo.py")
    # Create another with a *.pyc but no *.py behind it.
    os.unlink(self.py_with_pyc("bar.py"))

    # Now: *.py takes precedence over *.pyc ...
    def get(name):
        return os.path.basename(importlib.import_module(name).__file__)

    self.assertTrue(get("foo"), "foo.py")
    try:
        # ... and while *.pyc is importable ...
        self.assertTrue(get("bar"), "bar.pyc")
    except ImportError:
        try:
            # (except on PyPy)
            # http://doc.pypy.org/en/latest/config/objspace.lonepycfiles.html
            self.assertEqual(platform.python_implementation(), "PyPy")
        except AssertionError:
            # (... aaaaaand Python 3)
            self.assertEqual(platform.python_version_tuple()[0], "3")
    else:
        # ... we refuse it.
        with self.assertRaises(ValueError) as raised:
            PythonPackageArchive("bar")
        msg = raised.exception.args[0]
        self.assertTrue(msg.startswith("Could not find a *.py source file"))
        self.assertTrue(msg.endswith("bar.pyc"))

    # We readily ignore a *.pyc if a *.py exists.
    archive = PythonPackageArchive("foo")
    archive.close()
    self.assertEqual(archive.get_filenames(), ["foo.py"])
    with archive.get_reader() as reader:
        self.assertEqual(b"42", reader.read("foo.py"))
def get_archive(config):
    archive = PythonPackageArchive(
        'c7n_mailer',
        # core deps
        'jinja2', 'markupsafe', 'ruamel', 'ldap3', 'pyasn1', 'redis',
        # transport datadog - recursive deps
        'datadog', 'simplejson', 'decorator',
        # transport slack - recursive deps
        'slackclient', 'websocket',
        # requests (recursive deps), needed by datadog and slackclient
        'requests', 'urllib3', 'idna', 'chardet', 'certifi')

    template_dir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))

    for t in os.listdir(template_dir):
        with open(os.path.join(template_dir, t)) as fh:
            archive.add_contents('msg-templates/%s' % t, fh.read())

    archive.add_contents('config.json', json.dumps(config))
    archive.add_contents('periodic.py', entry_source)

    archive.close()
    return archive
def create_function(self, client, name):
    archive = PythonPackageArchive()
    self.addCleanup(archive.remove)
    archive.add_contents('index.py', SAMPLE_FUNC)
    archive.close()

    lfunc = client.create_function(
        FunctionName=name,
        Runtime="python2.7",
        MemorySize=128,
        Handler='index.handler',
        Publish=True,
        Role='arn:aws:iam::644160558196:role/lambda_basic_execution',
        Code={'ZipFile': archive.get_bytes()})
    self.addCleanup(client.delete_function, FunctionName=name)
    return lfunc
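# --- Illustrative definition (not part of the original source) ---
# SAMPLE_FUNC is defined elsewhere in the test module; a handler source of
# roughly this shape would satisfy the 'index.handler' entry point configured
# in create_function above.
SAMPLE_FUNC = """\
def handler(event, context):
    return {"status": "ok"}
"""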
def get_function(session_factory, name, role, sns_topic, log_groups,
                 subject="Lambda Error", pattern="Traceback"):
    """Lambda function provisioning.

    Self contained within the component, to allow for easier reuse.
    """
    # Lazy import to avoid runtime dependency
    import inspect
    import os

    import c7n
    from c7n.mu import (
        LambdaFunction, PythonPackageArchive, CloudWatchLogSubscription)

    config = dict(
        name=name,
        handler='logsub.process_log_event',
        runtime='python2.7',
        memory_size=512,
        timeout=15,
        role=role,
        description='Custodian Ops Error Notify',
        events=[
            CloudWatchLogSubscription(
                session_factory, log_groups, pattern)])

    archive = PythonPackageArchive(
        # Directory to lambda file
        os.path.join(
            os.path.dirname(inspect.getabsfile(c7n)), 'ufuncs', 'logsub.py'),
        # Don't include virtualenv deps
        lib_filter=lambda x, y, z: ([], []))
    archive.create()
    archive.add_contents(
        'config.json', json.dumps({
            'topic': sns_topic,
            'subject': subject
        }))
    archive.close()

    return LambdaFunction(config, archive)
def get_function(session_factory, name, handler, runtime, role,
                 log_groups, project, account_name, account_id,
                 sentry_dsn, pattern="Traceback"):
    """Lambda function provisioning.

    Self contained within the component, to allow for easier reuse.
    """
    # Lazy import to avoid runtime dependency
    from c7n.mu import (
        LambdaFunction, PythonPackageArchive, CloudWatchLogSubscription)

    config = dict(
        name=name,
        handler=handler,
        runtime=runtime,
        memory_size=512,
        timeout=15,
        role=role,
        description='Custodian Sentry Relay',
        events=[
            CloudWatchLogSubscription(
                session_factory, log_groups, pattern)])

    archive = PythonPackageArchive('c7n_sentry')
    archive.add_contents(
        'config.json', json.dumps({
            'project': project,
            'account_name': account_name,
            'account_id': account_id,
            'sentry_dsn': sentry_dsn,
        }))
    archive.add_contents(
        'handler.py',
        'from c7n_sentry.c7nsentry import process_log_event')
    archive.close()

    return LambdaFunction(config, archive)
class FunctionPackage(object):

    def __init__(self, name, function_path=None, target_sub_ids=None, cache_override_path=None):
        self.log = logging.getLogger('custodian.azure.function_package')
        self.pkg = None
        self.name = name
        self.function_path = function_path or os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'function.py')
        self.cache_override_path = cache_override_path

        self.enable_ssl_cert = not distutils.util.strtobool(
            os.environ.get(ENV_CUSTODIAN_DISABLE_SSL_CERT_VERIFICATION, 'no'))

        if target_sub_ids is not None:
            self.target_sub_ids = target_sub_ids
        else:
            self.target_sub_ids = [None]

        if not self.enable_ssl_cert:
            self.log.warning('SSL Certificate Validation is disabled')

    def _add_functions_required_files(self, policy, queue_name=None):
        s = local_session(Session)
        for target_sub_id in self.target_sub_ids:
            name = self.name + ("_" + target_sub_id if target_sub_id else "")

            # generate and add auth
            self.pkg.add_contents(dest=name + '/auth.json',
                                  contents=s.get_functions_auth_string(target_sub_id))

            self.pkg.add_file(self.function_path, dest=name + '/function.py')
            self.pkg.add_contents(dest=name + '/__init__.py', contents='')

            if policy:
                config_contents = self.get_function_config(policy, queue_name)
                policy_contents = self._get_policy(policy)
                self.pkg.add_contents(dest=name + '/function.json',
                                      contents=config_contents)
                self.pkg.add_contents(dest=name + '/config.json',
                                      contents=policy_contents)
                self._add_host_config(policy['mode']['type'])
            else:
                self._add_host_config(None)

    def _add_host_config(self, mode):
        config = copy.deepcopy(FUNCTION_HOST_CONFIG)
        if mode == FUNCTION_EVENT_TRIGGER_MODE:
            config['extensionBundle'] = FUNCTION_EXTENSION_BUNDLE_CONFIG
        self.pkg.add_contents(dest='host.json', contents=json.dumps(config))

    def get_function_config(self, policy, queue_name=None):
        config = \
            {
                "scriptFile": "function.py",
                "bindings": [{
                    "direction": "in"
                }]
            }

        mode_type = policy['mode']['type']
        binding = config['bindings'][0]

        if mode_type == FUNCTION_TIME_TRIGGER_MODE:
            binding['type'] = 'timerTrigger'
            binding['name'] = 'input'
            binding['schedule'] = policy['mode']['schedule']
        elif mode_type == FUNCTION_EVENT_TRIGGER_MODE:
            binding['type'] = 'queueTrigger'
            binding['connection'] = 'AzureWebJobsStorage'
            binding['name'] = 'input'
            binding['queueName'] = queue_name
        else:
            self.log.error("Mode not yet supported for Azure functions (%s)"
                           % mode_type)

        return json.dumps(config, indent=2)

    def _get_policy(self, policy):
        return json.dumps({'policies': [policy]}, indent=2)

    def _update_perms_package(self):
        os.chmod(self.pkg.path, 0o0644)

    @property
    def cache_folder(self):
        if self.cache_override_path:
            return self.cache_override_path

        c7n_azure_root = os.path.dirname(__file__)
        return os.path.join(c7n_azure_root, 'cache')

    def build(self, policy, modules, non_binary_packages, excluded_packages, queue_name=None):
        cache_zip_file = self.build_cache(modules, excluded_packages, non_binary_packages)

        self.pkg = PythonPackageArchive(cache_file=cache_zip_file)
        self.pkg.add_modules(None, [m.replace('-', '_') for m in modules])

        # add config and policy
        self._add_functions_required_files(policy, queue_name)

    def build_cache(self, modules, excluded_packages, non_binary_packages):
        wheels_folder = os.path.join(self.cache_folder, 'wheels')
        wheels_install_folder = os.path.join(self.cache_folder, 'dependencies')
        cache_zip_file = os.path.join(self.cache_folder, 'cache.zip')
        cache_metadata_file = os.path.join(self.cache_folder, 'metadata.json')

        packages = DependencyManager.get_dependency_packages_list(modules, excluded_packages)

        if not DependencyManager.check_cache(cache_metadata_file, cache_zip_file, packages):
            cache_pkg = PythonPackageArchive()
            self.log.info("Cached packages not found or requirements were changed.")

            # If cache check fails, wipe all previous wheels, installations etc
            if os.path.exists(self.cache_folder):
                self.log.info("Removing cache folder...")
                shutil.rmtree(self.cache_folder)

            self.log.info("Preparing non binary wheels...")
            DependencyManager.prepare_non_binary_wheels(non_binary_packages, wheels_folder)

            self.log.info("Downloading wheels...")
            DependencyManager.download_wheels(packages, wheels_folder)

            self.log.info("Installing wheels...")
            DependencyManager.install_wheels(wheels_folder, wheels_install_folder)

            for root, _, files in os.walk(wheels_install_folder):
                arc_prefix = os.path.relpath(root, wheels_install_folder)
                for f in files:
                    dest_path = os.path.join(arc_prefix, f)
                    if f.endswith('.pyc') or f.endswith('.c'):
                        continue
                    f_path = os.path.join(root, f)
                    cache_pkg.add_file(f_path, dest_path)

            self.log.info('Saving cache zip file...')
            cache_pkg.close()
            with open(cache_zip_file, 'wb') as fout:
                fout.write(cache_pkg.get_stream().read())

            self.log.info("Removing temporary folders...")
            shutil.rmtree(wheels_folder)
            shutil.rmtree(wheels_install_folder)

            self.log.info("Updating metadata file...")
            DependencyManager.create_cache_metadata(cache_metadata_file,
                                                    cache_zip_file,
                                                    packages)

        return cache_zip_file

    def wait_for_status(self, deployment_creds, retries=10, delay=15):
        for r in range(retries):
            if self.status(deployment_creds):
                return True
            else:
                self.log.info('(%s/%s) Will retry Function App status check in %s seconds...'
                              % (r + 1, retries, delay))
                time.sleep(delay)
        return False

    def status(self, deployment_creds):
        status_url = '%s/api/deployments' % deployment_creds.scm_uri
        r = requests.get(status_url, verify=self.enable_ssl_cert)

        if r.status_code != 200:
            self.log.error("Application service returned an error.\n%s\n%s"
                           % (r.status_code, r.text))
            return False
        return True

    def publish(self, deployment_creds):
        self.close()

        # update perms of the package
        self._update_perms_package()

        zip_api_url = '%s/api/zipdeploy?isAsync=true' % deployment_creds.scm_uri
        headers = {'content-type': 'application/octet-stream'}
        self.log.info("Publishing Function package from %s" % self.pkg.path)

        zip_file = self.pkg.get_bytes()

        try:
            r = requests.post(zip_api_url, data=zip_file, headers=headers,
                              timeout=300, verify=self.enable_ssl_cert)
        except requests.exceptions.ReadTimeout:
            self.log.error("Your Function App deployment timed out after 5 minutes. Try again.")

        r.raise_for_status()
        self.log.info("Function publish result: %s" % r.status_code)

    def close(self):
        self.pkg.close()
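# --- Usage sketch (not part of the original source) ---
# Hypothetical build-and-publish flow for the cache-based FunctionPackage
# above. The policy dict, module list, schedule, and deployment credentials
# are placeholder values; the credentials object only needs the scm_uri
# attribute that status() and publish() read.
class DeploymentCreds(object):
    # stand-in for the credentials object normally supplied by the caller
    scm_uri = 'https://my-function-app.scm.azurewebsites.net'


deployment_creds = DeploymentCreds()

pkg = FunctionPackage('my-policy')
pkg.build(
    policy={'name': 'my-policy',
            'mode': {'type': FUNCTION_TIME_TRIGGER_MODE,
                     'schedule': '0 */5 * * * *'}},  # assumed policy shape
    modules=['c7n', 'c7n-azure'],                    # assumed module list
    non_binary_packages=[],
    excluded_packages=[])

if pkg.wait_for_status(deployment_creds):
    pkg.publish(deployment_creds)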
def get_archive(config):
    required = ['ldap', 'jinja2', 'markupsafe']
    remove = ['_yaml.so', 'c7n.egg-link']

    def lib_filter(root, dirs, files):
        for f in tuple(files):
            if f.endswith('.pyo'):
                files.remove(f)
        for r in remove:
            if r in files:
                files.remove(r)
        if os.path.basename(root) == 'site-packages':
            for n in tuple(dirs):
                if n not in required:
                    dirs.remove(n)
        return dirs, files

    archive = PythonPackageArchive(
        os.path.dirname(__file__),
        skip='*.pyc',
        lib_filter=lib_filter)
    archive.create()

    template_dir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))

    for t in os.listdir(template_dir):
        with open(os.path.join(template_dir, t)) as fh:
            archive.add_contents('msg-templates/%s' % t, fh.read())

    archive.add_contents('config.json', json.dumps(config))
    archive.add_contents('periodic.py', entry_source)
    archive.close()
    return archive
class FunctionPackage(object):

    def __init__(self, name, function_path=None):
        self.log = logging.getLogger('custodian.azure.function_package')
        self.pkg = PythonPackageArchive()
        self.name = name
        self.function_path = function_path or os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'function.py')

        self.enable_ssl_cert = not distutils.util.strtobool(
            os.environ.get(ENV_CUSTODIAN_DISABLE_SSL_CERT_VERIFICATION, 'no'))

        if not self.enable_ssl_cert:
            self.log.warning('SSL Certificate Validation is disabled')

    def _add_functions_required_files(self, policy, queue_name=None):
        self.pkg.add_file(self.function_path, dest=self.name + '/function.py')
        self.pkg.add_contents(dest=self.name + '/__init__.py', contents='')

        self._add_host_config()

        if policy:
            config_contents = self.get_function_config(policy, queue_name)
            policy_contents = self._get_policy(policy)

            self.pkg.add_contents(dest=self.name + '/function.json',
                                  contents=config_contents)
            self.pkg.add_contents(dest=self.name + '/config.json',
                                  contents=policy_contents)

            if policy['mode']['type'] == FUNCTION_EVENT_TRIGGER_MODE:
                self._add_queue_binding_extensions()

    def _add_host_config(self):
        config = \
            {
                "version": "2.0",
                "healthMonitor": {
                    "enabled": True,
                    "healthCheckInterval": "00:00:10",
                    "healthCheckWindow": "00:02:00",
                    "healthCheckThreshold": 6,
                    "counterThreshold": 0.80
                },
                "functionTimeout": "00:05:00",
                "logging": {
                    "fileLoggingMode": "debugOnly"
                },
                "extensions": {
                    "http": {
                        "routePrefix": "api",
                        "maxConcurrentRequests": 5,
                        "maxOutstandingRequests": 30
                    }
                }
            }
        self.pkg.add_contents(dest='host.json', contents=json.dumps(config))

    def _add_queue_binding_extensions(self):
        bindings_dir_path = os.path.abspath(
            os.path.join(os.path.join(__file__, os.pardir),
                         'function_binding_resources'))
        bin_path = os.path.join(bindings_dir_path, 'bin')
        self.pkg.add_directory(bin_path)
        self.pkg.add_file(os.path.join(bindings_dir_path, 'extensions.csproj'))

    def get_function_config(self, policy, queue_name=None):
        config = \
            {
                "scriptFile": "function.py",
                "bindings": [{
                    "direction": "in"
                }]
            }

        mode_type = policy['mode']['type']
        binding = config['bindings'][0]

        if mode_type == FUNCTION_TIME_TRIGGER_MODE:
            binding['type'] = 'timerTrigger'
            binding['name'] = 'input'
            binding['schedule'] = policy['mode']['schedule']
        elif mode_type == FUNCTION_EVENT_TRIGGER_MODE:
            binding['type'] = 'queueTrigger'
            binding['connection'] = 'AzureWebJobsStorage'
            binding['name'] = 'input'
            binding['queueName'] = queue_name
        else:
            self.log.error("Mode not yet supported for Azure functions (%s)"
                           % mode_type)

        return json.dumps(config, indent=2)

    def _get_policy(self, policy):
        return json.dumps({'policies': [policy]}, indent=2)

    def _add_cffi_module(self):
        """CFFI native bits aren't discovered automatically
        so for now we grab them manually from supported platforms"""
        self.pkg.add_modules('cffi')

        # Add native libraries that are missing
        site_pkg = FunctionPackage._get_site_packages()[0]

        # linux
        platform = sys.platform
        if platform == "linux" or platform == "linux2":
            for so_file in os.listdir(site_pkg):
                if fnmatch.fnmatch(so_file, '*ffi*.so*'):
                    self.pkg.add_file(os.path.join(site_pkg, so_file))
            self.pkg.add_directory(os.path.join(site_pkg, '.libs_cffi_backend'))
        # MacOS
        elif platform == "darwin":
            raise NotImplementedError(
                'Cannot package Azure Function in MacOS host OS, '
                'please use linux.')
        # Windows
        elif platform == "win32":
            raise NotImplementedError(
                'Cannot package Azure Function in Windows host OS, '
                'please use linux or WSL.')

    def _update_perms_package(self):
        os.chmod(self.pkg.path, 0o0644)

    def build(self, policy, queue_name=None, entry_point=None, extra_modules=None):
        # Get dependencies for azure entry point
        entry_point = entry_point or \
            os.path.join(os.path.dirname(os.path.realpath(__file__)), 'entry.py')
        modules, so_files = FunctionPackage._get_dependencies(entry_point)

        # add all loaded modules
        modules.discard('azure')
        modules = modules.union({
            'c7n', 'c7n_azure', 'pkg_resources', 'knack',
            'argcomplete', 'applicationinsights'})
        if extra_modules:
            modules = modules.union(extra_modules)

        self.pkg.add_modules(None, *modules)

        # adding azure manually
        # we need to ignore the __init__.py of the azure namespace for packaging
        # https://www.python.org/dev/peps/pep-0420/
        self.pkg.add_modules(lambda f: f == 'azure/__init__.py', 'azure')

        # add config and policy
        self._add_functions_required_files(policy, queue_name)

        # generate and add auth
        s = local_session(Session)
        self.pkg.add_contents(dest=self.name + '/auth.json',
                              contents=s.get_functions_auth_string())

        # cffi module needs special handling
        self._add_cffi_module()

    def wait_for_status(self, deployment_creds, retries=10, delay=15):
        for r in range(retries):
            if self.status(deployment_creds):
                return True
            else:
                self.log.info(
                    '(%s/%s) Will retry Function App status check in %s seconds...'
                    % (r + 1, retries, delay))
                time.sleep(delay)
        return False

    def status(self, deployment_creds):
        status_url = '%s/api/deployments' % deployment_creds.scm_uri

        try:
            r = requests.get(status_url, timeout=30, verify=self.enable_ssl_cert)
        except requests.exceptions.ReadTimeout:
            self.log.error(
                "Your Function app is not responding to a status request.")
            return False

        if r.status_code != 200:
            self.log.error("Application service returned an error.\n%s\n%s"
                           % (r.status_code, r.text))
            return False
        return True

    def publish(self, deployment_creds):
        self.close()

        # update perms of the package
        self._update_perms_package()

        zip_api_url = '%s/api/zipdeploy?isAsync=true' % deployment_creds.scm_uri
        self.log.info("Publishing Function package from %s" % self.pkg.path)

        zip_file = open(self.pkg.path, 'rb').read()

        try:
            r = requests.post(zip_api_url, data=zip_file, timeout=300,
                              verify=self.enable_ssl_cert)
        except requests.exceptions.ReadTimeout:
            self.log.error(
                "Your Function App deployment timed out after 5 minutes. Try again.")

        r.raise_for_status()
        self.log.info("Function publish result: %s" % r.status_code)

    def close(self):
        self.pkg.close()

    @staticmethod
    def _get_site_packages():
        """Returns a list containing all global site-packages directories
        (and possibly site-python).

        For each directory present in the global ``PREFIXES``, this function
        will find its `site-packages` subdirectory depending on the system
        environment, and will return a list of full paths.
        """
        site_packages = []
        seen = set()
        prefixes = [sys.prefix, sys.exec_prefix]

        for prefix in prefixes:
            if not prefix or prefix in seen:
                continue
            seen.add(prefix)

            if sys.platform in ('os2emx', 'riscos'):
                site_packages.append(
                    os.path.join(prefix, "Lib", "site-packages"))
            elif os.sep == '/':
                site_packages.append(
                    os.path.join(prefix, "lib",
                                 "python" + sys.version[:3],
                                 "site-packages"))
                site_packages.append(os.path.join(prefix, "lib", "site-python"))
            else:
                site_packages.append(prefix)
                site_packages.append(
                    os.path.join(prefix, "lib", "site-packages"))
        return site_packages

    @staticmethod
    def _get_dependencies(entry_point):
        # Dynamically find all imported modules
        from modulefinder import ModuleFinder
        finder = ModuleFinder()
        finder.run_script(entry_point)
        imports = list(set([
            v.__file__.split('site-packages/', 1)[-1].split('/')[0]
            for (k, v) in finder.modules.items()
            if v.__file__ is not None and "site-packages" in v.__file__]))

        # Get just the modules, ignore the so and py now
        # (maybe useful for calls to add_file)
        modules = [i.split('.py')[0] for i in imports if ".so" not in i]

        so_files = list(set([
            v.__file__ for (k, v) in finder.modules.items()
            if v.__file__ is not None
            and "site-packages" in v.__file__ and ".so" in v.__file__]))

        return set(modules), so_files
def test_reverts_to_py_if_available(self):
    archive = PythonPackageArchive()
    py = self.py_with_pyc('foo.py')
    archive.add_py_file(py + 'c')
    archive.close()
    self.assertEqual(archive.get_filenames(), ['foo.py'])
def __init__(self, policy):
    self.log = logging.getLogger('custodian.azure.function_package')
    self.basedir = os.path.dirname(os.path.realpath(__file__))
    self.pkg = PythonPackageArchive()
    self.policy = policy
def make_open_archive(self, modules=(), cache_file=None):
    archive = PythonPackageArchive(modules=modules, cache_file=cache_file)
    self.addCleanup(archive.remove)
    return archive
def test_reverts_to_py_if_available(self):
    archive = PythonPackageArchive()
    py = self.py_with_pyc('foo.py')
    archive.add_py_file(py + 'c')
    archive.close()
    self.assertEqual(archive.get_filenames(), ['foo.py'])
def make_open_archive(self, *a, **kw):
    archive = PythonPackageArchive(*a, **kw)
    self.addCleanup(archive.remove)
    return archive
class FunctionPackage(object):

    def __init__(self, name, function_path=None, target_subscription_ids=None):
        self.log = logging.getLogger('custodian.azure.function_package')
        self.pkg = PythonPackageArchive()
        self.name = name
        self.function_path = function_path or os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'function.py')

        self.enable_ssl_cert = not distutils.util.strtobool(
            os.environ.get(ENV_CUSTODIAN_DISABLE_SSL_CERT_VERIFICATION, 'no'))

        if target_subscription_ids is not None:
            self.target_subscription_ids = target_subscription_ids
        else:
            self.target_subscription_ids = [None]

        if not self.enable_ssl_cert:
            self.log.warning('SSL Certificate Validation is disabled')

    def _add_functions_required_files(self, policy, queue_name=None):
        s = local_session(Session)
        for target_subscription_id in self.target_subscription_ids:
            name = self.name + ("_" + target_subscription_id if target_subscription_id else "")

            # generate and add auth
            self.pkg.add_contents(
                dest=name + '/auth.json',
                contents=s.get_functions_auth_string(target_subscription_id))

            self.pkg.add_file(self.function_path, dest=name + '/function.py')
            self.pkg.add_contents(dest=name + '/__init__.py', contents='')

            if policy:
                config_contents = self.get_function_config(policy, queue_name)
                policy_contents = self._get_policy(policy)

                self.pkg.add_contents(dest=name + '/function.json',
                                      contents=config_contents)
                self.pkg.add_contents(dest=name + '/config.json',
                                      contents=policy_contents)

                if policy['mode']['type'] == FUNCTION_EVENT_TRIGGER_MODE:
                    self._add_queue_binding_extensions()

        self._add_host_config()

    def _add_host_config(self):
        config = \
            {
                "version": "2.0",
                "healthMonitor": {
                    "enabled": True,
                    "healthCheckInterval": "00:00:10",
                    "healthCheckWindow": "00:02:00",
                    "healthCheckThreshold": 6,
                    "counterThreshold": 0.80
                },
                "functionTimeout": "00:05:00",
                "logging": {
                    "fileLoggingMode": "debugOnly"
                },
                "extensions": {
                    "http": {
                        "routePrefix": "api",
                        "maxConcurrentRequests": 5,
                        "maxOutstandingRequests": 30
                    }
                }
            }
        self.pkg.add_contents(dest='host.json', contents=json.dumps(config))

    def _add_queue_binding_extensions(self):
        bindings_dir_path = os.path.abspath(
            os.path.join(os.path.join(__file__, os.pardir),
                         'function_binding_resources'))
        bin_path = os.path.join(bindings_dir_path, 'bin')
        self.pkg.add_directory(bin_path)
        self.pkg.add_file(os.path.join(bindings_dir_path, 'extensions.csproj'))

    def get_function_config(self, policy, queue_name=None):
        config = \
            {
                "scriptFile": "function.py",
                "bindings": [{
                    "direction": "in"
                }]
            }

        mode_type = policy['mode']['type']
        binding = config['bindings'][0]

        if mode_type == FUNCTION_TIME_TRIGGER_MODE:
            binding['type'] = 'timerTrigger'
            binding['name'] = 'input'
            binding['schedule'] = policy['mode']['schedule']
        elif mode_type == FUNCTION_EVENT_TRIGGER_MODE:
            binding['type'] = 'queueTrigger'
            binding['connection'] = 'AzureWebJobsStorage'
            binding['name'] = 'input'
            binding['queueName'] = queue_name
        else:
            self.log.error("Mode not yet supported for Azure functions (%s)"
                           % mode_type)

        return json.dumps(config, indent=2)

    def _get_policy(self, policy):
        return json.dumps({'policies': [policy]}, indent=2)

    def _update_perms_package(self):
        os.chmod(self.pkg.path, 0o0644)

    @property
    def cache_folder(self):
        c7n_azure_root = os.path.dirname(__file__)
        return os.path.join(c7n_azure_root, 'cache')

    def build(self, policy, modules, non_binary_packages, excluded_packages, queue_name=None):
        wheels_folder = os.path.join(self.cache_folder, 'wheels')
        wheels_install_folder = os.path.join(self.cache_folder, 'dependencies')

        packages = \
            DependencyManager.get_dependency_packages_list(modules, excluded_packages)

        if not DependencyManager.check_cache(self.cache_folder,
                                             wheels_install_folder,
                                             packages):
            self.log.info("Cached packages not found or requirements were changed.")

            # If cache check fails, wipe all previous wheels, installations etc
            if os.path.exists(self.cache_folder):
                self.log.info("Removing cache folder...")
                shutil.rmtree(self.cache_folder)

            self.log.info("Preparing non binary wheels...")
            DependencyManager.prepare_non_binary_wheels(non_binary_packages, wheels_folder)

            self.log.info("Downloading wheels...")
            DependencyManager.download_wheels(packages, wheels_folder)

            self.log.info("Installing wheels...")
            DependencyManager.install_wheels(wheels_folder, wheels_install_folder)

            self.log.info("Updating metadata file...")
            DependencyManager.create_cache_metadata(self.cache_folder,
                                                    wheels_install_folder,
                                                    packages)

        for root, _, files in os.walk(wheels_install_folder):
            arc_prefix = os.path.relpath(root, wheels_install_folder)
            for f in files:
                dest_path = os.path.join(arc_prefix, f)
                if f.endswith('.pyc') or f.endswith('.c'):
                    continue
                f_path = os.path.join(root, f)
                self.pkg.add_file(f_path, dest_path)

        exclude = os.path.normpath('/cache/') + os.path.sep
        self.pkg.add_modules(lambda f: (exclude in f),
                             *[m.replace('-', '_') for m in modules])

        # add config and policy
        self._add_functions_required_files(policy, queue_name)

    def wait_for_status(self, deployment_creds, retries=10, delay=15):
        for r in range(retries):
            if self.status(deployment_creds):
                return True
            else:
                self.log.info('(%s/%s) Will retry Function App status check in %s seconds...'
                              % (r + 1, retries, delay))
                time.sleep(delay)
        return False

    def status(self, deployment_creds):
        status_url = '%s/api/deployments' % deployment_creds.scm_uri

        try:
            r = requests.get(status_url, timeout=30, verify=self.enable_ssl_cert)
        except requests.exceptions.ReadTimeout:
            self.log.error("Your Function app is not responding to a status request.")
            return False

        if r.status_code != 200:
            self.log.error("Application service returned an error.\n%s\n%s"
                           % (r.status_code, r.text))
            return False
        return True

    def publish(self, deployment_creds):
        self.close()

        # update perms of the package
        self._update_perms_package()

        zip_api_url = '%s/api/zipdeploy?isAsync=true' % deployment_creds.scm_uri
        self.log.info("Publishing Function package from %s" % self.pkg.path)

        zip_file = self.pkg.get_bytes()

        try:
            r = requests.post(zip_api_url, data=zip_file, timeout=300,
                              verify=self.enable_ssl_cert)
        except requests.exceptions.ReadTimeout:
            self.log.error("Your Function App deployment timed out after 5 minutes. Try again.")

        r.raise_for_status()
        self.log.info("Function publish result: %s" % r.status_code)

    def close(self):
        self.pkg.close()
class FunctionPackage(object):

    def __init__(self, name, function_path=None):
        self.log = logging.getLogger('custodian.azure.function_package')
        self.pkg = PythonPackageArchive()
        self.name = name
        self.function_path = function_path or os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'function.py')

    def _add_functions_required_files(self, policy):
        self.pkg.add_file(self.function_path, dest=self.name + '/function.py')
        self.pkg.add_contents(dest=self.name + '/__init__.py', contents='')

        self._add_host_config()

        if policy:
            config_contents = self.get_function_config(policy)
            policy_contents = self._get_policy(policy)

            self.pkg.add_contents(dest=self.name + '/function.json',
                                  contents=config_contents)
            self.pkg.add_contents(dest=self.name + '/config.json',
                                  contents=policy_contents)

    def _add_host_config(self):
        config = \
            {
                "http": {
                    "routePrefix": "api",
                    "maxConcurrentRequests": 5,
                    "maxOutstandingRequests": 30
                },
                "logger": {
                    "defaultLevel": "Trace",
                    "categoryLevels": {
                        "Worker": "Trace"
                    }
                },
                "queues": {
                    "visibilityTimeout": "00:00:10"
                },
                "swagger": {
                    "enabled": True
                },
                "eventHub": {
                    "maxBatchSize": 1000,
                    "prefetchCount": 1000,
                    "batchCheckpointFrequency": 1
                },
                "healthMonitor": {
                    "enabled": True,
                    "healthCheckInterval": "00:00:10",
                    "healthCheckWindow": "00:02:00",
                    "healthCheckThreshold": 6,
                    "counterThreshold": 0.80
                },
                "functionTimeout": "00:05:00"
            }
        self.pkg.add_contents(dest='host.json', contents=json.dumps(config))

    def get_function_config(self, policy):
        config = \
            {
                "scriptFile": "function.py",
                "bindings": [{
                    "direction": "in"
                }]
            }

        mode_type = policy['mode']['type']
        binding = config['bindings'][0]

        if mode_type == 'azure-periodic':
            binding['type'] = 'timerTrigger'
            binding['name'] = 'input'
            binding['schedule'] = policy['mode']['schedule']
        elif mode_type == 'azure-stream':
            binding['type'] = 'httpTrigger'
            binding['authLevel'] = 'anonymous'
            binding['name'] = 'input'
            binding['methods'] = ['post']

            config['bindings'].append({
                "name": "$return",
                "type": "http",
                "direction": "out"})
        else:
            self.log.error("Mode not yet supported for Azure functions (%s)"
                           % mode_type)

        return json.dumps(config, indent=2)

    def _get_policy(self, policy):
        return json.dumps({'policies': [policy]}, indent=2)

    def _add_cffi_module(self):
        """CFFI native bits aren't discovered automatically
        so for now we grab them manually from supported platforms"""
        self.pkg.add_modules('cffi')

        # Add native libraries that are missing
        site_pkg = FunctionPackage._get_site_packages()[0]

        # linux
        platform = sys.platform
        if platform == "linux" or platform == "linux2":
            for so_file in os.listdir(site_pkg):
                if fnmatch.fnmatch(so_file, '*ffi*.so*'):
                    self.pkg.add_file(os.path.join(site_pkg, so_file))
            self.pkg.add_directory(os.path.join(site_pkg, '.libs_cffi_backend'))
        # MacOS
        elif platform == "darwin":
            raise NotImplementedError('Cannot package Azure Function in MacOS host OS, '
                                      'please use linux.')
        # Windows
        elif platform == "win32":
            raise NotImplementedError('Cannot package Azure Function in Windows host OS, '
                                      'please use linux or WSL.')

    def _update_perms_package(self):
        os.chmod(self.pkg.path, 0o0644)

    def build(self, policy, entry_point=None, extra_modules=None):
        # Get dependencies for azure entry point
        entry_point = entry_point or \
            os.path.join(os.path.dirname(os.path.realpath(__file__)), 'entry.py')
        modules, so_files = FunctionPackage._get_dependencies(entry_point)

        # add all loaded modules
        modules.discard('azure')
        modules = modules.union({'c7n', 'c7n_azure', 'pkg_resources'})
        if extra_modules:
            modules = modules.union(extra_modules)

        self.pkg.add_modules(None, *modules)

        # adding azure manually
        # we need to ignore the __init__.py of the azure namespace for packaging
        # https://www.python.org/dev/peps/pep-0420/
        self.pkg.add_modules(lambda f: f == 'azure/__init__.py', 'azure')

        # add config and policy
        self._add_functions_required_files(policy)

        # generate and add auth
        s = local_session(Session)
        self.pkg.add_contents(dest=self.name + '/auth.json',
                              contents=s.get_auth_string())

        # cffi module needs special handling
        self._add_cffi_module()

    def wait_for_status(self, app_name, retries=5, delay=15):
        for r in range(retries):
            if self.status(app_name):
                return True
            else:
                self.log.info('Will retry Function App status check in %s seconds...' % delay)
                time.sleep(delay)
        return False

    def status(self, app_name):
        s = local_session(Session)
        status_url = 'https://%s.scm.azurewebsites.net/api/deployments' % (app_name)
        headers = {
            'Authorization': 'Bearer %s' % (s.get_bearer_token())
        }

        try:
            r = requests.get(status_url, headers=headers, timeout=30)
        except requests.exceptions.ReadTimeout:
            self.log.error("Your Function app is not responding to a status request.")
            return False

        if r.status_code != 200:
            self.log.error("Application service returned an error.\n%s\n%s"
                           % (r.status_code, r.text))
            return False
        return True

    def publish(self, app_name):
        self.close()

        # update perms of the package
        self._update_perms_package()

        s = local_session(Session)
        zip_api_url = 'https://%s.scm.azurewebsites.net/api/zipdeploy?isAsync=true' % (app_name)
        headers = {
            'Content-type': 'application/zip',
            'Authorization': 'Bearer %s' % (s.get_bearer_token())
        }
        self.log.info("Publishing package at: %s" % self.pkg.path)

        zip_file = open(self.pkg.path, 'rb').read()

        try:
            r = requests.post(zip_api_url, headers=headers, data=zip_file, timeout=300)
        except requests.exceptions.ReadTimeout:
            self.log.error("Your Function App deployment timed out after 5 minutes. Try again.")

        r.raise_for_status()
        self.log.info("Function publish result: %s %s" % (r.status_code, r.text))

    def close(self):
        self.pkg.close()

    @staticmethod
    def _get_site_packages():
        """Returns a list containing all global site-packages directories
        (and possibly site-python).

        For each directory present in the global ``PREFIXES``, this function
        will find its `site-packages` subdirectory depending on the system
        environment, and will return a list of full paths.
        """
        site_packages = []
        seen = set()
        prefixes = [sys.prefix, sys.exec_prefix]

        for prefix in prefixes:
            if not prefix or prefix in seen:
                continue
            seen.add(prefix)

            if sys.platform in ('os2emx', 'riscos'):
                site_packages.append(os.path.join(prefix, "Lib", "site-packages"))
            elif os.sep == '/':
                site_packages.append(os.path.join(prefix, "lib",
                                                  "python" + sys.version[:3],
                                                  "site-packages"))
                site_packages.append(os.path.join(prefix, "lib", "site-python"))
            else:
                site_packages.append(prefix)
                site_packages.append(os.path.join(prefix, "lib", "site-packages"))
        return site_packages

    @staticmethod
    def _get_dependencies(entry_point):
        # Dynamically find all imported modules
        from modulefinder import ModuleFinder
        finder = ModuleFinder()
        finder.run_script(entry_point)
        imports = list(set([v.__file__.split('site-packages/', 1)[-1].split('/')[0]
                            for (k, v) in finder.modules.items()
                            if v.__file__ is not None and "site-packages" in v.__file__]))

        # Get just the modules, ignore the so and py now
        # (maybe useful for calls to add_file)
        modules = [i.split('.py')[0] for i in imports if ".so" not in i]

        so_files = list(set([v.__file__ for (k, v) in finder.modules.items()
                             if v.__file__ is not None
                             and "site-packages" in v.__file__ and ".so" in v.__file__]))

        return set(modules), so_files
def test_fails_if_py_not_available(self):
    archive = PythonPackageArchive()
    py = self.py_with_pyc('foo.py')
    os.unlink(py)
    self.assertRaises(IOError, archive.add_py_file, py + 'c')
def test_can_add_py_file(self):
    archive = PythonPackageArchive()
    archive.add_py_file(self.py_with_pyc('foo.py'))
    archive.close()
    self.assertEqual(archive.get_filenames(), ['foo.py'])
def __init__(self, name, function_path=None):
    self.log = logging.getLogger('custodian.azure.function_package')
    self.pkg = PythonPackageArchive()
    self.name = name
    self.function_path = function_path or os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'function.py')
def test_can_add_py_file(self):
    archive = PythonPackageArchive()
    archive.add_py_file(self.py_with_pyc("foo.py"))
    archive.close()
    self.assertEqual(archive.get_filenames(), ["foo.py"])
def get_archive(config):
    required = ['ldap3', 'pyasn1', 'jinja2', 'markupsafe']
    remove = ['_yaml.so', 'c7n.egg-link']

    def lib_filter(root, dirs, files):
        for f in tuple(files):
            if f.endswith('.pyo'):
                files.remove(f)
        for r in remove:
            if r in files:
                files.remove(r)
        if os.path.basename(root) == 'site-packages':
            for n in tuple(dirs):
                if n not in required:
                    dirs.remove(n)
        return dirs, files

    archive = PythonPackageArchive(
        os.path.dirname(__file__),
        skip='*.pyc',
        lib_filter=lib_filter)
    archive.create()

    template_dir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'msg-templates'))

    for t in os.listdir(template_dir):
        with open(os.path.join(template_dir, t)) as fh:
            archive.add_contents('msg-templates/%s' % t, fh.read())

    archive.add_contents('config.json', json.dumps(config))
    archive.add_contents('periodic.py', entry_source)
    archive.close()
    return archive