def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files, directories or distutils2 packages (compressed or not).

    items - iterable of source names (archive paths, directories or .py files)
    base_dir - base directory imports are resolved against
    work_dir - directory archives are decompressed into
    """
    # NOTE(review): the docstring mentions module names but there is no
    # module-import branch below - confirm whether a trailing else calling
    # an import-from-module helper is expected in this variant.
    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)
        is_internal = item_name.startswith('zato')

        # distutils2 archive, decompress and import from the newly created directory ..
        if is_archive_file(item_name):
            new_path = self.decompress(item_name, work_dir)
            self.import_services_from_dist2_directory(new_path)

        # .. a regular directory or a Distutils2 one ..
        elif os.path.isdir(item_name):
            try:
                # Try a Distutils2-style import first ..
                self.import_services_from_directory(item_name, base_dir, True)

            # Was: 'except NoDistributionFound, e' - a Python-2-only form;
            # 'as' is valid on Python 2.6+ and 3.x alike.
            except NoDistributionFound:
                # format_exc() takes an optional limit, not an exception object,
                # so it must be called with no arguments here.
                msg = 'Caught an exception e=[{}]'.format(format_exc())
                logger.log(TRACE1, msg)

                # .. fall back to a plain (non-distribution) directory import.
                self.import_services_from_directory(item_name, base_dir, False)

        # .. a .py/.pyw
        elif is_python_file(item_name):
            self.import_services_from_file(item_name, is_internal, base_dir)
def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files, directories or distutils2 packages (compressed or not).

    items - iterable of source names (archive paths, directories or .py files)
    base_dir - base directory imports are resolved against
    work_dir - directory archives are decompressed into
    """
    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)
        is_internal = item_name.startswith('zato')

        # distutils2 archive, decompress and import from the newly created directory ..
        if is_archive_file(item_name):
            new_path = self.decompress(item_name, work_dir)
            self.import_services_from_dist2_directory(new_path)

        # .. a regular directory or a Distutils2 one ..
        elif os.path.isdir(item_name):
            try:
                # Try a Distutils2-style import first ..
                self.import_services_from_directory(item_name, base_dir, True)

            # Was: 'except NoDistributionFound, e' - a Python-2-only form;
            # 'as' is valid on Python 2.6+ and 3.x alike.
            except NoDistributionFound:
                # format_exc() takes an optional limit, not an exception object,
                # so it must be called with no arguments here.
                msg = 'Caught an exception e=[{}]'.format(format_exc())
                logger.log(TRACE1, msg)

                # .. fall back to a plain (non-distribution) directory import.
                self.import_services_from_directory(item_name, base_dir, False)

        # .. a .py/.pyw
        elif is_python_file(item_name):
            self.import_services_from_file(item_name, is_internal, base_dir)
def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files or distutils2 packages (compressed or not).
    """
    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)
        is_internal = item_name.startswith('zato')

        if is_archive_file(item_name):
            # A compressed distutils2 archive - unpack it first and deploy
            # from the freshly created directory.
            self.import_services_from_directory(self.decompress(item_name, work_dir))

        elif os.path.isdir(item_name):
            # A plain directory on disk (possibly distutils2-flavoured).
            self.import_services_from_directory(item_name)

        elif is_python_file(item_name):
            # A single .py/.pyw source file.
            self.import_services_from_file(item_name, is_internal, base_dir)

        else:
            # Anything else is assumed to be an importable module name.
            self.import_services_from_module(item_name, is_internal)
def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files, directories or distutils2 packages (compressed or not).

    Returns a list of everything that was deployed.
    """
    deployed = []

    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)
        is_internal = item_name.startswith('zato')

        # Pick the importer matching the kind of source we were given ..
        if os.path.isdir(item_name):
            # .. a regular directory ..
            imported = self.import_services_from_directory(item_name, base_dir)
        elif is_python_file(item_name):
            # .. a single .py/.pyw file ..
            imported = self.import_services_from_file(item_name, is_internal, base_dir)
        else:
            # .. otherwise it must be a module object.
            imported = self.import_services_from_module(item_name, is_internal)

        # .. and accumulate whatever it produced.
        deployed.extend(imported)

    return deployed
def import_services_from_anywhere(self, items, base_dir, work_dir=None): """ Imports services from any of the supported sources, be it module names, individual files, directories or distutils2 packages (compressed or not). """ # type: (Any, text, text) -> DeploymentInfo items = items if isinstance(items, (list, tuple)) else [items] to_process = [] for item in items: if has_debug: logger.debug('About to import services from:`%s`', item) is_internal = item.startswith('zato') # A regular directory if os.path.isdir(item): to_process.extend( self.import_services_from_directory(item, base_dir)) # .. a .py/.pyw elif is_python_file(item): to_process.extend( self.import_services_from_file(item, is_internal, base_dir)) # .. must be a module object else: to_process.extend( self.import_services_from_module(item, is_internal)) total_size = 0 to_process = set(to_process) to_process = list(to_process) for item in to_process: # type: InRAMService total_size += item.source_code_info.len_source info = DeploymentInfo() info.to_process[:] = to_process info.total_size = total_size info.total_size_human = naturalsize(info.total_size) with closing(self.odb.session()) as session: # Save data to both ODB and RAM now self._store_in_odb(session, info.to_process) self._store_in_ram(session, info.to_process) # Postprocessing, like rate limiting which needs access to information that becomes # available only after a service is saved to ODB. self.after_import(session, info) # Done with everything, we can commit it now session.commit() # Done deploying, we can return return info
def deploy_package(self, package_id, session):
    """ Fetches a deployment package by its ID and deploys it if the payload
    is a Python file or an archive; otherwise flags the package as ignored.

    package_id - ID of the package in the ODB
    session - an active ODB session
    """
    dp = self.get_package(package_id, session)

    if is_archive_file(dp.payload_name) or is_python_file(dp.payload_name):
        self._deploy_package(session, package_id, dp.payload_name, dp.payload)
    else:
        # This shouldn't really happen at all because the pickup notifier is to
        # filter such things out but life is full of surprises
        self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.IGNORED)

        # .warn is a deprecated alias of .warning; also use lazy %-style args
        # so the message is only formatted if the level is enabled.
        self.logger.warning(
            'Ignoring package id:[%s], payload_name:[%s], not a Python file nor an archive',
            dp.id, dp.payload_name)
def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files, directories or distutils2 packages (compressed or not).
    """
    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)
        is_internal = item_name.startswith('zato')

        # A directory of service sources.
        if os.path.isdir(item_name):
            self.import_services_from_directory(item_name, base_dir)
            continue

        # A single .py/.pyw file.
        if is_python_file(item_name):
            self.import_services_from_file(item_name, is_internal, base_dir)
            continue

        # Neither a directory nor a Python file - must be a module object.
        self.import_services_from_module(item_name, is_internal)
def _deploy_package(self, session, package_id, payload_name, payload):
    """ Deploy a package, either a plain Python file or an archive, and update
    the deployment status.

    session - an active ODB session
    package_id - ID of the package being deployed
    payload_name - file name of the payload
    payload - the payload's contents
    """
    success = False
    current_work_dir = self.server.hot_deploy_config.current_work_dir

    if is_python_file(payload_name):
        file_name = os.path.join(current_work_dir, payload_name)
        success = self._deploy_file(current_work_dir, payload, file_name)
    elif is_archive_file(payload_name):
        success = self._deploy_archive(current_work_dir, payload, payload_name)

    if success:
        self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.DEPLOYED)
        # Lazy %-style logging args - formatted only if the level is enabled.
        self.logger.info('Uploaded package id:[%s], payload_name:[%s]', package_id, payload_name)
    else:
        # .warn is a deprecated alias of .warning.
        self.logger.warning(
            'Package id:[%s], payload_name:[%s] has not been deployed', package_id, payload_name)
def _deploy_package(self, session, package_id, payload_name, payload):
    """ Deploy a package, either a plain Python file or an archive, and update
    the deployment status.

    session - an active ODB session
    package_id - ID of the package being deployed
    payload_name - file name of the payload
    payload - the payload's contents
    """
    success = False
    current_work_dir = self.server.hot_deploy_config.current_work_dir

    if is_python_file(payload_name):
        file_name = os.path.join(current_work_dir, payload_name)
        success = self._deploy_file(current_work_dir, payload, file_name)
    elif is_archive_file(payload_name):
        success = self._deploy_archive(current_work_dir, payload, payload_name)

    if success:
        self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.DEPLOYED)
        # Lazy %-style logging args - formatted only if the level is enabled.
        self.logger.info('Uploaded package id:[%s], payload_name:[%s]', package_id, payload_name)
    else:
        # .warn is a deprecated alias of .warning.
        self.logger.warning(
            'Package id:[%s], payload_name:[%s] has not been deployed', package_id, payload_name)
def _should_process(self, event_name):
    """ Returns True if the file name's is either a Python source code file
    we can handle or an archive that can be uncompressed.
    """
    # Check the cheaper Python-source predicate first, falling back to the
    # archive check - mirrors 'a or b' including its return-value semantics.
    result = is_python_file(event_name)
    if not result:
        result = is_archive_file(event_name)
    return result