def has_secrets_env_file(self, pipeline_data):
    """Return True if any parsed service lists 'secrets.decrypted.env' in its env_file."""
    return any(
        'env_file' in service
        and 'secrets.decrypted.env' in service['env_file']
        for _, service in pipeline_data_utils.get_parsed_services(pipeline_data)
    )
def verify_parsed_environment(self, pipeline_data):
    """Validate that every service's `environment` section is a mapping.

    Raises:
        DeploymentError: if a service declares `environment` as anything
            other than a dict (e.g. the YAML list form `- NAME=value`).
    """
    for _, service in pipeline_data_utils.get_parsed_services(pipeline_data):
        # isinstance with the bare type is the idiomatic form; the original
        # used a one-element tuple `(dict,)` to the same effect.
        if 'environment' in service and not isinstance(service['environment'], dict):
            raise DeploymentError('Malformed docker-stack file. '
                                  'Service environment should be on the '
                                  'format `name: value`')
def has_service_label(self, pipeline_data, label_name):
    """Return True if the FIRST parsed service carries a label named `label_name`.

    Only the first service is inspected; labels are 'name=value' strings and
    only the name part (before the first '=') is compared.
    """
    for _, service in pipeline_data_utils.get_parsed_services(pipeline_data):
        if 'labels' not in service:
            # Only try the first service
            return False
        label_names = (label.split('=')[0] for label in service['labels'])
        return label_name in label_names
    return False
def get_semver_version_from_env(self, pipeline_data, service_name, semver_env_key):
    """Look up the value of `semver_env_key` in the named service's environment.

    Args:
        pipeline_data: parsed pipeline state.
        service_name: name of the service whose environment is searched.
        semver_env_key: environment variable name referenced from the image url.

    Returns:
        The environment value bound to `semver_env_key`.

    Raises:
        exceptions.DeploymentError: if the key is not defined for the service.
    """
    for name, service in pipeline_data_utils.get_parsed_services(pipeline_data):
        if name == service_name:
            # Environment always exists - set in init_service_pipeline_data.py
            for env_var, env_val in service['environment'].items():
                if env_var == semver_env_key:
                    return env_val
    # Fixed doubled negative in the original message ("could not be not found").
    raise exceptions.DeploymentError(
        'SemVer definition `{}` found in image url but could not be '
        'found under *environment* in docker-stack.yml'.format(semver_env_key))
def test_get_parsed_services(self):
    """get_parsed_services yields (name, service) pairs in stack-file order."""
    pipeline_data = {
        data_defs.STACK_FILE_PARSED_CONTENT:
            mock_test_data.get_parsed_stack_content()
    }
    service_data = list(
        pipeline_data_utils.get_parsed_services(pipeline_data))
    for index, expected_name in enumerate(['web', 'api']):
        name, service = service_data[index]
        self.assertEqual(name, expected_name)
        self.assertIsNotNone(service)
def get_volume_tuples(self, pipeline_data):
    """Collect volume tuples from every parsed service.

    Each raw volume entry is converted via `self.get_volume_tuple`; entries
    that convert to a falsy value are skipped (and logged).
    """
    volumes = []
    for _, service in pipeline_data_utils.get_parsed_services(pipeline_data):
        for raw_volume in service.get('volumes', []):
            volume_tuple = self.get_volume_tuple(raw_volume)
            if not volume_tuple:
                self.log.debug('Skipped volume %s', volume_tuple)
                continue
            volumes.append(volume_tuple)
            self.log.debug('Added volume %s', volume_tuple)
    return volumes
def get_combined_service_labels(pipeline_data):
    """Merge 'name=value' labels from all parsed services into one dict.

    Repeated label names accumulate their values as a comma-separated
    string, e.g. {'label1': 'value1,value2', ...}. Values are taken from
    the text after the first '=' with surrounding double quotes stripped;
    non-string or '='-less entries are ignored.
    """
    labels = {}
    for _, service in pipeline_data_utils.get_parsed_services(pipeline_data):
        for label in service.get('labels', []):
            if isinstance(label, str) and '=' in label:
                name, value = label.split('=', 1)
                value = value.strip('"')
                # Append with a comma when a non-empty value is already
                # stored; otherwise (first occurrence, or previous value
                # was empty) start fresh. The original seeded new keys
                # with a placeholder `{}` dict that was immediately
                # replaced by a string — dropped here.
                if labels.get(name):
                    labels[name] = f'{labels[name]},{value}'
                else:
                    labels[name] = value
    # labels = {'label1':'value1','value2',...}
    return labels
def run_step(self, pipeline_data):
    """Parse image info (registry, name, version) for every service and
    store it on the matching entry under pipeline_data[SERVICES]."""
    for name, service in pipeline_data_utils.get_parsed_services(pipeline_data):
        # Presumably validates/raises when the service lacks an image
        # definition — defined elsewhere on this class.
        self.has_image(service)
        image_data = {
            data_defs.IMG_REGISTRY: self.parse_registry(service),
            data_defs.IMG_NAME: self.parse_image_name(service),
            data_defs.IMG_VERSION: self.parse_version(service),
            data_defs.IMG_IS_SEMVER: False,
        }
        index = service_data.get_service_index(pipeline_data, name)
        pipeline_data[data_defs.SERVICES][index][data_defs.S_IMAGE] = image_data
        self.log.debug('Image data set to "%s"', image_data)
    return pipeline_data
def run_step(self, pipeline_data):
    """Initialise the per-service data structures on pipeline_data.

    Builds one skeleton dict per parsed service (name, empty image/env,
    labels and deploy-labels copied from the stack file) and resets the
    WAS_DEPLOYED flag.
    """
    services = []
    for name, service in pipeline_data_utils.get_parsed_services(pipeline_data):
        entry = {
            data_defs.S_NAME: name,
            data_defs.S_IMAGE: {},
            data_defs.S_ENVIRONMENT: {},
            data_defs.S_LABELS: service.get('labels', []),
            data_defs.S_DEPLOY_LABELS: [],
        }
        if 'deploy' in service and 'labels' in service['deploy']:
            entry[data_defs.S_DEPLOY_LABELS] = service['deploy']['labels']
        services.append(entry)
    pipeline_data[data_defs.SERVICES] = services
    pipeline_data[data_defs.WAS_DEPLOYED] = False
    self.log.debug('Service data set to "%s"', services)
    return pipeline_data
def run_step(self, pipeline_data):
    """Store the replica count read from the parsed services.

    NOTE(review): each iteration overwrites REPLICAS, so only the last
    parsed service's value survives — presumably stacks here contain a
    single service; confirm against callers.
    """
    for _, parsed_service in pipeline_data_utils.get_parsed_services(pipeline_data):
        replicas = self.get_replicas(parsed_service)
        pipeline_data[data_defs.REPLICAS] = replicas
    return pipeline_data
def run_step(self, pipeline_data):
    """Validate the restart policy of every parsed service.

    Delegates to has_restart_policy / verify_restart_policy, which
    presumably raise on invalid or missing configuration.
    """
    for _, svc in pipeline_data_utils.get_parsed_services(pipeline_data):
        self.has_restart_policy(svc)
        restart_policy = svc['deploy']['restart_policy']
        self.verify_restart_policy(restart_policy)
    return pipeline_data
def run_step(self, pipeline_data):
    """Validate the logging policy of every parsed service.

    Delegates to has_logging_policy / verify_logging_policy, which
    presumably raise on invalid or missing configuration.
    """
    for _, svc in pipeline_data_utils.get_parsed_services(pipeline_data):
        self.has_logging_policy(svc)
        logging_policy = svc['logging']
        self.verify_logging_policy(logging_policy)
    return pipeline_data