def finish_lambda(self, context, result):
    """Finish after running"""
    context.result = result
    if self._form.finish:
        context.interface.output = Dict()
        # do this explicitly to avoid it re-indexing Dict()
        mylocals = Dict(dims='', context='')
        mylocals.dims = self.dims
        mylocals.context = context
        context = dex_intersect(self._cfg, self._form.finish, mylocals=mylocals)
    else:
        context.interface.output = result

    if not self._interface or not self._interface.get('Output'):
        log(type="warning", msg="No interface.Output definition, not processing output data")
        if context.interface.output:
            print("{}".format(context.interface.output))
        return {}

    result = gql_validate.validate(self._interface, 'Output',
                                   dictlib.export(context.interface.output))

    # TODO: Make this pivot off a config on the polyform
    if LOGDATA:
        log(type="data", response=json.dumps(result))
        # print("result: {}".format(result))

    return result

def __init__(self, func, *args, **kwargs):  # pylint: disable=unused-argument
    self.reqid = str(uuid4())
    self._func = func
    self._interface = Dict(event=None, biome=None)
    self.dims = Dict(model=dict(trained=None, csv=None))
    if kwargs.get('faas') == 'lambda':
        self.faas = 'lambda'
    else:
        raise TypeError("Polyform faas type is unknown, look for: @PolyformDecorator(faas=?)")
    self._args = args
    self._kwargs = kwargs

def __init__(self, poly=None, type=None, build=True, lang=None, cache=True,
             clone=None, test=False):
    if clone:
        self.hash = clone.hash
        self._info = copy.deepcopy(clone._info)  # pylint: disable=protected-access
        self._run = copy.deepcopy(clone._run)  # pylint: disable=protected-access
        self._opts = copy.deepcopy(clone._opts)  # pylint: disable=protected-access
        self._env = copy.deepcopy(clone._env)  # pylint: disable=protected-access
        # mnts is just a dict, not a Dict()
        self._run.mnts = copy.deepcopy(clone._run.mnts)  # pylint: disable=protected-access
    else:
        # TODO: ideally the hash isn't of the entire config, but only the form in question
        self.hash = hashlib.sha224(to_yaml(poly._config).encode())  # pylint: disable=protected-access
        self._info = Dict(fname='', owd=os.getcwd(), in_build=False, type=type,
                          build=build, lang=lang, poly=poly, test=test)
        self._run = Dict(img='', args=dict(img=list(), cmd=list()))
        self._run['mnts'] = dict()  # keep this one a dict
        self._opts = Dict(cache=cache, cleanup=True)
        self._env = dict()

    for key in ('POLYTEST', 'LOGDATA', 'DEBUG', 'AWS_PROFILE', 'AWS_DEFAULT_REGION',
                'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY'):
        if os.environ.get(key):
            self._env[key] = os.environ.get(key)

    if not self._opts.cache:
        self._rebuild = True
    if test:
        self._info.test = test
    if build:
        self._info.build = build
    if type:
        self._info.type = type
    if lang:
        self._info.lang = lang
    if poly:
        self._info.poly = poly

    getattr(self, "as_" + self._info.type)()

def gather_lambda(self, event, aws_context, **_kwargs):
    """Gather data expectations prior to running"""
    mylocals = Dict(
        context=dict(
            interface=dict(
                event=event,
                input={},
                output={},
                biome=dict(aws=aws_context)
            )
        ),
        dims=self.dims
    )

    if not self._interface or not self._interface.get('Input'):
        log(type="warning", msg="No interface.Input definition, not processing input data")
    else:
        body = event.get('parsed_body')
        if not body:
            body = event.get('body')
        if body is None:
            raise DataExpectationFailed("no payload")
        if LOGDATA:
            log(type="data", preExpect=json.dumps(body))
        mylocals.context.interface.input = gql_validate.validate(
            self._interface, 'Input', body
        )
        if LOGDATA:
            log(type="data", postExpect=json.dumps(body))

    # should check headers and give better errors, but assume it's JSON
    return dex_intersect(self._cfg, self._form.expect, mylocals=mylocals)

def load_plug(plug, pkg, server):
    """Import a pluggable module from `pkg` and return it with a Handler instance"""
    mod = importlib.import_module(pkg + '.' + plug)
    print("==> creating instance of plug " + plug)
    return Dict(
        mod=mod,
        instance=mod.Handler(server),
        name=plug
    )

def add_endpoint(self, endpoint, mod):
    """add an endpoint as a pluggable module"""
    # pylint: disable=no-member
    # mod = importlib.import_module('endpoints.' + endpoint, package='.')  # , package=__name__)
    route = self.conf.server.route_base + "/" + mod.__name__.split(".")[-1]
    print("route=" + route)
    handler = mod.Handler(server=self, route=route)
    print("handler={}".format(handler))
    cherrypy.tree.mount(handler, route, self.endpoint_conf)
    self.endpoints.append(Dict(name=endpoint, mod=mod, handler=handler, route=route))

def _parse_language(self, key, value):
    """Split a '<name>-<version>' language value into its parts"""
    self._is_type(key, value, str)
    name, vers = value.split("-")
    major, minor = (vers + ".").split(".")[0:2]
    return Dict(full=value, name=name, ver=vers, major=major, minor=minor,
                lambci=name + vers)

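# Illustrative sketch (not part of the parser class above): the same
# name/version split as _parse_language, assuming language values shaped like
# "python-3.7"; the helper name split_language is hypothetical.
def split_language(value):
    name, vers = value.split("-")
    # pad with "." so a bare major version such as "3" still unpacks to two parts
    major, minor = (vers + ".").split(".")[0:2]
    return dict(full=value, name=name, ver=vers, major=major, minor=minor,
                lambci=name + vers)

# split_language("python-3.7") ->
#   {'full': 'python-3.7', 'name': 'python', 'ver': '3.7', 'major': '3',
#    'minor': '7', 'lambci': 'python3.7'}
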
def initialize():
    """module initialization--cache some things"""
    base = "./polys"

    # just sugar
    def pjoin(*args):
        return os.path.join(*args)

    cwd = os.getcwd()
    polys = list()
    for pname in os.listdir(base):
        if os.path.isdir(pjoin(base, pname)):
            print("FOUND polyform " + pname)
            for fname in os.listdir(pjoin(base, pname)):
                if os.path.isdir(pjoin(base, pname, fname)):
                    print(" facet " + pname + "." + fname)
                    polys.append(
                        Dict(path=pjoin(base, pname, fname), name=pname + "." + fname))

    for poly in polys:
        pname, fname = poly.name.split(".")
        bpath = pjoin(cwd, poly.path)
        path = pjoin(bpath, "_polyform.json")
        with open(path) as conf:
            pconf = json.load(conf)
        os.chdir(pjoin(cwd, poly.path))
        sys.path.append('.')
        form = pconf['forms'][pconf['target']]
        run = re.sub(r'[^a-z0-9_.]+', '', form['run'])
        modexp = run.split(".")
        modpath = ".".join(modexp[0:-1])
        importlib.invalidate_caches()
        mod = importlib.import_module(modpath)
        sys.path = sys.path[:-1]
        os.chdir(cwd)
        POLYS[poly.name] = Dict(conf=pconf, mod=mod, path=bpath, run=modexp[-1])

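# Minimal sketch of the facet scan above on its own, assuming the same
# ./polys/<polyform>/<facet>/ directory layout; scan_facets is a hypothetical
# helper name used only for illustration.
import os

def scan_facets(base="./polys"):
    """Yield (path, 'polyform.facet') for each facet directory under base."""
    for pname in os.listdir(base):
        if not os.path.isdir(os.path.join(base, pname)):
            continue
        for fname in os.listdir(os.path.join(base, pname)):
            if os.path.isdir(os.path.join(base, pname, fname)):
                yield os.path.join(base, pname, fname), pname + "." + fname
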
def export_polyconfig(poly, form):
    """create a trimmed polyform config and store it as json, to go within the func.zip"""
    config = poly._export()  # pylint: disable=protected-access
    config['target'] = form
    fobj = config['forms'][form]
    config['forms'] = Dict()
    config['forms'][form] = fobj
    rds = config['resources']['datastores']
    for name in rds:
        if isinstance(rds[name], dict):
            for key in list(rds[name].keys()):
                if key not in ('role', 'driver', 'config'):
                    del rds[name][key]
    return config

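# A small sketch of the datastore trimming performed above, using plain dicts
# and a hypothetical datastore entry, to show that only 'role', 'driver' and
# 'config' keys survive the export; _trim_datastores is illustrative only.
def _trim_datastores(datastores):
    for store in datastores.values():
        if isinstance(store, dict):
            for key in list(store.keys()):
                if key not in ('role', 'driver', 'config'):
                    del store[key]
    return datastores

# _trim_datastores({'scores': {'driver': 's3', 'role': 'poly-data', 'comment': 'drop me'}})
#   -> {'scores': {'driver': 's3', 'role': 'poly-data'}}
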
def dex_intersect(polyform, dex_exprs, mylocals=None):
    """
    evaluate an intersection's data expectations
    """
    if not mylocals:
        raise AttributeError("Missing mylocals={}")

    # mylocals.update(Dict(
    #     interface=Dict(),
    #     Output=Dict(),
    #     Result=Dict()
    # ))
    mylocals['context'].update(
        creator=None,
        invoker=None,
        requestor=None,
        appexdev=None,
        polydev=Dict(id=polyform.meta.owner)
    )
    mylocals = dex_eval_locals(mylocals)

    try:
        row = 0
        for expr in dex_exprs:
            if DEBUG:
                print(">>> {}".format(mylocals['context']))
                print(">>> {}".format(expr))
            row += 1
            # lift context up into primary locals
            mylocals.update(mylocals['context'])
            result = eval(expr, mylocals)  # pylint: disable=eval-used
            # why does pandas.DataFrame think it's special, gah
            if not isinstance(result, pandas.DataFrame) and not result:
                raise DEXError(nbr=row, expr=expr, status="not-true",
                               msg="Expression did not result in a true value")
    except DEXError:
        raise
    except Exception as err:  # pylint: disable=broad-except
        if DEBUG:
            traceback.print_exc()
        raise DEXError(nbr=row, expr=expr, status="error", error=err)

    return mylocals['context']

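# Condensed sketch of the evaluation loop in dex_intersect, without the
# polyform context plumbing; the expressions and namespace below are
# hypothetical examples of data expectations, and check_expectations is an
# illustrative name, not project API.
def check_expectations(exprs, namespace):
    for row, expr in enumerate(exprs, start=1):
        result = eval(expr, dict(namespace))  # pylint: disable=eval-used
        if not result:
            raise ValueError("expectation {} failed: {}".format(row, expr))
    return True

# check_expectations(["input['year'] >= 1990", "len(input['city']) > 0"],
#                    {'input': {'year': 2001, 'city': 'Austin'}})
#   -> True
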
def __call__(self, *args, **kwargs):
    """
    A polyform wrapper, for some convenience steps
    """
    try:
        with open("_polyform.json") as infile:
            self._cfg = Dict(json.load(infile))

        # TODO NEXT: AUTHENTICATE
        log(type="exec", msg="Starting Gather")
        context = self.gather(*args, **kwargs)
        log(type="exec", msg="Starting Function")
        result = self._func(context=context, dims=self.dims)
        if not isinstance(result, Result):
            raise DataExpectationFailed("function returned a non Result() object")
        return self.finish(context, result)
    except DEXError as err:
        import traceback
        print(traceback.format_exc())
        raise DataExpectationFailed(err.message)

def cmd_deploy(polyform, target, handler):
    """
    deploy a lambda function
    """
    name = "polyform-{}-{}".format(polyform.meta.name, target)
    print(name)
    iam = boto3.client('iam')
    sls = boto3.client('lambda')
    func = get_func(sls, name)  # already defined?
    if not func:
        # define it
        pass  # sls.create_func(

    bucket = get_s3_bucket(polyform, target, handler)
    form = polyform.forms[target]
    deploy = Dict(form.deploy['aws'])
    timeout = deploy.get('timeout', 60)
    if timeout > 300:
        sys.exit("Timeout greater than 300 is disallowed")

    env = deploy.get('env', {})
    if isinstance(env, str):
        env = [env]
    if env is None:
        env = {}
    elif isinstance(env, list):
        lines = env  # iterate the normalized list, not deploy.get('env') again
        env = {}
        for line in lines:
            if not line or "=" not in line:
                sys.exit("Invalid deploy.env value for form, not KEY=VALUE")
            # use a distinct variable so the function name isn't clobbered
            key, value = line.split("=", 1)
            if value[0] == '"':
                value = json.loads(value)
            env[key] = value
        print(env)
    elif not isinstance(env, dict):
        sys.exit("Invalid deploy.env for form, not dictionary or list of KEY=VALUE")

    code_data = get_build(target, 'func.latest.zip')
    libs_data = get_build(target, 'libs.latest.zip')
    role = iam.get_role(RoleName=POLYROLE)
    tags = deploy.get('tags', {})

    s3c = S3(schema=dict(Bucket=bucket))
    # print('create s3db bucket=' + s3c.bucket + '...')
    aws_trap('Bucket', s3c.create_bucket)

    print(polyform.meta.__dict__)
    layer = aws_trap("Function Libs", sls.publish_layer_version,
                     LayerName=name + "-libs",
                     Content=dict(
                         S3Bucket=bucket,
                         S3Key=name + "-libs",
                         ZipFile=libs_data
                     ),
                     CompatibleRuntimes=[polyform.meta.language.lambci]
                     )
    print(layer)

    return aws_trap("Function", sls.create_function,
                    FunctionName=name,
                    Runtime=polyform.meta.language.lambci,
                    Role=role['Role']['Arn'],
                    Handler=handler,
                    Code=dict(ZipFile=code_data),
                    Timeout=timeout,
                    Environment=dict(Variables=env),
                    Tags=tags
                    )

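# Standalone sketch of the deploy.env list handling in cmd_deploy: KEY=VALUE
# lines become a dict for Lambda's Environment, and double-quoted values are
# JSON-decoded. Illustrative only (parse_env_lines is a hypothetical name);
# errors raise here instead of calling sys.exit().
import json

def parse_env_lines(lines):
    env = {}
    for line in lines:
        if not line or "=" not in line:
            raise ValueError("invalid deploy.env value, not KEY=VALUE: {!r}".format(line))
        key, value = line.split("=", 1)
        if value[0] == '"':
            value = json.loads(value)
        env[key] = value
    return env

# parse_env_lines(['LOGDATA=1', 'GREETING="hello world"'])
#   -> {'LOGDATA': '1', 'GREETING': 'hello world'}
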
def interface(indata):
    """
    >>> interface('''
    ... type NestedValue {
    ...   balance: Int!
    ... }
    ... type Input {
    ...   city: String!
    ...   state: String!
    ...   year: Int
    ...   moar: NestedValue!
    ... }
    ... type Output {
    ...   score: Float
    ... }
    ... ''')
    {'ast': {}, 'val': {'types': {'NestedValue': {'balance': {'nullok': False, 'type': 'Int'}}, 'Input': {'city': {'nullok': False, 'type': 'String'}, 'state': {'nullok': False, 'type': 'String'}, 'year': {'nullok': True, 'type': 'Int'}, 'moar': {'nullok': False, 'type': 'NestedValue'}}, 'Output': {'score': {'nullok': True, 'type': 'Float'}}}, 'ops': {}}}
    """
    if not indata:
        return []
    parsed = graphql.parse(indata)
    out = Dict(types={}, ops={})
    # for doc in graphql.parse(indata).definitions:
    for doc in parsed.definitions:
        if isinstance(doc, ObjectTypeDefinition):
            fields = Dict()
            for field in doc.fields:
                newfield = Dict(nullok=True)
                ftype = field.type
                if isinstance(ftype, NonNullType):
                    newfield.nullok = False
                    ftype = ftype.type
                newfield.type = ftype.name.value
                fields[field.name.value] = newfield
            out.types[doc.name.value] = fields
        # elif isinstance(doc, OperationDefinition):
        #     out.ops[doc.name.value] = doc
        #     for oper in doc.
        #
        # OperationDefinition(
        #     operation='query',
        #     name=Name(value='OIScore'),
        #     variable_definitions=[
        #         VariableDefinition(
        #             variable=Variable(name=Name(value='about')),
        #             type=NonNullType(type=NamedType(name=Name(value='Input'))),
        #             default_value=None
        #         )
        #     ],
        #     directives=[],
        #     selection_set=SelectionSet(
        #         selections=[
        #             Field(
        #                 alias=None, name=Name(value='score'),
        #                 arguments=[Argument(name=Name(value='about'),
        #                                     value=Variable(name=Name(value='about')))],
        #                 directives=[],
        #                 selection_set=SelectionSet(
        #                     selections=[
        #                         Field(
        #                             alias=None,
        #                             name=Name(value='result'),
        #                             arguments=[],
        #                             directives=[],
        #                             selection_set=None)
        #                     ]
        #                 )
        #             )
        #         ]
        #     )
        # )
        else:
            raise AttributeError("Unrecognized gql type: '{}'".format(doc))
    return Dict(ast={}, val=out)

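# Usage sketch for interface(), assuming this module and its graphql dependency
# are importable; the schema text and field lookups mirror the doctest above.
SCHEMA = '''
type Input {
  city: String!
  year: Int
}
type Output {
  score: Float
}
'''
spec = interface(SCHEMA)
assert spec.val.types['Input']['city'].type == 'String'
assert spec.val.types['Input']['city'].nullok is False
assert spec.val.types['Input']['year'].nullok is True
assert spec.val.types['Output']['score'].type == 'Float'
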
def start(self, test=True):
    """
    Startup script for webhook routing. Called from agent start
    """
    cherrypy.log = logger.CherryLog()
    cherrypy.config.update({
        'log.screen': False,
        'log.access_file': '',
        'log.error_file': ''
    })
    cherrypy.engine.unsubscribe('graceful', cherrypy.log.reopen_files)
    logging.config.dictConfig({
        'version': 1,
        'formatters': {
            'custom': {
                '()': 'server.logger.Logger'
            }
        },
        'handlers': {
            'console': {
                'level': 'INFO',
                'class': 'server.logger.Logger',
                'formatter': 'custom',
                'stream': 'ext://sys.stdout'
            }
        },
        'loggers': {
            '': {
                'handlers': ['console'],
                'level': 'INFO'
            },
            'cherrypy.access': {
                'handlers': ['console'],
                'level': 'INFO',
                'propagate': False
            },
            'cherrypy.error': {
                'handlers': ['console'],
                'level': 'INFO',
                'propagate': False
            },
        }
    })

    # lots of legacy stuff here which isn't used
    defaults = {
        'deploy_ver': 0,  # usable for deployment tools
        'server': {
            'route_base': '/api/v1',
            'port': 64000,
            'host': '0.0.0.0'
        },
        'heartbeat': 10,
        'status_report': 3600,  # every hour
        'requestid': True,
        'refresh_maps': 300,
        'cache': {
            'housekeeper': 60,
            'policies': 300,
            'sessions': 300,
            'groups': 300
        },
        'auth': {
            'expires': 300
        }
    }

    cfgin = None

    # try docker secrets
    if os.path.exists("/run/secrets/SERVER_CONFIG"):
        with open("/run/secrets/SERVER_CONFIG") as infile:
            cfgin = infile.read()

    # try environ
    if not cfgin:
        cfgin = os.environ.get('SERVER_CONFIG')

    if cfgin:
        try:
            cfgin = json2data(base64.b64decode(cfgin))
        except:  # pylint: disable=bare-except
            try:
                cfgin = json2data(cfgin)
            except Exception as err:  # pylint: disable=broad-except
                traceback.print_exc()
                logger.abort("Cannot process SERVER_CONFIG: " + str(err) + " from " + cfgin)
        conf = Dict(dictlib.union(defaults, cfgin))
    else:
        logger.log("Unable to find configuration, using defaults!")
        conf = Dict(defaults)

    # cherry py global
    cherry_conf = {
        'server.socket_port': 64000,
        'server.socket_host': '0.0.0.0'
    }

    if dictlib.dig_get(conf, 'server.port'):  # .get('port'):
        cherry_conf['server.socket_port'] = int(conf.server.port)
    if dictlib.dig_get(conf, 'server.host'):  # .get('host'):
        cherry_conf['server.socket_host'] = conf.server.host

    # if production mode
    if test:
        logger.log("Test mode enabled", type="notice")
        conf['test_mode'] = True
    else:
        cherry_conf['environment'] = 'production'
        conf['test_mode'] = False

    sys.stdout.flush()

    cherrypy.config.update(cherry_conf)
    cherrypy.config.update({'engine.autoreload.on': False})
    self.conf = conf
    sys.path.append('.')

    # # eventually
    # for mod in self.endpoint_names:
    #     self.add_endpoint(mod)
    # hack for now
    # from . import polyform as polyform
    from server.endpoints import polyform
    self.add_endpoint('polyform', polyform)

    # startup cleaning interval
    def housekeeper(server):
        for endpoint in server.endpoints:
            try:
                endpoint.handler.housekeeper(server)
            except:  # pylint: disable=bare-except
                traceback.print_exc()

    timeinterval.start(conf.auth.expires * 1000, housekeeper, self)

    # mount routes
    cherrypy.tree.mount(http.Health(server=self),
                        conf.server.route_base + "/health",
                        self.endpoint_conf)

    int_mon = cherrypy.process.plugins.Monitor(cherrypy.engine, self.monitor,
                                               frequency=conf.heartbeat / 2)
    int_mon.start()

    # whew, now start the server
    logger.log("Base path={}".format(conf.server.route_base), type="notice")
    cherrypy.engine.start()
    cherrypy.engine.block()

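# Small standalone sketch of the SERVER_CONFIG fallback used in start():
# accept either base64-wrapped JSON or raw JSON. The plain json module stands
# in for the project's json2data helper, and load_server_config is a
# hypothetical name, to keep the example self-contained.
import base64
import json

def load_server_config(raw):
    try:
        return json.loads(base64.b64decode(raw))
    except Exception:  # pylint: disable=broad-except
        return json.loads(raw)

# load_server_config('{"server": {"port": 64000}}')
#   -> {'server': {'port': 64000}}
# load_server_config(base64.b64encode(b'{"server": {"port": 64000}}'))
#   -> {'server': {'port': 64000}}
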
class Polyform(ParseObj):
    """Polyform object type (base)"""
    _required = ["scheme", "meta", "resources", "forms"]
    _skel = {
        'scheme': '1.0',
        'meta': Meta(),
        'resources': Resources(),
        'forms': {
            'form-name': Form._skeleton(Form)
        }
    }
    forms = Dict()
    scheme = "1.0"
    meta = Dict(name='')
    resources = Dict()
    # name = None
    # config = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._config = kwargs.get('config')

    def _finish(self):
        """Called when the object load is completed"""
        ## TODO: change this to map against default object template function
        for key in self._required:
            try:
                getattr(self, key)
            except:  # pylint: disable=bare-except
                abort("missing attribute")

        # handle extends importing
        for key in self.forms:
            self.forms[key]._extends()  # pylint: disable=protected-access

        # drop templates
        for key in list(self.forms.keys()):
            if self.forms[key].type == "template" and self.forms.get(key):
                del self.forms[key]
            else:
                # flatten dependencies
                form = self.forms[key]
                deps = form.dependencies.get("add", list())
                for dep in form.dependencies.get("del", []):
                    deps.remove(dep)
                form.dependencies = list(set(GLOBAL_PIP + deps))
                form.dependencies.sort()  # so Dockerfile doesn't change signature
                if form.authentication:
                    if not self.resources.authentication.get(form.authentication):  # pylint: disable=line-too-long
                        abort("`forms.{}.authentication={}` auth scheme not defined at `resources.authentication.{}`",
                              key, form.authentication, form.authentication)
                if form.dimensions.expect:
                    form.expect = form.dimensions.expect + form.expect
                if form.dimensions.finish:
                    form.finish = form.dimensions.finish + form.finish

        # copy name to a higher level
        # self.name = self.meta.name
        return self

    # def _parse_name(self, key, value):
    #     return value

    def _parse_scheme(self, key, value):
        if value != "1.1":
            self._error("scheme is not 1.1", type)
        return value

    # def _parse_owner(self, key, value):
    #     return value

    def _parse_meta(self, key, value):
        return Meta(parent=self, keyword="meta", key=key, value=value)

    def _parse_resources(self, key, value):
        return Resources(parent=self, keyword="resources", key=key, value=value)

    # a list of
    def _parse_forms(self, key, value):
        # or make this a sub-class as a list of forms?
        self._is_type(key, value, dict)
        forms = dict()
        for form_key, form_value in value.items():
            form = Form(parent=self, keyword="forms." + form_key)
            form._add_keys(form_value)
            forms[form_key] = form
        return forms