def format_instance(instance):
    """Serialise `instance`

    For children to be visualised and modified, they must provide
    an appropriate implementation of __str__.

    Data that isn't JSON compatible cannot be visualised nor modified.

    Attributes:
        name (str): Name of instance
        niceName (str, optional): Nice name of instance
        family (str): Name of compatible family
        data (dict, optional): Associated data
        publish (bool): Whether or not instance should be published

    Returns:
        Dictionary of JSON-compatible instance

    """

    instance = {
        "name": instance.name,
        "id": instance.id,
        "data": format_data(instance.data),
        "children": list(),
    }

    if os.getenv("PYBLISH_SAFE"):
        schema.validate(instance, "instance")

    return instance
def get(self):
    """Return state; do not modify

    :>jsonarr array context: Context, incl. data and children
    :>jsonarr array plugins: Available plug-ins

    :status 200: Return state as per schema_state.json

    """

    state = service_mod.current().state

    try:
        state.compute()
        schema.validate(state, schema="state")
    except schema.ValidationError as e:
        return {"ok": False, "message": str(e)}, 500
    except Exception as e:
        message = format_exception() or str(e)
        return {"ok": False, "message": message}, 500

    return {"ok": True, "state": state}, 200
def format_record(record):
    """Serialise LogRecord instance

    Data:
        threadName, name, thread, created, process, processName,
        args, module, filename, levelno, exc_text, pathname, lineno,
        msg, exc_info, funcName, relativeCreated, levelname, msecs

    """

    record = record.__dict__

    # Humanise output and conform to Exceptions
    record["message"] = str(record.pop("msg"))

    if os.getenv("PYBLISH_SAFE"):
        schema.validate(record, "record")

    return record
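A minimal usage sketch for the record serialiser above; it assumes `os` is imported, `PYBLISH_SAFE` is unset, and `format_record` is in scope:

import logging

# Build a plain LogRecord and serialise it; "msg" is renamed to "message".
rec = logging.LogRecord(name="demo", level=logging.INFO, pathname=__file__,
                        lineno=1, msg="hello", args=None, exc_info=None)
data = format_record(rec)
assert data["message"] == "hello"
assert "msg" not in data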
def format_record(record):
    """Serialise LogRecord instance

    Data:
        threadName, name, thread, created, process, processName,
        args, module, filename, levelno, exc_text, pathname, lineno,
        msg, exc_info, funcName, relativeCreated, levelname, msecs

    """

    record = record.__dict__

    # Humanise output and conform to Exceptions
    record["message"] = str(record.pop("msg"))

    schema.validate(record, "record")

    return record
def put(self):
    """Process plug-in

    :<json string plugin: Plug-in to process
    :<json string instance: Instance to process

    :>jsonarr string ok: Status message
    :>jsonarr Result result: Result dictionary; see schema for Result

    :status 200: Processing ok
    :status 400: Invalid arguments specified
    :status 500: Server error

    """

    parser = flask.ext.restful.reqparse.RequestParser()
    parser.add_argument("plugin", required=True, type=str)
    parser.add_argument("instance", type=str)
    parser.add_argument("mode", type=str, default="process")
    kwargs = parser.parse_args()

    plugin = kwargs["plugin"]
    instance = kwargs["instance"]
    mode = kwargs["mode"]

    service = service_mod.current()

    try:
        if mode == "process":
            log.debug("Processing..")
            result = service.process(plugin, instance)
            schema.validate(result, schema="result")
        elif mode == "repair":
            log.debug("Repairing..")
            result = service.repair(plugin, instance)
            schema.validate(result, schema="result")
        else:
            return {
                "ok": False,
                "message": "mode %s unrecognised" % mode
            }, 400
    except schema.ValidationError as e:
        return {"ok": False, "message": str(e)}, 500
    except Exception as e:
        message = format_exception() or str(e)
        return {"ok": False, "message": message}, 500

    return {"ok": True, "result": result}, 200
def add_jobs(self, jobs):
    uids = []
    if isinstance(jobs, dict):
        # A single job may be passed bare; wrap it in a list so the loop
        # below handles both cases (list(jobs) would iterate the dict keys).
        jobs = [jobs]
    for job in jobs:
        schema.validate('Job', job)
        uid = str(uuid.uuid1())
        job['uuid'] = uid
        self.inst.jobs.add_job(job)
        uids.append(uid)
    return uids
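The uuid-tagging pattern used by `add_jobs`, shown standalone; the job payload here is a hypothetical stand-in for a schema-valid Job:

import uuid

jobs = [{"action": "demo"}]  # hypothetical payload; real jobs must satisfy the 'Job' schema
uids = []
for job in jobs:
    uid = str(uuid.uuid1())  # time-based id, one per job
    job["uuid"] = uid
    uids.append(uid)
print(uids)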
def post(self):
    """Update state

    Given a `state`, update the host.  Else re-evaluate current state
    of host and return it.

    :<jsonarr array state: Changes from client
    :>jsonarr array state: Applied changes
    :>jsonarr string message: Error message when status == 500
    :>jsonarr Changes changes: Changes dictionary; see schema for Changes

    :status 200: State returned
    :status 400: Invalid arguments specified
    :status 500: Internal error; see `message` for information.

    """

    parser = flask.ext.restful.reqparse.RequestParser()
    parser.add_argument("changes", type=str)
    kwargs = parser.parse_args()

    service = service_mod.current()

    if kwargs["changes"] is None:
        service.init()
    else:
        try:
            changes = json.loads(kwargs["changes"])
            schema.validate(changes, schema="changes")
            service.state.update(changes)
        except schema.ValidationError as e:
            return {"ok": False, "message": str(e)}, 500
        except ValueError:
            message = "Could not de-serialise state: %r" % kwargs
            log.error(message)
            return {"ok": False, "message": message}, 500
        except Exception as e:
            message = format_exception() or str(e)
            log.error(message)
            return {"ok": False, "message": message}, 500

        return {"ok": True, "changes": changes}, 200

    return {"ok": True}, 200
def validate_metadata(md):
    """
    Validate a given metadata dictionary.  Expands dotted dictionary keys.

    Parameters
    ----------
    md : dict
        The metadata dictionary.

    Returns
    -------
    md : dict
        The validated metadata dictionary, with default values filled in
        if necessary.
    """
    if not md:
        return {}
    try:
        variety = md['variety']
    except KeyError:
        raise ValueError(
            'Unexpected metadata keys without variety specified: ' +
            ', '.join(md)) from None
    md = expand_dotted_dict(md)
    try:
        schema = _schema_registry[variety]
    except KeyError:
        raise ValueError(f'Unexpected variety: {variety!r}. Valid options: ' +
                         ', '.join(_schema_registry)) from None
    return schema.validate(md)
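`expand_dotted_dict` is not shown above; presumably it turns keys like 'a.b' into nested dictionaries before schema validation. A self-contained sketch of that assumed behaviour:

def expand_dotted_dict_sketch(md):
    """Expand {'a.b': 1} into {'a': {'b': 1}} (assumed semantics)."""
    out = {}
    for key, value in md.items():
        parts = key.split(".")
        node = out
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node[parts[-1]] = value
    return out

expanded = expand_dotted_dict_sketch({"variety": "v", "a.b": 1})
assert expanded == {"variety": "v", "a": {"b": 1}}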
def route_api_login():
    data = request.get_json()
    if not schema.validate(data, schema.login):
        return abort(400)

    if session.get('username'):
        session.clear()

    username = data.get('username', '')
    result = db.users.find_one({'username_lower': username.lower()})
    if not result:
        return api_error('invalid_username_password')

    password = data.get('password', '').encode('utf-8')
    if not bcrypt.checkpw(password, result['password']):
        return api_error('invalid_username_password')

    don = get_db_don(result)
    session['session_id'] = result['session_id']
    session['username'] = result['username']
    session.permanent = bool(data.get('remember'))

    return jsonify({
        'status': 'ok',
        'username': result['username'],
        'display_name': result['display_name'],
        'don': don
    })
def route_api_account_don():
    data = request.get_json()
    if not schema.validate(data, schema.update_don):
        return abort(400)

    don_body_fill = data.get('body_fill', '').strip()
    don_face_fill = data.get('face_fill', '').strip()
    if (len(don_body_fill) != 7
            or not don_body_fill.startswith("#")
            or not is_hex(don_body_fill[1:])
            or len(don_face_fill) != 7
            or not don_face_fill.startswith("#")
            or not is_hex(don_face_fill[1:])):
        return api_error('invalid_don')

    db.users.update_one({'username': session.get('username')}, {
        '$set': {
            'don_body_fill': don_body_fill,
            'don_face_fill': don_face_fill,
        }
    })
    return jsonify({
        'status': 'ok',
        'don': {
            'body_fill': don_body_fill,
            'face_fill': don_face_fill
        }
    })
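The `is_hex` helper called above is not part of the snippet; a plausible implementation, offered purely as an assumption:

def is_hex(s):
    """Return True if `s` parses as hexadecimal (assumed contract)."""
    try:
        int(s, 16)
        return True
    except ValueError:
        return False

assert is_hex("ff00aa") and not is_hex("ff00zz")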
def route_api_account_password():
    data = request.get_json()
    if not schema.validate(data, schema.update_password):
        return abort(400)

    user = db.users.find_one({'username': session.get('username')})
    current_password = data.get('current_password', '').encode('utf-8')
    if not bcrypt.checkpw(current_password, user['password']):
        return api_error('current_password_invalid')

    new_password = data.get('new_password', '').encode('utf-8')
    if not 6 <= len(new_password) <= 5000:
        return api_error('invalid_new_password')

    salt = bcrypt.gensalt()
    hashed = bcrypt.hashpw(new_password, salt)
    session_id = os.urandom(24).hex()

    db.users.update_one(
        {'username': session.get('username')},
        {'$set': {
            'password': hashed,
            'session_id': session_id
        }})
    session['session_id'] = session_id
    return jsonify({'status': 'ok'})
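The bcrypt round trip these account routes rely on, as a standalone sketch (requires the bcrypt package):

import bcrypt

password = "hunter22".encode("utf-8")
hashed = bcrypt.hashpw(password, bcrypt.gensalt())  # salt is embedded in the hash
assert bcrypt.checkpw(password, hashed)
assert not bcrypt.checkpw(b"wrong", hashed)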
def read_report(self, request, duplicate_error=True):
    """Read a Reporting object sent by the client.  Will validate the
    object and remove extra fields which are not specified in the schema.
    """
    obj = schema.filter_fields('Reporting', json.load(request.content))
    if not schema.validate("Reporting", obj):
        raise core.SmapSchemaException(
            "Invalid Reporting object (does not validate)", 400)
    if duplicate_error and self.reports.get_report(obj['uuid']):
        raise core.SmapException("Report instance already exists!", 400)
    return obj
def format_instance(instance):
    """Serialise `instance`

    For children to be visualised and modified, they must provide
    an appropriate implementation of __str__.

    Data that isn't JSON compatible cannot be visualised nor modified.

    Attributes:
        name (str): Name of instance
        niceName (str, optional): Nice name of instance
        family (str): Name of compatible family
        children (list, optional): Associated children
        data (dict, optional): Associated data
        publish (bool): Whether or not instance should be published

    Returns:
        Dictionary of JSON-compatible instance

    """

    children = list()
    for child in instance:
        try:
            json.dumps(child)
        except Exception:
            child = "Invalid"
        children.append(child)

    instance = {
        "name": instance.name,
        "id": instance.id,
        "children": children,
        "data": format_data(instance.data)
    }

    schema.validate(instance, "instance")

    return instance
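A quick standalone illustration of the child-probing fallback above, where anything `json.dumps` rejects is replaced by the string "Invalid":

import json

children = []
for child in ["ok", {"a": 1}, object()]:
    try:
        json.dumps(child)
    except Exception:
        child = "Invalid"
    children.append(child)
assert children == ["ok", {"a": 1}, "Invalid"]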
def __setitem__(self, attr, value):
    if attr in self.FIELDS:
        dict.__setitem__(self, attr, value)
        if attr != 'uuid':
            if not schema.validate("Timeseries", self):
                raise SmapSchemaException("Invalid schema in "
                                          "Timeseries for " + attr)
            # time series start dirty so when we publish them the
            # first time we send all their metadata.
            self.dirty = True
    else:
        raise KeyError(attr + " can not be set on a Timeseries!")
def track():
    provider = 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'  # todo: use identity
    produce(
        'queue_track_1', provider,
        schema.validate(
            schema.queue_track_1, {
                'provider': provider,
                'time': datetime.datetime.utcnow().timestamp(),
                'state': flask.request.json['state'],
                'latitude': flask.request.json['latitude'],
                'longitude': flask.request.json['longitude']
            }))
    return 'OK'
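These queue services call `schema.validate(schema_object, payload)` and use the validated payload. A minimal wrapper with that calling convention, assuming the PyPI `schema` package underneath (the field types here are guesses from the payload above):

import schema as schema_lib

def validate(spec, payload):
    """Validate `payload` against `spec` and return it; raises on mismatch."""
    return schema_lib.Schema(spec).validate(payload)

queue_track_1 = {'provider': str, 'time': float,
                 'state': str, 'latitude': int, 'longitude': int}
print(validate(queue_track_1, {'provider': 'p', 'time': 0.0,
                               'state': 'idle', 'latitude': 1, 'longitude': 2}))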
def format_result(result):
    """Serialise Result"""
    instance = None
    error = None

    if result["instance"] is not None:
        instance = format_instance(result["instance"])

    if result["error"] is not None:
        error = format_error(result["error"])

    result = {
        "success": result["success"],
        "plugin": format_plugin(result["plugin"]),
        "instance": instance,
        "error": error,
        "records": format_records(result["records"]),
        "duration": result["duration"]
    }

    schema.validate(result, "result")

    return result
def route_api_account_remove():
    data = request.get_json()
    if not schema.validate(data, schema.delete_account):
        return abort(400)

    user = db.users.find_one({'username': session.get('username')})
    password = data.get('password', '').encode('utf-8')
    if not bcrypt.checkpw(password, user['password']):
        return api_error('verify_password_invalid')

    db.scores.delete_many({'username': session.get('username')})
    db.users.delete_one({'username': session.get('username')})

    session.clear()
    return jsonify({'status': 'ok'})
def route_api_account_display_name():
    data = request.get_json()
    if not schema.validate(data, schema.update_display_name):
        return abort(400)

    display_name = data.get('display_name', '').strip()
    if not display_name:
        display_name = session.get('username')
    elif len(display_name) > 25:
        return api_error('invalid_display_name')

    db.users.update_one({'username': session.get('username')}, {
        '$set': {'display_name': display_name}
    })
    return jsonify({'status': 'ok', 'display_name': display_name})
def __init__(self, path, inst=None, description=None, *args):
    """
    :param string path: the path where the collection will be added
    :param SmapInstance inst: the containing :py:class:`SmapInstance` object
    :param string description: the contents of the sMAP description field
    :raise SmapSchemaException: if the resulting object does not validate
    """
    self.inst = inst
    setattr(self, 'path', util.norm_path(path))
    if len(args) == 1 and isinstance(args[0], dict):
        dict.__init__(self, args[0])
    else:
        self.__setitem__("Contents", [])
    if not schema.validate("Collection", self):
        raise SmapSchemaException("Error instantiating Collection: "
                                  "invalid parameter")
def route_api_register():
    data = request.get_json()
    if not schema.validate(data, schema.register):
        return abort(400)

    if session.get('username'):
        session.clear()

    username = data.get('username', '')
    if len(username) < 3 or len(username) > 20 or not re.match(
            '^[a-zA-Z0-9_]{3,20}$', username):
        return api_error('invalid_username')

    if db.users.find_one({'username_lower': username.lower()}):
        return api_error('username_in_use')

    password = data.get('password', '').encode('utf-8')
    if not 6 <= len(password) <= 5000:
        return api_error('invalid_password')

    salt = bcrypt.gensalt()
    hashed = bcrypt.hashpw(password, salt)
    don = get_default_don()
    session_id = os.urandom(24).hex()

    db.users.insert_one({
        'username': username,
        'username_lower': username.lower(),
        'password': hashed,
        'display_name': username,
        'don': don,
        'user_level': 1,
        'session_id': session_id
    })

    session['session_id'] = session_id
    session['username'] = username
    session.permanent = True

    return jsonify({
        'status': 'ok',
        'username': username,
        'display_name': username,
        'don': don
    })
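Note that the username check above encodes the 3-20 length bound twice: once explicitly and once in the {3,20} quantifier, so the regex alone would suffice. Its behaviour, standalone:

import re

USERNAME_RE = re.compile(r'^[a-zA-Z0-9_]{3,20}$')

assert USERNAME_RE.match('user_01')
assert not USERNAME_RE.match('ab')         # too short
assert not USERNAME_RE.match('bad name!')  # disallowed characters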
def route_api_scores_save():
    data = request.get_json()
    if not schema.validate(data, schema.scores_save):
        return abort(400)

    username = session.get('username')
    if data.get('is_import'):
        db.scores.delete_many({'username': username})

    scores = data.get('scores', [])
    for score in scores:
        db.scores.update_one({'username': username, 'hash': score['hash']},
                             {'$set': {
                                 'username': username,
                                 'hash': score['hash'],
                                 'score': score['score']
                             }}, upsert=True)
    return jsonify({'status': 'ok'})
def __init__(self, new_uuid, unit,
             data_type=DEFAULTS['Properties/ReadingType'],
             timezone=DEFAULTS['Properties/Timezone'],
             description=None,
             buffersz=DEFAULTS['BufferSize'],
             milliseconds=False):
    """
    :param new_uuid: a :py:class:`uuid.UUID`
    :param string unit: the engineering units of this timeseries
    :param string data_type: the data type of the data.  Options are
     ``long`` or ``double``
    :param string timezone: a tzinfo-style timezone.
    :param string description: the value of sMAP Description field.
    :param int buffersz: how many readings to present when the timeseries
     is retrieved with a ``GET``.
    :param bool milliseconds: if True, then the stream publishes time in
     units of Unix milliseconds.  Otherwise, normal unix timestamps are
     assumed
    """
    if isinstance(new_uuid, dict):
        if not schema.validate('Timeseries', new_uuid):
            raise SmapSchemaException(
                "Initializing timeseries failed -- invalid object")
        dict.__init__(self, new_uuid)
        reading_init = new_uuid['Readings']
    else:
        self.__setitem__("uuid", new_uuid)
        self.__setitem__("Properties", {
            'UnitofMeasure': unit,
            'ReadingType': data_type,
            'Timezone': timezone
        })
        if description:
            self.__setitem__("Description", description)
        reading_init = []
    self.dirty = True
    self.milliseconds = milliseconds
    self.__setitem__("Readings",
                     util.FixedSizeList(buffersz, init=reading_init))
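`util.FixedSizeList` is not shown; presumably it keeps only the most recent `buffersz` readings. A self-contained sketch of that assumed behaviour using collections.deque:

from collections import deque

buffersz = 3
readings = deque([], maxlen=buffersz)  # drops the oldest entry when full
for ts, val in [(1, 10), (2, 11), (3, 12), (4, 13)]:
    readings.append((ts, val))
assert list(readings) == [(2, 11), (3, 12), (4, 13)]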
import os

import prometheus_client

import db      # assumed local module, matching the db.initialize call below
import schema
import stream

consume = stream.consumer(__name__, os.environ['KAFKA_HOST'],
                          os.environ['KAFKA_PORT'])
execute = db.initialize(__name__, os.environ['POSTGRES_HOST'],
                        os.environ['POSTGRES_PORT'])


def init():
    execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
    execute('CREATE TABLE IF NOT EXISTS dispatch ('
            'time TIMESTAMP, dispatch UUID, consumer UUID, '
            'latitude SMALLINT, longitude SMALLINT, state SMALLINT);')


def received(data):
    # Insert a dispatch row only when the consumer has no pending (state 0)
    # dispatch.  NB: IF ... END IF is PL/pgSQL syntax; `execute` is assumed
    # to run this statement in a context that accepts it.
    execute(
        'IF NOT EXISTS (SELECT 1 FROM dispatch WHERE consumer = %s AND state = 0)'
        ' INSERT INTO dispatch (time, dispatch, consumer, latitude, longitude, state)'
        ' VALUES (%s, uuid_generate_v4(), %s, %s, %s, 0)'
        ' END IF',
        (data['consumer'], data['time'], data['consumer'],
         data['latitude'], data['longitude']))


if __name__ == '__main__':
    prometheus_client.start_http_server(port=4040)
    init()
    consume('queue_dispatch_1',
            lambda data: received(schema.validate(schema.queue_dispatch_1, data)))
def main():
    mydir = os.path.dirname(os.path.realpath(__file__))

    parser = argparse.ArgumentParser()
    parser.add_argument('--src', nargs='*', required=True)
    parser.add_argument('--action', required=True)
    parser.add_argument('--scope', nargs='*', required=False)
    parser.add_argument('--noScope', nargs='*', required=False)
    parser.add_argument('--yamlLoggingConf',
                        help="Logging configuration as a yaml file",
                        required=False)
    parser.add_argument('--workingFolder',
                        help="Where to store working context",
                        required=True)
    param = parser.parse_args()

    if param.yamlLoggingConf is not None:
        loggingConfFile = param.yamlLoggingConf
    else:
        loggingConfFile = os.path.join(mydir, "conf/logging.yml")
    if not os.path.isfile(loggingConfFile):
        misc.ERROR("'{0}' is not a readable file!".format(loggingConfFile))
    logging.config.dictConfig(yaml.load(open(loggingConfFile)))

    logger.debug("mydir:" + mydir)
    logger.debug("param.src:" + str(param.src))

    workingFolder = param.workingFolder
    if not os.path.isdir(workingFolder):
        misc.ERROR("{0} must be an existing folder".format(workingFolder))
    if len(os.listdir(workingFolder)) > 0:
        misc.ERROR("{0} must be an existing EMPTY folder".format(workingFolder))

    # ----- We must make a first read of the file, with only the 'master'
    # plugin, to fetch the plugins list and paths
    masterPluginPath = os.path.abspath(os.path.join(mydir, "."))
    context = Context(workingFolder)
    context.loadPlugin("master", [masterPluginPath])
    handleSourceFiles(param.src, context, None)
    context.groom()

    # --------------------------------------------- Included scope handling
    context.includedScopes = handleCliScopes(param.scope)
    if len(context.includedScopes) == 0 and INCLUDED_SCOPES in context.model[SRC]:
        context.includedScopes = set(context.model[SRC][INCLUDED_SCOPES])
    if len(context.includedScopes) > 0:
        print("Scope limited to {0}".format(str(list(context.includedScopes))))

    # --------------------------------------------- Excluded scope handling
    context.excludedScopes = handleCliScopes(param.noScope)
    if EXCLUDED_SCOPES in context.model[SRC]:
        context.excludedScopes = context.excludedScopes.union(
            context.model[SRC][EXCLUDED_SCOPES])
    if len(context.excludedScopes) > 0:
        print("Scope excluded: {0}".format(str(list(context.excludedScopes))))

    # Now we have the effective PLUGINS list and PLUGINS_PATHS in the
    # context, so we can load all plugins
    for plName in context.model[SRC][PLUGINS]:
        context.loadPlugin(plName, context.model[SRC][PLUGINS_PATHS])

    # And reload source files, now with all plugins activated
    fileByVariable = {} if param.action == "dumpvars" else None
    handleSourceFiles(param.src, context, fileByVariable)
    if 'include' in context.model[SRC]:
        del context.model[SRC]['include']  # Must remove, as not part of the schema

    # Now, build the schema for source validation by merging all plugin schemas
    theSchema = context.getSchema()
    dump.dumpSchema(context, theSchema)
    # dump.dumpModel(context)

    # And validate against this schema
    schema.validate(context.model[SRC], theSchema)

    # And groom all plugins
    context.groom()
    dump.dumpModel(context)

    # Check scopes validity
    # NB: We perform this after grooming, even if grooming can rely on scope.
    # The aim is only to detect scopes with typos.
    supportedScopes = context.getAllSupportedScopes()
    scopesToTest = context.excludedScopes.union(context.includedScopes)
    for scope in scopesToTest:
        if (scope != "all" and scope != "none"
                and not context.checkScope(scope)
                and scope not in supportedScopes):
            # checkScope(): scope for target files/folders (hosts and hostgroups)
            misc.ERROR("Scope '{0}' is not supported!".format(scope))

    templator = Templator(
        [os.path.join(mydir, './templates'), context.workingFolder],
        context.model)

    actions = context.getAllSupportedActions()
    logger.debug("Supported actions: {0}".format(actions))
    action = param.action
    if action == "none":
        for action in actions:
            pluginExts = context.getPluginExtForAction(action)
            logger.debug("Action: {0} -> plugins: {1}".format(action, pluginExts))
            context.buildTemplate(action, pluginExts)
            context.builRolesPath(action, pluginExts)
            context.generateAuxTemplates(action, pluginExts)
            templator.generate(
                "{0}.yml.jj2".format(action),
                os.path.join(context.workingFolder, "{0}.yml".format(action)))
    elif action == "dumpvars":
        if SRC in context.model and VARS in context.model[SRC]:
            print("---")
            variables = context.model[SRC][VARS]
            for name in sorted(variables):
                x = yaml.dump(variables[name], default_flow_style=True,
                              default_style=None, explicit_end=False)
                p = x.find("\n...\n")
                if p > 0:
                    x = x[:-5]
                p = x.find("\n")
                if p > 0:
                    x = x[:-1]
                print("{}: {} ({})".format(
                    name, x,
                    fileByVariable[name] if name in fileByVariable else "??"))
            print("---")
            # txt = yaml.dump(context.model[SRC][VARS],
            #                 default_flow_style=False, default_style=None)
        return
    else:
        if action not in actions:
            misc.ERROR("Action {0} not supported. Current configuration only "
                       "supports {1}".format(action, str(actions)))
        pluginExts = context.getPluginExtForAction(action)
        logger.debug("Action: {0} -> plugins: {1}".format(action, pluginExts))
        context.buildTemplate(action, pluginExts)
        context.builRolesPath(action, pluginExts)
        context.generateAuxTemplates(action, pluginExts)
        templator.generate(
            "{0}.yml.jj2".format(action),
            os.path.join(context.workingFolder, "{0}.yml".format(action)))

    templator.generate("inventory.jj2",
                       os.path.join(context.workingFolder, "inventory"))
    templator.generate("ansible.cfg.jj2",
                       os.path.join(context.workingFolder, "ansible.cfg"))
    misc.ensureFolder(os.path.join(context.workingFolder, "group_vars"))
    templator.generate("group_vars_all.jj2",
                       os.path.join(context.workingFolder, "group_vars/all"))
def format_plugin(plugin):
    """Serialise `plugin`

    Attributes:
        name: Name of Python class
        id: Unique identifier
        version: Plug-in version
        category: Optional category
        requires: Plug-in requirements
        order: Plug-in order
        optional: Is the plug-in optional?
        doc: The plug-in documentation
        hasRepair: Can the plug-in perform a repair?
        hasCompatible: Does the plug-in have any compatible instances?
        type: Which baseclass does the plug-in stem from? E.g. Validator
        module: File in which plug-in was defined
        contextEnabled: Does it process the Context?
        instanceEnabled: Does it process Instance(s)?

    """

    if len(plugin.__mro__) > 3:  # In case of a SVEC plug-in.
        try:
            # The MRO is as follows: (-1)object, (-2)Plugin, (-3)Selector..
            type = plugin.__mro__[-3].__name__
        except IndexError:
            type = None
            log.critical("This is a bug")
    else:
        type = "Simple"

    module = plugin.__module__
    if module == "__main__":
        # Support for in-memory plug-ins.
        path = "mem:%s" % plugin.__name__
    else:
        try:
            path = os.path.abspath(sys.modules[module].__file__)
        except Exception:
            path = "unknown"

    has_repair = False
    args = inspect.getargspec(plugin.repair).args
    if "context" in args or "instance" in args:
        has_repair = True

    # Legacy abilities
    if hasattr(plugin, "repair_context") or hasattr(plugin, "repair_instance"):
        has_repair = True

    output = {
        "label": plugin.label,
        "id": plugin.id,
        "version": plugin.version,
        "category": getattr(plugin, "category", None),
        "requires": plugin.requires,
        "order": plugin.order,
        "optional": plugin.optional,
        "hosts": plugin.hosts,
        "families": plugin.families,
        "doc": inspect.getdoc(plugin),
        "active": plugin.active,

        # Metadata
        "__pre11__": plugin.__pre11__,
        "__contextEnabled__": plugin.__contextEnabled__,
        "__instanceEnabled__": plugin.__instanceEnabled__,

        "path": path,
        "pre11": plugin.__pre11__,
        "contextEnabled": plugin.__contextEnabled__,
        "instanceEnabled": plugin.__instanceEnabled__,
        "name": plugin.__name__,
        "type": type,
        "module": module,
        "hasRepair": has_repair,
        "process": {
            "args": inspect.getargspec(plugin.process).args,
        },
        "repair": {
            "args": inspect.getargspec(plugin.repair).args,
        },
        "actions": [format_action(a) for a in plugin.actions],
    }

    schema.validate(output, "plugin")

    return output
import os

import prometheus_client

import db      # assumed local module, matching the db.initialize call below
import que
import schema

_, pop = que.initialize(__name__, os.environ['REDIS_HOST'],
                        os.environ['REDIS_PORT'])
execute = db.initialize(__name__, os.environ['POSTGRES_HOST'],
                        os.environ['POSTGRES_PORT'])


def received(data):
    # Record the acceptance and mark the dispatch as taken (state 1), but
    # only while it is still pending (state 0).  NB: IF ... END IF is
    # PL/pgSQL syntax; `execute` is assumed to run this statement in a
    # context that accepts it.
    execute(
        'IF EXISTS (SELECT 1 FROM dispatch WHERE dispatch = %s AND state = 0)'
        ' INSERT INTO accept (time, dispatch, provider) VALUES (%s, %s, %s)'
        ' UPDATE dispatch SET state = 1 WHERE dispatch = %s'
        ' END IF',
        (data['dispatch'], data['time'], data['dispatch'], data['provider'],
         data['dispatch']))


if __name__ == '__main__':
    prometheus_client.start_http_server(port=4040)
    pop('queue_accept_1',
        lambda data: received(schema.validate(schema.queue_accept_1, data)))
def format_plugin(plugin):
    """Serialise `plugin`

    Attributes:
        name: Name of Python class
        id: Unique identifier
        version: Plug-in version
        category: Optional category
        requires: Plug-in requirements
        order: Plug-in order
        optional: Is the plug-in optional?
        doc: The plug-in documentation
        hasRepair: Can the plug-in perform a repair?
        hasCompatible: Does the plug-in have any compatible instances?
        type: Which baseclass does the plug-in stem from? E.g. Validator
        module: File in which plug-in was defined
        contextEnabled: Does it process the Context?
        instanceEnabled: Does it process Instance(s)?

    """

    type = "Other"

    for order, _type in {pyblish.plugin.CollectorOrder: "Collector",
                         pyblish.plugin.ValidatorOrder: "Validator",
                         pyblish.plugin.ExtractorOrder: "Extractor",
                         pyblish.plugin.IntegratorOrder: "Integrator"}.items():
        if pyblish.lib.inrange(plugin.order, base=order):
            type = _type

    module = plugin.__module__
    if module == "__main__":
        # Support for in-memory plug-ins.
        path = "mem:%s" % plugin.__name__
    else:
        try:
            path = os.path.abspath(sys.modules[module].__file__)
        except Exception:
            path = "unknown"

    has_repair = False
    args = inspect.getargspec(plugin.repair).args
    if "context" in args or "instance" in args:
        has_repair = True

    # Legacy abilities
    if hasattr(plugin, "repair_context") or hasattr(plugin, "repair_instance"):
        has_repair = True

    output = {
        "label": plugin.label,
        "id": plugin.id,
        "version": plugin.version,
        "category": getattr(plugin, "category", None),
        "requires": plugin.requires,
        "order": plugin.order,
        "optional": plugin.optional,
        "hosts": plugin.hosts,
        "families": plugin.families,
        "doc": inspect.getdoc(plugin),
        "active": plugin.active,

        # Metadata
        "__pre11__": plugin.__pre11__,
        "__contextEnabled__": plugin.__contextEnabled__,
        "__instanceEnabled__": plugin.__instanceEnabled__,

        "path": path,
        "pre11": plugin.__pre11__,
        "contextEnabled": plugin.__contextEnabled__,
        "instanceEnabled": plugin.__instanceEnabled__,
        "name": plugin.__name__,
        "type": type,
        "module": module,
        "hasRepair": has_repair,
        "process": {
            "args": inspect.getargspec(plugin.process).args,
        },
        "repair": {
            "args": inspect.getargspec(plugin.repair).args,
        },
        "actions": [format_action(a) for a in plugin.actions],
    }

    if os.getenv("PYBLISH_SAFE"):
        schema.validate(output, "plugin")

    return output
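The order-to-type mapping above relies on `pyblish.lib.inrange` bucketing a plug-in's order around each base order; a small standalone equivalent, assuming the conventional ±0.5 window:

def inrange_sketch(number, base, offset=0.5):
    """True when `number` lies within `base` ± `offset` (assumed semantics)."""
    return base - offset <= number < base + offset

# e.g. an order of 1.25 would classify as a Validator if ValidatorOrder is 1.
assert inrange_sketch(1.25, base=1)
assert not inrange_sketch(1.75, base=1)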
import datetime
import os
import uuid

import prometheus_client

import cas     # assumed local Cassandra helper, matching cas.initialize below
import kaf     # assumed local Kafka helper, matching kaf.consumer below
import schema

# `name`, `instance` and `config` are assumed to be defined earlier in the
# module (this snippet starts mid-file).
consume = kaf.consumer(name, instance, os.environ['KAFKA_HOST'],
                       os.environ['KAFKA_PORT'])
execute = cas.initialize(name, os.environ['CASSANDRA_HOST'],
                         os.environ['CASSANDRA_PORT'])


def received(data):
    values = (uuid.UUID(data['provider']),
              datetime.datetime.fromtimestamp(data['time']),
              data['state'], data['latitude'], data['longitude'])
    # Append-only event log, keyed by time.
    execute(
        'INSERT INTO track_event (provider, time, state, latitude, longitude)'
        ' VALUES (?, ?, ?, ?, ?)', values)
    # Latest position per provider; the provider-keyed row is overwritten.
    execute(
        'INSERT INTO track_current (provider, time, state, latitude, longitude)'
        ' VALUES (?, ?, ?, ?, ?)', values)


if __name__ == '__main__':
    execute('CREATE TABLE IF NOT EXISTS track_event ('
            'provider UUID, time TIMESTAMP, state TEXT, latitude INT,'
            ' longitude INT, PRIMARY KEY (time, provider));')
    execute('CREATE TABLE IF NOT EXISTS track_current ('
            'provider UUID, time TIMESTAMP, state TEXT, latitude INT,'
            ' longitude INT, PRIMARY KEY (provider));')
    prometheus_client.start_http_server(port=config['prometheus_port'])
    consume('queue_track_1',
            lambda data: received(schema.validate(schema.queue_track_1, data)))