def local_query_get(self, query): # # XXX How are we handling subqueries # fields = query.fields # XXX else tap into metadata cls = self.map_object[query.object] # Transform a Filter into a sqlalchemy expression _filters = get_sqla_filters(cls, query.filters) _fields = xgetattr(cls, query.fields) if query.fields else None res = db.query(*_fields) if _fields else db.query(cls) if query.filters: for _filter in _filters: res = res.filter(_filter) # Do we need to limit to the user's own results try: if self.user and cls.restrict_to_self and self.user[ 'email'] != ADMIN_USER: res = res.filter(cls.user_id == self.user['user_id']) except AttributeError: pass try: tuplelist = res.all() return tuplelist except SQLAlchemyError, e: Log.error("SQLAlchemyError trying to rollback db session: %s" % e) db.rollback() self.local_query_get(query) return list()
def callback_error(self, error):
    """
    (Internal usage) See ManifoldGateway::receive_impl.

    Args:
        error: The error message corresponding to a failed Manifold call.
    """
    message = "Error during Manifold call: %r" % error
    Log.error(message)
    # Signal the end of the record stream to the consumer.
    self.send(LastRecord())
def check_table_consistency(tables):
    """
    Check whether a set of Tables are consistent or not.

    Args:
        tables: A dictionary mapping each table name (String) to the
            corresponding Table instance.

    Raises:
        ValueError: if a Table is not well-formed (one of its keys
            refers to a field that does not exist).
    """
    for table_name, table in tables.items():
        invalid_keys = table.get_invalid_keys()
        if invalid_keys:
            # BUGFIX: the previous message referenced an undefined
            # "filename" variable, so a malformed table raised
            # NameError instead of the intended ValueError.
            error_message = "In class %r: key(s) not found: %r" % (
                table_name, invalid_keys)
            Log.error(error_message)
            raise ValueError(error_message)
def start(self): """ \brief Start the daemon """ # Check whether daemon module is properly installed if self.check_python_daemon() == False: self.terminate() import daemon # Prepare Options().lock_file self.make_lock_file() # Prepare the daemon context dcontext = daemon.DaemonContext( detach_process = (not Options().no_daemon), working_directory = Options().working_directory, pidfile = Options().lock_file if not Options().no_daemon else None, stdin = sys.stdin, stdout = sys.stdout, stderr = sys.stderr, uid = Options().uid, gid = Options().gid, files_preserve = Log().files_to_keep ) # Prepare signal handling to stop properly if the daemon is killed # Note that signal.SIGKILL can't be handled: # http://crunchtools.com/unixlinux-signals-101/ dcontext.signal_map = { signal.SIGTERM : self.signal_handler, signal.SIGQUIT : self.signal_handler, signal.SIGINT : self.signal_handler } if Options().debugmode == True: self.main() else: with dcontext: self.make_pid_file() try: self.main() except Exception, why: Log.error("Unhandled exception in start: %s" % why)
def start(self): """ \brief Start the daemon """ # Check whether daemon module is properly installed if self.check_python_daemon() == False: self.terminate() import daemon # Prepare Options().lock_file self.make_lock_file() # Prepare the daemon context dcontext = daemon.DaemonContext( detach_process=(not Options().no_daemon), working_directory=Options().working_directory, pidfile=Options().lock_file if not Options().no_daemon else None, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr, uid=Options().uid, gid=Options().gid, files_preserve=Log().files_to_keep) # Prepare signal handling to stop properly if the daemon is killed # Note that signal.SIGKILL can't be handled: # http://crunchtools.com/unixlinux-signals-101/ dcontext.signal_map = { signal.SIGTERM: self.signal_handler, signal.SIGQUIT: self.signal_handler, signal.SIGINT: self.signal_handler } if Options().debugmode == True: self.main() else: with dcontext: self.make_pid_file() try: self.main() except Exception, why: Log.error("Unhandled exception in start: %s" % why)
def run_query(router, query, execute=True): """ Forward a query to the router and dump the result to the standard outpur Params: router: The router instance related to TDMI query: The query instance send to the TDMI's router execute: Execute the Query on the TDMIGateway """ print "*" * 80 print query print "*" * 80 print "=" * 80 result_value = router.forward(query, execute=execute) if execute: if result_value["code"] == ResultValue.SUCCESS: for record in result_value["value"]: print_record(record) else: Log.error("Failed to run query:\n\n%s" % query)
def parse_dot_h(iterable, filename=None):
    """
    Import information stored in a .h file (see manifold/metadata/*.h)

    Args:
        iterable: The file descriptor of a successfully opened file.
            You may also pass iter(string) if the content of the .h is
            stored in "string"
        filename: The corresponding filename. It is only used to print
            user friendly message, so you may pass None.
    Returns:
        A tuple made of two dictionnaries (tables, enums)
        tables:
            - key: String (the name of the class)
            - data: the corresponding Table instance
        enums:
            - key: String (the name of the enum)
            - data: the corresponding MetadataEnum instance
    Raises:
        ValueError: if the input data is not well-formed.
    """
    # Parse file
    table_name    = None  # name of the class currently being parsed, if any
    cur_enum_name = None  # name of the enum currently being parsed, if any
    tables = {}
    enums = {}
    no_line = -1
    for line in iterable:
        line = line.rstrip("\r\n")
        is_valid = True
        error_message = None
        no_line += 1
        # Skip blank lines and '#' comments
        if REGEXP_EMPTY_LINE.match(line):
            continue
        if line[0] == '#':
            continue

        if table_name:  # current scope = class
            #    local const MyType my_field[]; /**< Comment */
            m = REGEXP_CLASS_FIELD.match(line)
            if m:
                qualifiers = list()
                if m.group(2): qualifiers.append("local")
                if m.group(3): qualifiers.append("const")
                tables[table_name].insert_field(
                    Field(
                        qualifiers  = qualifiers,
                        type        = m.group(4),
                        name        = m.group(5),
                        is_array    = (m.group(6) != None),
                        description = m.group(7).lstrip("/*< ").rstrip("*/ ")
                    ))
                continue

            #    KEY(my_field1, my_field2);
            m = REGEXP_CLASS_KEY.match(line)
            if m:
                key = m.group(1).split(',')
                key = [key_elt.strip() for key_elt in key]
                tables[table_name].insert_key(key)
                # XXX
                #if key not in tables[table_name].keys:
                #    tables[table_name].keys.append(key)
                continue

            #    CAPABILITY(my_field1, my_field2);
            m = REGEXP_CLASS_CAP.match(line)
            if m:
                capability = map(lambda x: x.strip(), m.group(1).split(','))
                tables[table_name].set_capability(capability)
                continue

            #    PARTITIONBY(clause_string);
            m = REGEXP_CLASS_CLAUSE.match(line)
            if m:
                clause_string = m.group(1)
                clause = Clause(clause_string)
                tables[table_name].partitions.append(clause)
                continue

            #    };  (end of class scope)
            if REGEXP_CLASS_END.match(line):
                cur_class = tables[table_name]
                if not cur_class.keys:
                    # No explicit KEY(...) declared: add an implicit
                    # <table>_id key field.
                    key_name = "%s_id" % table_name
                    if key_name in cur_class.get_field_names():
                        Log.error("Trying to add implicit key %s which is already in use" % key_name)
                    Log.info("Adding implicit key %s in %s" % (key_name, table_name))
                    dummy_key_field = Field(["const"], "unsigned", key_name, False, "Dummy key")
                    cur_class.insert_field(dummy_key_field)
                    cur_class.insert_key(Key([dummy_key_field]))
                table_name = None
                continue

            # Invalid line
            is_valid = False
            error_message = "In '%s', line %r: in table '%s': invalid line: [%r] %s" % (
                filename, no_line, table_name, line,
                ''.join([PATTERN_BEGIN, PATTERN_CLASS_FIELD, PATTERN_END]))

        elif cur_enum_name:  # current scope = enum
            #    "my string value",
            m = REGEXP_ENUM_FIELD.match(line)
            if m:
                # NOTE(review): the matched value is currently discarded;
                # enum members are never stored in the MetadataEnum.
                value = m.group(1)
                continue

            #    };  (end of enum scope)
            if REGEXP_CLASS_END.match(line):
                cur_enum_name = None
                continue

            # Invalid line
            is_valid = False
            error_message = "In '%s', line %r: in enum '%s': invalid line: [%r]" % (
                filename, no_line, cur_enum_name, line)

        else:  # no current scope
            #    class MyClass {
            m = REGEXP_CLASS_BEGIN.match(line)
            if m:
                qualifier = m.group(1)  # qualifier ??
                table_name = m.group(2)
                tables[table_name] = Table(None, None, table_name, None, Keys())
                continue

            #    enum MyEnum {
            m = REGEXP_ENUM_BEGIN.match(line)
            if m:
                cur_enum_name = m.group(1)
                enums[cur_enum_name] = MetadataEnum(cur_enum_name)
                continue

            # Invalid line
            # BUGFIX: the format arguments were missing here, so the raw
            # template string (with unfilled %s/%r) was logged and raised.
            is_valid = False
            error_message = "In '%s', line %r: class declaration expected: [%r]" % (
                filename, no_line, line)

        if is_valid == False:
            if not error_message:
                error_message = "Invalid input file %s, line %r: [%r]" % (
                    filename, no_line, line)
            Log.error(error_message)
            raise ValueError(error_message)

    return (tables, enums)
def main(self):
    """
    Run the XMLRPC server: build the query interface (Forwarder or
    Router, depending on the command-line options), set up SSL with
    client-certificate verification, and start the reactor.
    """
    Log.info("XMLRPC server daemon (%s) started." % sys.argv[0])

    # NOTE it is important to import those files only after daemonization,
    # since they open files we cannot easily preserve
    from twisted.web import xmlrpc, server

    # SSL support
    from OpenSSL import SSL
    from twisted.internet import ssl #, reactor
    #from twisted.internet.protocol import Factory, Protocol
    #from twisted.internet import reactor

    # This also imports manifold.util.reactor_thread that uses reactor
    from manifold.core.router import Router

    assert not (Options().platform and Options().gateway), "Both gateway and platform cannot be specified at commandline"

    # This imports twisted code so we need to import it locally
    from manifold.core.xmlrpc_api import XMLRPCAPI

    # This should be configurable
    allowed_capabilities = Capabilities()
    allowed_capabilities.selection = True
    allowed_capabilities.projection = True

    # XXX We should harmonize interfaces between Router and Forwarder
    if Options().platform:
        platforms = Storage.execute(Query().get('platform'), format='object')
        # We pass a single platform to Forwarder
        platform = [p for p in platforms if p.name == Options().platform][0]
        self.interface = Forwarder(platform, allowed_capabilities)
    elif Options().gateway:
        # XXX user
        # XXX Change Forwarded initializer
        #DEPRECATED| platform = Platform(u'dummy', Options().gateway, self.get_gateway_config(Options().gateway), 'user')
        # Wrap the single gateway into a dummy Platform for the Forwarder
        platform = Platform(
            platform = u'dummy',
            gateway_type = Options().gateway,
            config = self.get_gateway_config(Options().gateway),
            auth_type = 'user'
        )
        self.interface = Forwarder(platform, allowed_capabilities)
    else:
        self.interface = Router()

    try:
        # pyOpenSSL verification callback: 'ok' is the result of the
        # underlying OpenSSL chain check for this certificate.
        def verifyCallback(connection, x509, errnum, errdepth, ok):
            if not ok:
                print 'invalid cert from subject:', x509.get_subject()
                print errnum, errdepth
                return False
            else:
                print "Certs are fine", x509, x509.get_subject()
                return True

        # Refuse to start without server key/certificate; print the
        # commands needed to generate them, then exit.
        ssl_path = Options().ssl_path
        if not ssl_path or not os.path.exists(ssl_path):
            print ""
            print "You need to generate SSL keys and certificate in '%s' to be able to run manifold" % ssl_path
            print ""
            print "mkdir -p /etc/manifold/keys"
            print "openssl genrsa 1024 > /etc/manifold/keys/server.key"
            print "chmod 400 /etc/manifold/keys/server.key"
            print "openssl req -new -x509 -nodes -sha1 -days 365 -key /etc/manifold/keys/server.key > /etc/manifold/keys/server.cert"
            print ""
            sys.exit(0)

        server_key_file = "%s/server.key" % ssl_path
        server_crt_file = "%s/server.cert" % ssl_path
        Log.tmp("key, cert=", server_key_file, server_crt_file)
        myContextFactory = ssl.DefaultOpenSSLContextFactory(server_key_file, server_crt_file)

        ctx = myContextFactory.getContext()

        ctx.set_verify(
            SSL.VERIFY_PEER, # | SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
            verifyCallback
            )

        # Since we have self-signed certs we have to explicitly
        # tell the server to trust them.
        #ctx.load_verify_locations("keys/ca.pem")
        trusted_roots_path = Options().trusted_roots_path
        if not trusted_roots_path or not os.path.exists(trusted_roots_path):
            Log.warning("No trusted root found in %s. You won't be able to login using SSL client certificates" % trusted_roots_path)
        ctx.load_verify_locations(None, ssl_path)

        #ReactorThread().listenTCP(Options().xmlrpc_port, server.Site(XMLRPCAPI(self.interface, allowNone=True)))
        ReactorThread().listenSSL(Options().xmlrpc_port, server.Site(XMLRPCAPI(self.interface, allowNone=True)), myContextFactory)
        # Blocks until the reactor is stopped
        ReactorThread().start_reactor()
    except Exception, e:
        # TODO If database gets disconnected, we can sleep/attempt reconnection
        Log.error("Error in XMLRPC API: %s" % str(e))
def _to_sql_where_elt(predicate):
    """
    (Internal usage) Translate a Predicate in the corresponding SQL clause

    Args:
        predicate: A Predicate instance

    Returns:
        The String containing the corresponding SQL clause
    """
    # NOTE : & | operator on list, tuple, set: if not make one
    # NOTE : in MyPLC we could have several modifiers on a field
    field, op_, value = predicate.get_tuple()
    op = None

    if isinstance(value, (list, tuple, set, frozenset)):
        # handling filters like '~slice_id':[]
        # this should return true, as it's the opposite of 'slice_id':[] which is false
        # prior to this fix, 'slice_id':[] would have returned ``slice_id IN (NULL) '' which is unknown
        # so it worked by coincidence, but the negation '~slice_ids':[] would return false too
        if not value or len(list(value)) == 0:
            if op_ in [and_, or_]:
                # NOTE(review): 'operator' is assigned but never read
                # below — possibly meant to set 'op'; confirm intent.
                operator = eq
                value = "'{}'"
            else:
                field = ""
                operator = ""
                value = "FALSE"
        else:
            if isinstance(field, (list, tuple, set, frozenset)):
                # Composite key: (f1, f2) matched against a list of value
                # tuples, expressed as OR-ed conjunctions of equalities.
                and_clauses = []
                for value_elt in value:
                    value_elt = map(PostgreSQLGateway._to_sql_value, value_elt)
                    predicate_list = [
                        "%s = %s" % (f, ve) for f, ve in izip(field, value_elt)
                    ]
                    and_clauses.append(" AND ".join(predicate_list))
                field = ""
                op = ""
                value = " OR ".join(and_clauses)
            else:
                value = map(PostgreSQLGateway.quote, value)
                if op_ == and_:
                    # Array containment
                    op = "@>"
                    value = "ARRAY[%s]" % ", ".join(value)
                elif op_ == or_:
                    # BUGFIX: this previously tested "op == or_" ('op' is
                    # still None at this point), so the array-overlap
                    # branch was unreachable and such predicates fell
                    # through to the "IN" case.
                    op = "&&"
                    value = "ARRAY[%s]" % ", ".join(value)
                else:
                    op = "IN"
                    value = "(%s)" % ", ".join(value)
    else:
        if value is None:
            op = "IS"
            value = "NULL"
        elif isinstance(value, StringTypes) and \
                (value.find("*") > -1 or value.find("%") > -1):
            op = "LIKE"
            # insert *** in pattern instead of either * or %
            # we dont use % as requests are likely to %-expansion later on
            # actual replacement to % done in PostgreSQL.py
            value = value.replace("*", "***")
            value = value.replace("%", "***")
            value = str(PostgreSQLGateway.quote(value))
        else:
            if op_ == eq:
                op = "="
            elif op_ == lt:
                op = "<"
            elif op_ == gt:
                op = ">"
            elif op_ == le:
                op = "<="
            elif op_ == ge:
                op = ">="
            else:
                Log.error("_to_sql_where_elt: invalid operator: op_ = %s" % op_)

            if isinstance(value, StringTypes) and value[-2:] != "()":
                # This is a string value and we're not calling a pgsql function
                # having no parameter (for instance NOW())
                value = str(PostgreSQLGateway.quote(value))
            elif isinstance(value, datetime.datetime):
                value = str(PostgreSQLGateway.quote(str(value)))

    clause = "%s %s %s" % (
        "\"%s\"" % field if field else "",
        "%s" % op if op else "",
        value)

    if op_ == neg:
        clause = " ( NOT %s ) " % (clause)

    return clause