Example #1
    def as_exposed(self):
        mb = self.instantiate(self)
        mb.endpoint = Globals.this_service.exposed_ip, mb.endpoint[1]
        mb.entity_info = ConfigurationGenerator().generated_entities_info.get_by_name(self.class_name)
        mb.is_exposed_mailbox = True

        return mb
Example #2
    def __init__(self, context_name, entity_typename, remote_id):
        self.remote_id = remote_id
        self.initialized_flag = True
        self.context_name = context_name
        self.entity_typename = entity_typename
        self.entity_info = ConfigurationGenerator(
        ).generated_entities_info.get_by_name(entity_typename)
        self.lost_callbacks = []
        self.clients_connections: List[TCPClient] = []
Example #3
    async def GetEntityData(self, entity_dbid: int32, entity_class: FString) -> FBytes:
        """
        Получение бинарных данных о сущности
        @param entity_dbid: ДБИД сущности
        @param entity_class: класс сущности
        @return: сериализованные байты (по классу)
        """
        properties = ConfigurationGenerator().generated_entities_info.get_by_name(entity_class).get_properties('base', only_persistent=True)

        props_templ = ", ".join([f'"{key}"' for key in properties.keys()])

        res = await self.driver.exec_raw(""" SELECT {0} 
                                             FROM "class_{1}"
                                             WHERE db_id={2}; """.format(props_templ, entity_class, entity_dbid))
        sr = BinarySerialization()
        for data in res:
            values = tuple(data.values())
            for index, (prop_name, prop_data) in enumerate(properties.items()):
                print(prop_name, prop_data)
                T = self.find_type(prop_data.typename, BaseEntity)
                value = values[index]
                value = T.pg_null(value)
                sr << T.serializable_value(value).serialize()
            return sr.get_archive()
Example #4
    async def __ainit__(self,
                        context_name,
                        entity_typename,
                        endpoint_or_connection,
                        remote_id,
                        existent_endpoint_and_connection=None):
        INFO_MSG("")
        self.remote_id = remote_id
        self.client_connection: TCPClient

        if existent_endpoint_and_connection is None:
            self.client_connection, self.endpoint = await self.get_connection_and_endpoint_with_lost_callback(
                endpoint_or_connection)
        else:
            self.client_connection, self.endpoint = existent_endpoint_and_connection

        self.initialized_flag = True

        self.context_name = context_name
        self.entity_typename = entity_typename
        self.entity_info = ConfigurationGenerator(
        ).generated_entities_info.get_by_name(entity_typename)
        self.lost_callbacks = []
Example #5
    def set_service_info(self, entity_typename):
        if ConfigurationGenerator().generated_entities_info.has_entity(
                entity_typename):
            self.entity_info = ConfigurationGenerator(
            ).generated_entities_info.get_by_name(entity_typename)
            self.context_name = AppConfig.context_by_name[entity_typename]
Example #6
class MailboxBase(EntityProxy):
    initialized_flag = False

    async def get_connection_and_endpoint_with_lost_callback(
            self, endpoint_or_connection):
        INFO_MSG("endpoint_or_connection")
        if isinstance(endpoint_or_connection, TCPClient):
            endpoint = endpoint_or_connection.endpoint
            client_connection = endpoint_or_connection
            client_connection.add_lost_callback(self.on_connection_lost)
        else:
            endpoint = endpoint_or_connection
            client_connection = await self.service.create_client_connection(
                tuple(endpoint_or_connection), on_lost=self.on_connection_lost)
        return client_connection, endpoint

    def set_id(self, new_id):
        self.remote_id = new_id

    def __repr__(self):
        return "<Unknown mailbox>"

    def send_data(self, msg):
        raise NotImplementedError()

    def destroy(self):
        raise NotImplementedError()

    def set_service_info(self, entity_typename):
        if ConfigurationGenerator().generated_entities_info.has_entity(
                entity_typename):
            self.entity_info = ConfigurationGenerator(
            ).generated_entities_info.get_by_name(entity_typename)
            self.context_name = AppConfig.context_by_name[entity_typename]

    def on_connection_lost(self, connection):
        raise NotImplementedError()

    def send_method_call(self, method_index, future_id, *args, **kwargs):
        if not self:
            WARN_MSG("Call to invalid mailbox %s" % self, depth=1)
            return

        method_info = self.entity_info.get_method(self.context_name,
                                                  method_index)
        params, _ = method_info.signature
        name = method_info.name

        serialized_params = BinarySerialization()

        for param_index, (param_name, param_type) in enumerate(params):
            param = param_type.instantiate(args[param_index])
            serialized_params << param.serialize()

        serialized_call = BinarySerialization()
        serialized_call << self.remote_id
        serialized_call << Globals.generator_signature
        serialized_call << method_index
        serialized_call << future_id
        serialized_call << Globals.access_token
        serialized_call << serialized_params

        message = BinarySerialization()
        message << ConnectionMessageTypes.rmi_call
        message << serialized_call

        self.send_data(message.get_archive())

    def as_exposed(self):
        mb = self.instantiate(self)
        mb.endpoint = Globals.this_service.exposed_ip, mb.endpoint[1]
        mb.entity_info = self.entity_info
        mb.is_exposed_mailbox = True

        return mb

    async def async_method_caller(self, method_index, *args, **kwargs):
        if not self:
            return WARN_MSG("Awaiting call to invalid mailbox %s" % self, depth=1)

        future_data = FuturesManager().new(self, method_index)
        self.send_method_call(method_index, future_data['future_id'], *args)
        return await future_data['future']

    def method_caller(self, method_index, *args, **kwargs):
        if not self:
            return ERROR_MSG("Call to invalid mailbox %s" % self, depth=1)

        self.send_method_call(method_index, -1, *args)

    async def done_caller(self, future, method_index, retdata):
        # INFO_MSG("Future %i preparing to done" % method_index)
        # method = self.entity_class.get_method_by_id(method_index)

        method = self.entity_info.get_method(self.context_name, method_index)
        _, returns = method.signature

        serialized_rets = BinarySerialization(retdata)
        rets = tuple()
        for param_index, ret_type in enumerate(returns):
            try:
                ret = ret_type.deserialize(
                    serialized_rets.get_data()[param_index])
            except SerializationError:
                ERROR_MSG(
                    f"Failed to return result by method {method.name}, return value {param_index} invalid"
                )
                raise
            if isinstance(ret, AsyncObj):
                ret = await ret
            rets += ret,

        if len(rets) == 1:
            rets = rets[0]

        try:
            future.set_result(rets)
        except asyncio.InvalidStateError:
            ERROR_MSG(
                f"Failed to done future {future} (of method {method}), retdata: {retdata}"
            )

    def __getattr__(self, item):
        if self.initialized_flag:
            method = self.entity_info.find_method(self.context_name, item)
            if method:
                if method.is_async:
                    return partial(self.async_method_caller, method.id)
                else:
                    return partial(self.method_caller, method.id)

    def __eq__(self, other):
        if isinstance(other, Mailbox):
            return other.get_endpoint() == self.get_endpoint()
        return False

    def __ne__(self, other):
        if isinstance(other, Mailbox):
            return not self == other
        return False

    def __hash__(self):
        return tuple(
            self.endpoint).__hash__() if self.endpoint else ().__hash__()

    def get_id(self):
        return self.remote_id

    def get_class_name(self):
        return self.entity_info.entity_name

    def get_endpoint(self):
        return self.endpoint

    def get_context(self):
        return self.context_name
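
For orientation, the attribute dispatch used by MailboxBase.__getattr__ can be reduced to the minimal, self-contained sketch below. FakeMethodInfo, FakeEntityInfo and DemoMailbox are illustrative stand-ins rather than framework classes; only the functools.partial dispatch pattern is taken from the code above.

from functools import partial

class FakeMethodInfo:
    def __init__(self, method_id, is_async):
        self.id = method_id
        self.is_async = is_async

class FakeEntityInfo:
    # Stand-in for the generated entity info used by the real mailbox.
    def __init__(self, methods):
        self._methods = methods

    def find_method(self, context_name, name):
        return self._methods.get(name)

class DemoMailbox:
    initialized_flag = True
    context_name = "base"
    entity_info = FakeEntityInfo({"Ping": FakeMethodInfo(1, is_async=False)})

    def method_caller(self, method_id, *args):
        # The real mailbox serializes the call and sends it over the connection here.
        print("would send RMI call: method %s, args %s" % (method_id, args))

    def __getattr__(self, item):
        # Unknown attributes are resolved against the entity's method table.
        if self.initialized_flag:
            method = self.entity_info.find_method(self.context_name, item)
            if method and not method.is_async:
                return partial(self.method_caller, method.id)
        raise AttributeError(item)

DemoMailbox().Ping(42)  # -> would send RMI call: method 1, args (42,)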
Example #7
    def generate_config(self):
        """ Генерация специального конфига для контроля проекта
        """
        apps_mapping = AppConfig.context_by_name

        classes_info = OrderedDict()
        try:
            with open(ConfigurationGenerator().gen_info_filename, 'rb') as f:
                data = f.read().decode('utf-8')
                if data:
                    json_data = json.JSONDecoder(
                        object_pairs_hook=collections.OrderedDict).decode(data)
                    classes_info = json_data.get("entities", None)
                    types_info = json_data.get("types", None)
                    assert classes_info is not None
        except FileNotFoundError:
            pass

        workspace = str(Path(os.getcwd()).parent)

        for ContextName in apps_mapping.values():
            path = workspace + "/Entities/" + ContextName
            try:
                onlyfiles = [f for f in listdir(path) if isfile(join(path, f))]
            except FileNotFoundError:
                onlyfiles = []

            for filename in onlyfiles:
                ClassName = filename.replace(".py", "")
                module_path = workspace + "/Entities/" + ContextName + "/" + filename
                spec = importlib.util.spec_from_file_location(
                    ContextName + "." + ClassName, module_path)
                foo = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(foo)
                cls = foo.__dict__.get(ClassName, None)
                if cls is not None:
                    if ClassName not in classes_info:
                        classes_info[ClassName] = dict()
                    # print(filename, ContextName, ClassName, Entity, dir(Entity))
                    entity_info = self.get_entity_info(cls, ContextName, False)
                    classes_info[ClassName].update(entity_info)

        for ClassName, ContextName in apps_mapping.items():
            if ClassName not in classes_info:
                classes_info[ClassName] = dict()
            filename = ClassName + ".py"
            module_path = workspace + "/Services/" + filename
            spec = importlib.util.spec_from_file_location(
                "module.name", module_path)
            foo = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(foo)
            cls: Entity = getattr(foo, ClassName, None)
            entity_info = self.get_entity_info(cls, ContextName, True)
            classes_info[ClassName].update(entity_info)

        for ContextName in apps_mapping.values():
            from Core.Service import Service
            if 'Service' not in classes_info:
                classes_info['Service'] = dict()
            entity_info = self.get_entity_info(Service, "base", True)
            entity_info["base"]['IsApplication'] = True
            entity_info["base"]['IsBasicServiceClass'] = True
            entity_info["base"]["Exposed"] = False
            classes_info['Service'].update(entity_info)

        structs = {
            name: t
            for name, t in TypeBase.all_types['types'].items()
            if getattr(t, 'is_struct_type', False)
        }
        types_info = dict()

        for typename, typedata in structs.items():
            doc_description, \
                (doc_warnings,), \
                (doc_cvars,) = get_decayed_docstring(typedata.__doc__,
                                                    simples=["warning"],
                                                    multiples=["cvar"])

            docstring_decay = {
                "Description": doc_description,
                "Vars": doc_cvars,
                "Warnings": doc_warnings,
            }

            types_info[typename] = {
                "Name":
                typename,
                "Kind":
                "Struct",
                "Fields": {
                    fieldname: {
                        "Name": field.__name__,
                        "Default": typedata.defaults.get(fieldname, None)
                    }
                    for fieldname, field in typedata.fields
                },
                "InputParams": [
                    field.__name__ + " In" + fieldname
                    for fieldname, field in typedata.fields
                ],
                "SetupParams":
                [(": " if i == 0 else ", ") + fieldname + "(%s)" %
                 ("In" + fieldname)
                 for i, (fieldname, field) in enumerate(typedata.fields)],
                "SetupDefaults":
                [(": " if i == 0 else ", ") + fieldname + "(%s)" %
                 (field.generator_default())
                 for i, (fieldname, field) in enumerate(typedata.fields)],
                "Docstring":
                None if not typedata.__doc__ else
                [s.strip() for s in typedata.__doc__.split('\n')],
                "Specs":
                [] + (["BlueprintType"] if BlueprintType in typedata.specifiers
                      else []) + (["Blueprintable"] if Blueprintable
                                  in typedata.specifiers else []),
                "BlueprintType":
                BlueprintType in typedata.specifiers,
                "Blueprintable":
                Blueprintable in typedata.specifiers,
                "IsLocal":
                Local in typedata.specifiers,
                "PgSpec":
                typedata.pg_spec,
                "DocstringDecay":
                docstring_decay,
                "Signature":
                typedata.get_full_type_signature(),
                "InspectInfo":
                getattr(typedata, "inspect_info", None),
                "GeneratedCode":
                getattr(typedata, "__gencode__", None)
            }

        enums = {
            name: t
            for name, t in TypeBase.all_types['types'].items()
            if getattr(t, 'is_enum_type', False)
        }

        # types_info = dict()
        for typename, typedata in enums.items():
            members = dict()
            for fieldname in typedata.get_members().keys():

                UMETA = list()
                field_data = typedata.__annotations__[fieldname]
                if field_data.hidden:
                    UMETA.append("Hidden")
                if field_data.display_name:
                    UMETA.append("DisplayName = \"%s\"" %
                                 field_data.display_name)
                if field_data.tooltip:
                    UMETA.append("ToolTip = \"%s\"" % field_data.tooltip)

                members[fieldname] = {
                    'Value': typedata.values_specified.get(fieldname, None),
                    'UMETA': UMETA,
                }

            doc_description, \
                (doc_warnings,), \
                (doc_cvars,) = get_decayed_docstring(typedata.__doc__,
                                                     simples=["warning"],
                                                     multiples=["cvar"])

            docstring_decay = {
                "Description": doc_description,
                "Vars": doc_cvars,
                "Warnings": doc_warnings,
            }

            types_info[typename] = {
                "Name":
                typename,
                "Kind":
                "Enum",
                "Members":
                members,
                "Docstring":
                None if not typedata.__doc__ else
                [s.strip() for s in typedata.__doc__.split('\n')],
                "Specs":
                [] + (["BlueprintType"] if BlueprintType in typedata.specifiers
                      else []) + (["Blueprintable"] if Blueprintable
                                  in typedata.specifiers else []),
                "BlueprintType":
                BlueprintType in typedata.specifiers,
                "Blueprintable":
                Blueprintable in typedata.specifiers,
                "IsLocal":
                Local in typedata.specifiers,
                "PgSpec":
                typedata.pg_spec,
                "DocstringDecay":
                docstring_decay,
                "InspectInfo":
                getattr(typedata, "inspect_info", None),
                "GeneratedCode":
                getattr(typedata, "GeneratedCode", None)
            }

        local_types = {
            name: t
            for name, t in TypeBase.all_types['types'].items()
            if getattr(t, 'is_local_datatype', False)
        }
        for typename, typedata in local_types.items():
            types_info[typename] = {
                "Name":
                typename,
                "Kind":
                "Local",
                "Docstring":
                None if not typedata.__doc__ else
                [s.strip() for s in typedata.__doc__.split('\n')],
                "PgSpec":
                typedata.pg_spec,
                "InspectInfo":
                getattr(typedata, "inspect_info", None),
            }

        storage_info = dict()

        for st in storage_list:
            storage_info[st.name] = {
                "Name": st.name,
                "Type": st.type.__name__,
                "AvailableEverywhere": AvailableEverywhere in st.specifiers,
            }

        with open(ConfigurationGenerator().gen_info_filename, 'wb') as f:
            import hashlib
            import base64

            d = to_json({
                'entities': classes_info,
                'types': types_info,
                'storage': storage_info
            }).encode()
            if ConfigGlobals.UseVersionGeneratorSignature:
                ver = ConfigGlobals.Version.encode()
                self.generator_signature = base64.encodebytes(
                    hashlib.md5(ver).digest()).decode().replace(
                        "\n", " ").replace("\r", " ").replace("\\", "/")
            else:
                self.generator_signature = base64.encodebytes(
                    hashlib.md5(d).digest()).decode().replace(
                        "\n", " ").replace("\r", " ").replace("\\", "/")

            d = to_json(
                {
                    'entities': classes_info,
                    'types': types_info,
                    'storage': storage_info,
                    'signature': self.generator_signature
                },
                indent=4,
                ensure_ascii=False).encode('utf8')
            f.write(d)

        self.classes_info = classes_info
        self.types_info = types_info
        self.storage_info = storage_info
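
The generator signature written at the end of generate_config() is a sanitized, base64-encoded MD5 digest of either the version string or the serialized config. A standalone sketch of that derivation, using only the standard library and a made-up payload:

import base64
import hashlib

# Illustrative payload; generate_config() hashes either ConfigGlobals.Version
# or the JSON-encoded entities/types/storage dictionary.
payload = b'{"entities": {}, "types": {}, "storage": {}}'

signature = base64.encodebytes(hashlib.md5(payload).digest()).decode()
signature = signature.replace("\n", " ").replace("\r", " ").replace("\\", "/")
print(signature)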
Example #8
from Execptions import *

from Core.Globals import Globals

Globals.IsInGenerator = True

code_gen = CodeGen()

Globals.IsInGenerator = False

Globals.HaloNet_imported = True

from DBApp import DBApp
from Core.ConfigSystem.GeneratorConfig import ConfigurationGenerator

ConfigurationGenerator().load_generated_info()

from Core.LocalDatatypes import *
from Core.Logging import INFO_MSG, ERROR_MSG
from Core.Service import Service
from Core.Utils import runnable, is_valid


@runnable
class Gen(Service):
    async def start(self):
        self.db, _ = await self.async_wakeup_service_locally_by_name("DBApp")
        if is_valid(self.db):
            if CommandLine.get_arguments().clearall:
                await self.db.ClearDatabase()
                return
Example #9
    async def CreateClassTable(self, class_name: FString, fields: TMap[FString, FString]):
        """
        Создать таблицу класса
        @param class_name: имя класса
        @param fields: поля {имя: тип}
        """
        INFO_MSG(f"Create class table {class_name}, {fields}")
        # await self.driver.exec_raw(""" DROP TABLE IF EXISTS public.Class_{0}; """.format(class_name))

        fields_substitute = str()
        for field_name, field_typedata in fields.items():
            T = self.find_type(field_typedata)
            pg_spec = T.pg_spec if T else 'INTEGER'
            default = ConfigurationGenerator().generated_entities_info.get_by_name(class_name).get_property('base', field_name).default

            fields_substitute += ', "%s" %s DEFAULT %s' % (field_name, pg_spec, pg_str(default))

        self.username = None

        current_data = await self.driver.exec_raw("""  SELECT class_data 
                                                       FROM public.classes 
                                                       WHERE class_name='{0}';
                                                  """.format(class_name))
        for c in current_data:
            r = c['class_data']
            deleted_columns = list()
            new_columns = list()
            changed_columns = list()
            alter_strings = list()
            for column_name, column_type in fields.items():
                T = self.find_type(column_type)
                pg_spec = T.pg_spec if T else 'INTEGER'
                default = ConfigurationGenerator().generated_entities_info.get_by_name(class_name).get_property('base', column_name).default
                default = pg_str(default)

                if column_name not in r:
                    new_columns.append((column_name, column_type))
                    alter_strings.append("ADD COLUMN {0} {1} DEFAULT {2}".format(column_name, pg_spec, default))
                elif column_name in r and r[column_name] != column_type:
                    changed_columns.append((column_name, r[column_name], column_type))
                    alter_strings.append("ALTER COLUMN {0} TYPE {1}, ALTER COLUMN {0} SET DEFAULT {2}".format(column_name, pg_spec, default))

            for column_name, column_type in r.items():
                if column_name not in fields:
                    alter_strings.append("DROP COLUMN {0}".format(column_name))
                    deleted_columns.append(column_name)

            if deleted_columns or changed_columns or new_columns:
                try:
                    await self.driver.exec_raw("""  ALTER TABLE IF EXISTS "class_{0}" {1};
                                               """.format(class_name,
                                                          ", ".join(alter_strings)))
                except Exception as e:
                    ERROR_MSG("An exception occurred, returning...", e)
                    return

                if deleted_columns:
                    WARN_MSG("Deleted columns in %s %i: [%s]" % (class_name, len(deleted_columns), ", ".join(deleted_columns)))

                if changed_columns:
                    WARN_MSG("Changed columns in %s %i: [%s]" % (class_name, len(changed_columns), ", ".join(["%s from %s to %s" % c for c in changed_columns])))

                if new_columns:
                    INFO_MSG("New columns in %s %i: [%s]" % (class_name, len(new_columns), ", ".join("%s %s" % c for c in new_columns)))
        INFO_MSG(class_name, fields, fields_substitute)
        await self.driver.exec_raw("""  INSERT INTO public.classes (class_name, class_data) VALUES ('{0}', '{1}')
                                        ON CONFLICT (class_name) DO
                                        UPDATE SET class_data = '{1}';
                                   """.format(class_name, json.dumps(fields)))

        await self.driver.exec_raw("""  CREATE TABLE IF NOT EXISTS "class_{0}"
                                        (
                                            rec_id SERIAL PRIMARY KEY NOT NULL,
                                            db_id SERIAL {1}
                                        );
                                        CREATE UNIQUE INDEX IF NOT EXISTS "class_{0}_rec_id_uindex" ON "class_{0}" (rec_id);
                                   """.format(class_name, fields_substitute))

        INFO_MSG(fields)
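
To make the string assembly above easier to follow, here is a standalone sketch of the DDL that CreateClassTable() ends up issuing for a hypothetical class. The field map, pg_spec mapping and defaults are assumptions for illustration; the real values come from find_type() and the generated entity info.

fields = {"name": "FString", "level": "int32"}       # hypothetical field map
pg_specs = {"FString": "TEXT", "int32": "INTEGER"}   # assumed pg_spec per type
defaults = {"name": "''", "level": "0"}              # assumed pg_str() output

fields_substitute = ""
for field_name, field_typedata in fields.items():
    fields_substitute += ', "%s" %s DEFAULT %s' % (
        field_name, pg_specs[field_typedata], defaults[field_name])

print("""CREATE TABLE IF NOT EXISTS "class_{0}"
(
    rec_id SERIAL PRIMARY KEY NOT NULL,
    db_id SERIAL {1}
);""".format("Player", fields_substitute))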