Example 1
def get_base(
    types_dict: TypesDict,
    tmp_dir: str,
    scene_object_types: Set[str],
    obj_type: ObjectType,
    zf: zipfile.ZipFile,
    ot_path: str,
) -> None:

    base = base_from_source(obj_type.source, obj_type.id)

    if base is None:
        raise Arcor2Exception(
            f"Could not determine base class for {obj_type.id}.")

    if base in types_dict.keys() | built_in_types_names() | scene_object_types:
        return

    logger.debug(f"Getting {base} as base of {obj_type.id}.")
    base_obj_type = ps.get_object_type(base)

    zf.writestr(
        os.path.join(ot_path, humps.depascalize(base_obj_type.id)) + ".py",
        base_obj_type.source)

    types_dict[base_obj_type.id] = save_and_import_type_def(
        base_obj_type.source, base_obj_type.id, Generic, tmp_dir,
        OBJECT_TYPE_MODULE)

    # try to get base of the base
    get_base(types_dict, tmp_dir, scene_object_types, base_obj_type, zf,
             ot_path)
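
Example 1 resolves a single base class per ObjectType, whereas Examples 2 and 3 below iterate over several bases: the first one is taken as the ObjectType's base and the remaining ones are treated as potential mixins. As a rough, self-contained illustration of the data these helpers work with, here is a simplified stand-in for base_from_source (the real arcor2 helper has a different signature and richer error handling; bases_of and the Tester source below are made up):

import ast


def bases_of(source: str, cls_name: str) -> list[str]:
    """Simplified stand-in: return the base-class names of `cls_name` found in `source`."""

    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.ClassDef) and node.name == cls_name:
            return [b.id for b in node.bases if isinstance(b, ast.Name)]
    return []


src = "class Tester(GenericWithPose, MixinA):\n    pass\n"
print(bases_of(src, "Tester"))  # ['GenericWithPose', 'MixinA'] -> base first, then potential mixins
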
Example 2
def get_base_from_project_service(
    types_dict: TypesDict,
    tmp_dir: str,
    scene_object_types: set[str],
    obj_type: ObjectType,
    zf: zipfile.ZipFile,
    ot_path: str,
    ast: ast.AST,
) -> None:

    for idx, base in enumerate(base_from_source(ast, obj_type.id)):

        if base in types_dict.keys() | built_in_types_names() | scene_object_types:
            continue

        logger.debug(f"Getting {base} as base of {obj_type.id}.")
        base_obj_type = ps.get_object_type(base)

        # first try if the code is valid
        try:
            base_ast = parse(base_obj_type.source)
        except Arcor2Exception:
            raise FlaskException(
                f"Invalid code of the {base_obj_type.id} (base of {obj_type.id}).",
                error_code=401)

        # try to get base of the base
        get_base_from_project_service(types_dict, tmp_dir, scene_object_types,
                                      base_obj_type, zf, ot_path, base_ast)

        if idx == 0:  # this is the base ObjectType
            types_dict[base_obj_type.id] = save_and_import_type_def(
                base_obj_type.source, base_obj_type.id, Generic, tmp_dir,
                OBJECT_TYPE_MODULE)
        else:  # these are potential mixins (just try to import them, no need to store them)
            save_and_import_type_def(base_obj_type.source, base_obj_type.id,
                                     object, tmp_dir, OBJECT_TYPE_MODULE)
            scene_object_types.add(base_obj_type.id)

        zf.writestr(
            os.path.join(ot_path, humps.depascalize(base_obj_type.id)) + ".py",
            base_obj_type.source)
Example 3
def get_base_from_imported_package(obj_type: ObjectType,
                                   types_dict: dict[str, ObjectType],
                                   zip_file: zipfile.ZipFile, tmp_dir: str,
                                   ast: ast.AST) -> None:

    for idx, base in enumerate(base_from_source(ast, obj_type.id)):

        if base in types_dict.keys() | built_in_types_names():
            continue

        logger.debug(f"Getting {base} as base of {obj_type.id}.")

        try:
            base_obj_type_src = read_str_from_zip(
                zip_file, f"object_types/{humps.depascalize(base)}.py")
        except KeyError:
            raise FlaskException(
                f"Could not find {base} object type (base of {obj_type.id}).",
                error_code=401)

        # first try if the code is valid
        try:
            base_ast = parse(base_obj_type_src)
        except Arcor2Exception:
            raise FlaskException(
                f"Invalid code of the {base} (base of {obj_type.id}).",
                error_code=401)

        types_dict[base] = ObjectType(base, base_obj_type_src)

        # try to get base of the base
        get_base_from_imported_package(types_dict[base], types_dict, zip_file,
                                       tmp_dir, base_ast)

        # then, try to import it (no need to store the result)
        if idx == 0:  # this is the base ObjectType
            save_and_import_type_def(base_obj_type_src, base, Generic, tmp_dir,
                                     OBJECT_TYPE_MODULE)
        else:  # these are potential mixins
            save_and_import_type_def(base_obj_type_src, base, object, tmp_dir,
                                     OBJECT_TYPE_MODULE)
Example 4
def new_object_type(parent: ObjectTypeMeta, child: ObjectTypeMeta) -> AST:

    assert parent.type == child.base

    tree = Module(body=[], type_ignores=[])

    if parent.type in built_in_types_names():
        import_from = arcor2.object_types.abstract.__name__
    else:
        import_from = f".{humps.depascalize(parent.type)}"

    tree.body.append(
        ImportFrom(module=import_from,
                   names=[alias(name=parent.type, asname=None)],
                   level=0))

    c = ClassDef(
        name=child.type,
        bases=[get_name(parent.type)],
        keywords=[],
        body=[
            Assign(
                targets=[Name(id="_ABSTRACT", ctx=Store())],
                value=NameConstant(value=False, kind=None),
                type_comment=None,
            )
        ],
        decorator_list=[],
    )

    # TODO add docstring with description (if provided)
    c.body.append(Pass())

    tree.body.append(c)

    return tree
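
For illustration only, the following sketch hand-builds a class node of the same shape as the one produced by new_object_type (made-up names, import line omitted) and unparses it with ast.unparse (Python 3.9+); the real helper returns the tree itself rather than source text:

import ast
from ast import Assign, ClassDef, Constant, Load, Module, Name, Pass, Store

cls = ClassDef(
    name="MyChild",
    bases=[Name(id="MyParent", ctx=Load())],
    keywords=[],
    body=[
        Assign(targets=[Name(id="_ABSTRACT", ctx=Store())], value=Constant(value=False)),
        Pass(),
    ],
    decorator_list=[],
    type_params=[],  # real field on Python 3.12+; a harmless extra attribute on older versions
)

tree = ast.fix_missing_locations(Module(body=[cls], type_ignores=[]))
print(ast.unparse(tree))
# class MyChild(MyParent):
#     _ABSTRACT = False
#     pass
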
Example 5
async def get_object_data(object_types: ObjectTypeDict, obj_id: str) -> None:

    logger.debug(f"Processing {obj_id}.")

    if obj_id in object_types:
        logger.debug(f"{obj_id} already processed, skipping...")
        return

    obj_iddesc = await storage.get_object_type_iddesc(obj_id)

    if obj_id in glob.OBJECT_TYPES:

        assert obj_iddesc.modified
        assert glob.OBJECT_TYPES[obj_id].meta.modified, f"Object {obj_id} does not have 'modified' in its meta."

        if obj_iddesc.modified == glob.OBJECT_TYPES[obj_id].meta.modified:
            logger.debug(f"No need to update {obj_id}.")
            return

    obj = await storage.get_object_type(obj_id)

    try:
        bases = otu.base_from_source(obj.source, obj_id)

        if not bases:
            logger.debug(
                f"{obj_id} is definitely not an ObjectType (subclass of {object.__name__}), maybe mixin?"
            )
            return

        if bases[0] not in object_types.keys() | built_in_types_names():
            logger.debug(f"Getting base class {bases[0]} for {obj_id}.")
            await get_object_data(object_types, bases[0])

        for mixin in bases[1:]:
            mixin_obj = await storage.get_object_type(mixin)

            await hlp.run_in_executor(
                hlp.save_and_import_type_def,
                mixin_obj.source,
                mixin_obj.id,
                object,
                settings.OBJECT_TYPE_PATH,
                settings.OBJECT_TYPE_MODULE,
            )

    except Arcor2Exception as e:
        logger.error(
            f"Disabling ObjectType {obj.id}: can't get a base. {str(e)}")
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id,
                           "ObjectType disabled.",
                           disabled=True,
                           problem="Can't get base.",
                           modified=obj.modified))
        return

    logger.debug(f"Updating {obj_id}.")

    try:
        type_def = await hlp.run_in_executor(
            hlp.save_and_import_type_def,
            obj.source,
            obj.id,
            Generic,
            settings.OBJECT_TYPE_PATH,
            settings.OBJECT_TYPE_MODULE,
        )
    except Arcor2Exception as e:
        logger.debug(f"{obj.id} is probably not an ObjectType. {str(e)}")
        return

    assert issubclass(type_def, Generic)

    try:
        meta = meta_from_def(type_def)
    except Arcor2Exception as e:
        logger.error(f"Disabling ObjectType {obj.id}.")
        logger.debug(e, exc_info=True)
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id,
                           "ObjectType disabled.",
                           disabled=True,
                           problem=str(e),
                           modified=obj.modified))
        return

    meta.modified = obj.modified

    if obj.model:
        try:
            model = await storage.get_model(obj.model.id, obj.model.type)
        except Arcor2Exception as e:
            logger.error(
                f"{obj.model.id}: failed to get collision model of type {obj.model.type}. {str(e)}"
            )
            meta.disabled = True
            meta.problem = "Can't get collision model."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        if isinstance(model, Mesh) and model.data_id not in await storage.files_ids():
            logger.error(
                f"Disabling {meta.type} as its mesh file {model.data_id} does not exist."
            )
            meta.disabled = True
            meta.problem = "Mesh file does not exist."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        kwargs = {model.type().value.lower(): model}
        meta.object_model = ObjectModel(model.type(), **kwargs)  # type: ignore

    ast = parse(obj.source)
    otd = ObjectTypeData(meta, type_def, object_actions(type_def, ast), ast)

    object_types[obj_id] = otd
Example 6
async def get_object_types() -> UpdatedObjectTypes:
    """Serves to initialize or update knowledge about awailable ObjectTypes.

    :return:
    """

    initialization = False

    # initialize with built-in types, this has to be done just once
    if not glob.OBJECT_TYPES:
        logger.debug("Initialization of ObjectTypes.")
        initialization = True
        await hlp.run_in_executor(prepare_object_types_dir,
                                  settings.OBJECT_TYPE_PATH,
                                  settings.OBJECT_TYPE_MODULE)
        glob.OBJECT_TYPES.update(built_in_types_data())

    updated_object_types: ObjectTypeDict = {}

    object_type_ids: Union[set[str],
                           list[str]] = await storage.get_object_type_ids()

    if __debug__:  # this should uncover potential problems with order in which ObjectTypes are processed
        import random

        object_type_ids = list(object_type_ids)
        random.shuffle(object_type_ids)

    for obj_id in object_type_ids:
        await get_object_data(updated_object_types, obj_id)

    removed_object_ids = {
        obj for obj in glob.OBJECT_TYPES.keys() if obj not in object_type_ids
    } - built_in_types_names()
    updated_object_ids = {k for k in updated_object_types.keys() if k in glob.OBJECT_TYPES}
    new_object_ids = {k for k in updated_object_types.keys() if k not in glob.OBJECT_TYPES}

    logger.debug(f"Removed ids: {removed_object_ids}")
    logger.debug(f"Updated ids: {updated_object_ids}")
    logger.debug(f"New ids: {new_object_ids}")

    if not initialization and removed_object_ids:

        # TODO remove it from sys.modules

        remove_evt = ChangedObjectTypes([
            v.meta for k, v in glob.OBJECT_TYPES.items()
            if k in removed_object_ids
        ])
        remove_evt.change_type = Event.Type.REMOVE
        asyncio.ensure_future(notif.broadcast_event(remove_evt))

        for removed in removed_object_ids:
            assert removed not in built_in_types_names(), "Attempt to remove built-in type."
            del glob.OBJECT_TYPES[removed]
            await hlp.run_in_executor(remove_object_type, removed)

    glob.OBJECT_TYPES.update(updated_object_types)

    logger.debug(f"All known ids: {glob.OBJECT_TYPES.keys()}")

    for obj_type in updated_object_types.values():

        # if description is missing, try to get it from ancestor(s)
        if not obj_type.meta.description:

            try:
                obj_type.meta.description = obj_description_from_base(
                    glob.OBJECT_TYPES, obj_type.meta)
            except otu.DataError as e:
                logger.error(
                    f"Failed to get info from base for {obj_type}, error: '{e}'."
                )

        if not obj_type.meta.disabled and not obj_type.meta.built_in:
            add_ancestor_actions(obj_type.meta.type, glob.OBJECT_TYPES)

    if not initialization:

        if updated_object_ids:
            update_evt = ChangedObjectTypes([
                v.meta for k, v in glob.OBJECT_TYPES.items()
                if k in updated_object_ids
            ])
            update_evt.change_type = Event.Type.UPDATE
            asyncio.ensure_future(notif.broadcast_event(update_evt))

        if new_object_ids:
            add_evt = ChangedObjectTypes([
                v.meta for k, v in glob.OBJECT_TYPES.items()
                if k in new_object_ids
            ])
            add_evt.change_type = Event.Type.ADD
            asyncio.ensure_future(notif.broadcast_event(add_evt))

    for obj_type in updated_object_types.values():

        if obj_type.type_def and issubclass(obj_type.type_def, Robot) and not obj_type.type_def.abstract():
            await get_robot_meta(obj_type)

    # if object does not change but its base has changed, it has to be reloaded
    for obj_id, obj in glob.OBJECT_TYPES.items():

        if obj_id in updated_object_ids:
            continue

        if obj.type_def and obj.meta.base in updated_object_ids:

            logger.debug(
                f"Re-importing {obj.meta.type} because its base {obj.meta.base} type has changed."
            )
            obj.type_def = await hlp.run_in_executor(
                hlp.import_type_def,
                obj.meta.type,
                Generic,
                settings.OBJECT_TYPE_PATH,
                settings.OBJECT_TYPE_MODULE,
            )

    return UpdatedObjectTypes(new_object_ids, updated_object_ids,
                              removed_object_ids)
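
A toy illustration (made-up ids, not arcor2 data) of the bookkeeping above: ids known before the refresh but absent from the new listing are removed unless built-in, while ids processed in this round are split into updated (already known) and new:

known = {"Box", "Robot", "Tester", "OldType"}    # stands in for glob.OBJECT_TYPES before the refresh
built_in = {"Box", "Robot"}                      # stands in for built_in_types_names()
listed = {"Tester", "NewType"}                   # ids returned by the Project service
processed = {"Tester", "NewType"}                # keys of updated_object_types after get_object_data()

removed = {i for i in known if i not in listed} - built_in
updated = {i for i in processed if i in known}
new = {i for i in processed if i not in known}
print(removed, updated, new)  # {'OldType'} {'Tester'} {'NewType'}
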
Example 7
def project_import() -> RespT:
    """Imports a project from execution package.
    ---
    put:
      description: Imports a project from execution package.
      parameters:
            - in: query
              name: overwriteScene
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteProject
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteObjectTypes
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteProjectSources
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteCollisionModels
              schema:
                type: boolean
                default: false
      requestBody:
              content:
                multipart/form-data:
                  schema:
                    type: object
                    required:
                        - executionPackage
                    properties:
                      executionPackage:
                        type: string
                        format: binary
      responses:
        200:
          description: Ok
          content:
                application/json:
                  schema:
                    $ref: ImportResult
        400:
          description: Some other error occurred.
          content:
                application/json:
                  schema:
                    type: string
        401:
          description: Invalid execution package.
          content:
                application/json:
                  schema:
                    type: string
        402:
          description: A difference between package/project service detected (overwrite needed).
          content:
                application/json:
                  schema:
                    type: string
        404:
          description: Something is missing.
          content:
                application/json:
                  schema:
                    type: string
    """

    file = request.files["executionPackage"]

    overwrite_scene = request.args.get("overwriteScene",
                                       default="false") == "true"
    overwrite_project = request.args.get("overwriteProject",
                                         default="false") == "true"
    overwrite_object_types = request.args.get("overwriteObjectTypes",
                                              default="false") == "true"
    overwrite_project_sources = request.args.get("overwriteProjectSources",
                                                 default="false") == "true"
    overwrite_collision_models = request.args.get("overwriteCollisionModels",
                                                  default="false") == "true"

    objects: Dict[str, ObjectType] = {}
    models: Dict[str, Models] = {}
    """
    1) get and validate all data from zip
    2) check what is already on the Project service
    3) do updates
    """
    # BytesIO + stream.read() = workaround for a Python bug (SpooledTemporaryFile not seekable)
    with zipfile.ZipFile(BytesIO(file.stream.read())) as zip_file:

        try:
            project = read_dc_from_zip(zip_file, "data/project.json", Project)
        except KeyError:
            raise FlaskException("Could not find project.json.",
                                 error_code=404)
        except (JSONDecodeError, ValidationError) as e:
            raise FlaskException(f"Failed to process project.json. {str(e)}",
                                 error_code=401)

        try:
            scene = read_dc_from_zip(zip_file, "data/scene.json", Scene)
        except KeyError:
            raise FlaskException("Could not find scene.json.", error_code=404)
        except (JSONDecodeError, ValidationError) as e:
            raise FlaskException(f"Failed to process scene.json. {str(e)}",
                                 error_code=401)

        if project.scene_id != scene.id:
            raise FlaskException("Project assigned to different scene id.",
                                 error_code=401)

        for scene_obj in scene.objects:

            obj_type_name = scene_obj.type

            if obj_type_name in objects:
                continue

            try:
                obj_type_src = read_str_from_zip(
                    zip_file,
                    f"object_types/{humps.depascalize(obj_type_name)}.py")
            except KeyError:
                raise FlaskException(
                    f"Object type {obj_type_name} is missing in the package.",
                    error_code=404)

            try:
                parse(obj_type_src)
            except Arcor2Exception:
                raise FlaskException(
                    f"Invalid code of the {obj_type_name} object type.",
                    error_code=401)

            # TODO description (is it used somewhere?)
            objects[obj_type_name] = ObjectType(obj_type_name, obj_type_src)

            logger.debug(f"Just imported {obj_type_name}.")

            while True:
                base = base_from_source(obj_type_src, obj_type_name)

                if not base:
                    raise FlaskException(
                        f"Could not determine base class for {scene_obj.type}.",
                        error_code=401)

                if base in objects.keys() | built_in_types_names():
                    break

                logger.debug(f"Importing {base} as a base of {obj_type_name}.")

                try:
                    base_obj_type_src = read_str_from_zip(
                        zip_file, f"object_types/{humps.depascalize(base)}.py")
                except KeyError:
                    raise FlaskException(
                        f"Could not find {base} object type (base of {obj_type_name}).",
                        error_code=404)

                try:
                    parse(base_obj_type_src)
                except Arcor2Exception:
                    raise FlaskException(
                        f"Invalid code of the {base} object type (base of {obj_type_name}).",
                        error_code=401)

                objects[base] = ObjectType(base, base_obj_type_src)

                obj_type_name = base
                obj_type_src = base_obj_type_src

        for obj_type in objects.values():  # handle models

            try:
                model = read_dc_from_zip(
                    zip_file,
                    f"data/models/{humps.depascalize(obj_type.id)}.json",
                    ObjectModel).model()
            except KeyError:
                continue

            logger.debug(f"Found model {model.id} of type {model.type}.")

            obj_type.model = model.metamodel()

            if obj_type.id != obj_type.model.id:
                raise FlaskException(
                    f"Model id ({obj_type.model.id}) has to be the same as ObjectType id ({obj_type.id}).",
                    error_code=401,
                )

            models[obj_type.id] = model

        if not project.has_logic:
            logger.debug("Importing the main script.")

            try:
                script = zip_file.read("script.py").decode("UTF-8")
            except KeyError:
                raise FlaskException("Could not find script.py.",
                                     error_code=404)

            try:
                parse(script)
            except Arcor2Exception:
                raise FlaskException("Invalid code of the main script.",
                                     error_code=401)

    # check that we are not going to overwrite something
    if not overwrite_scene:

        try:
            ps_scene = ps.get_scene(scene.id)
        except ps.ProjectServiceException:
            pass
        else:

            ps_scene.modified = scene.modified  # modified is updated with each PUT

            if ps_scene != scene:
                raise FlaskException(
                    "Scene difference detected. Overwrite needed.",
                    error_code=402)

    if not overwrite_project:

        try:
            ps_project = ps.get_project(project.id)
        except ps.ProjectServiceException:
            pass
        else:

            ps_project.modified = project.modified

            if ps_project != project:
                raise FlaskException(
                    "Project difference detected. Overwrite needed.",
                    error_code=402)

    if not overwrite_object_types:

        for obj_type in objects.values():

            try:
                if ps.get_object_type(obj_type.id) != obj_type:
                    raise FlaskException(
                        f"Difference detected for {obj_type.id} object type. Overwrite needed.",
                        error_code=402)
            except ps.ProjectServiceException:
                pass

    if not overwrite_project_sources and not project.has_logic:

        try:
            if ps.get_project_sources(project.id).script != script:
                raise FlaskException(
                    "Script difference detected. Overwrite needed.",
                    error_code=402)
        except ps.ProjectServiceException:
            pass

    if not overwrite_collision_models:

        for model in models.values():
            try:
                if model != ps.get_model(model.id, model.type()):
                    raise FlaskException(
                        "Collision model difference detected. Overwrite needed.",
                        error_code=402)
            except ps.ProjectServiceException:
                pass

    for model in models.values():
        ps.put_model(model)

    for obj_type in objects.values():
        ps.update_object_type(obj_type)

    ps.update_scene(scene)
    ps.update_project(project)
    if not project.has_logic:
        ps.update_project_sources(ProjectSources(project.id, script))

    logger.info(
        f"Imported project {project.name} (scene {scene.name}), with {len(objects)} "
        f"object type(s) and {len(models)} model(s).")

    return ImportResult(scene.id, project.id).to_json(), 200
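
A hypothetical client-side call for the endpoint documented above; the base URL and route are assumptions, while the query parameters and the executionPackage multipart field come from the OpenAPI docstring:

import requests

with open("pkg.zip", "rb") as f:
    resp = requests.put(
        "http://localhost:5009/project/import",  # assumed service URL and route
        params={"overwriteScene": "true", "overwriteObjectTypes": "false"},
        files={"executionPackage": ("pkg.zip", f, "application/octet-stream")},
    )

print(resp.status_code, resp.text)  # 200 + ImportResult on success, 4xx + error string otherwise
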
Example 8
async def get_object_data(object_types: ObjectTypeDict, obj_id: str) -> None:

    glob.logger.debug(f"Processing {obj_id}.")

    if obj_id in object_types:
        glob.logger.debug(f"{obj_id} already processed, skipping...")
        return

    obj = await storage.get_object_type(obj_id)

    if obj_id in glob.OBJECT_TYPES and glob.OBJECT_TYPES[obj_id].type_def is not None:

        stored_type_def = glob.OBJECT_TYPES[obj_id].type_def
        assert stored_type_def

        # TODO do not compare sources but 'modified`
        # the code we get from type_def has Unix line endings, while the code from Project service might have Windows...
        obj.source = convert_line_endings_to_unix(obj.source)

        if get_containing_module_sources(stored_type_def) == obj.source:
            glob.logger.debug(f"No need to update {obj_id}.")
            return

    try:
        bases = otu.base_from_source(obj.source, obj_id)
        if bases and bases[0] not in object_types.keys() | built_in_types_names():
            glob.logger.debug(f"Getting base class {bases[0]} for {obj_id}.")
            await get_object_data(object_types, bases[0])

        for mixin in bases[1:]:
            mixin_obj = await storage.get_object_type(mixin)

            await hlp.run_in_executor(
                hlp.save_and_import_type_def,
                mixin_obj.source,
                mixin_obj.id,
                object,
                settings.OBJECT_TYPE_PATH,
                settings.OBJECT_TYPE_MODULE,
            )

    except Arcor2Exception as e:
        glob.logger.warning(f"Disabling object type {obj.id}: can't get a base. {str(e)}")
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "Object type disabled.", disabled=True, problem="Can't get base.")
        )
        return

    glob.logger.debug(f"Updating {obj_id}.")

    try:
        type_def = await hlp.run_in_executor(
            hlp.save_and_import_type_def,
            obj.source,
            obj.id,
            Generic,
            settings.OBJECT_TYPE_PATH,
            settings.OBJECT_TYPE_MODULE,
        )
    except Arcor2Exception as e:
        glob.logger.debug(f"{obj.id} is probably not an object type. {str(e)}")
        return

    assert issubclass(type_def, Generic)

    try:
        meta = meta_from_def(type_def)
        otu.get_settings_def(type_def)  # just to check if settings are ok
    except Arcor2Exception as e:
        glob.logger.warning(f"Disabling object type {obj.id}.")
        glob.logger.debug(e, exc_info=True)
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "Object type disabled.", disabled=True, problem=str(e))
        )
        return

    if obj.model:
        try:
            model = await storage.get_model(obj.model.id, obj.model.type)
        except Arcor2Exception:
            glob.logger.error(f"{obj.model.id}: failed to get collision model of type {obj.model.type}.")
            meta.disabled = True
            meta.problem = "Can't get collision model."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        kwargs = {model.type().value.lower(): model}
        meta.object_model = ObjectModel(model.type(), **kwargs)  # type: ignore

    ast = parse(obj.source)
    otd = ObjectTypeData(meta, type_def, object_actions(type_def, ast), ast)

    object_types[obj_id] = otd
Example 9
async def get_object_data(object_types: ObjectTypeDict, obj_id: str) -> None:

    glob.logger.debug(f"Processing {obj_id}.")

    if obj_id in object_types:
        glob.logger.debug(f"{obj_id} already processed, skipping...")
        return

    obj = await storage.get_object_type(obj_id)

    if obj_id in glob.OBJECT_TYPES and glob.OBJECT_TYPES[obj_id].type_def is not None:

        stored_type_def = glob.OBJECT_TYPES[obj_id].type_def
        assert stored_type_def
        if hash(get_containing_module_sources(stored_type_def)) == hash(obj.source):
            glob.logger.debug(f"No need to update {obj_id}.")
            return

    try:
        base = otu.base_from_source(obj.source, obj_id)
        if base and base not in object_types.keys() | built_in_types_names():
            glob.logger.debug(f"Getting base class {base} for {obj_id}.")
            await get_object_data(object_types, base)
    except Arcor2Exception:
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id,
                           "Object type disabled.",
                           disabled=True,
                           problem="Can't get base."))
        return

    glob.logger.debug(f"Updating {obj_id}.")

    try:
        type_def = await hlp.run_in_executor(
            hlp.save_and_import_type_def,
            obj.source,
            obj.id,
            Generic,
            settings.OBJECT_TYPE_PATH,
            settings.OBJECT_TYPE_MODULE,
        )
        assert issubclass(type_def, Generic)
        meta = meta_from_def(type_def)
        otu.get_settings_def(type_def)  # just to check if settings are ok
    except Arcor2Exception as e:
        glob.logger.warning(f"Disabling object type {obj.id}.")
        glob.logger.debug(e, exc_info=True)
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id,
                           "Object type disabled.",
                           disabled=True,
                           problem=str(e)))
        return

    if obj.model:
        try:
            model = await storage.get_model(obj.model.id, obj.model.type)
        except Arcor2Exception:
            glob.logger.error(
                f"{obj.model.id}: failed to get collision model of type {obj.model.type}."
            )
            meta.disabled = True
            meta.problem = "Can't get collision model."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        kwargs = {model.type().value.lower(): model}
        meta.object_model = ObjectModel(model.type(), **kwargs)  # type: ignore

    ast = parse(obj.source)
    otd = ObjectTypeData(meta, type_def, object_actions(type_def, ast), ast)

    object_types[obj_id] = otd
Example 10
    def __init__(self, apply_action_mapping: bool = True) -> None:

        models: dict[str, Optional[Models]] = {}

        scene = self.read_project_data(Scene.__name__.lower(), Scene)
        project = self.read_project_data(Project.__name__.lower(), Project)

        self.scene = CachedScene(scene)
        self.project = CachedProject(project)

        if self.project.scene_id != self.scene.id:
            raise ResourcesException("Project/scene not consistent!")

        # make all poses absolute
        for aps in self.project.action_points_with_parent:
            # Action point pose is relative to its parent object/AP pose in scene but is absolute during runtime.
            tr.make_relative_ap_global(self.scene, self.project, aps)

        for obj_type in self.scene.object_types:

            try:
                models[obj_type] = self.read_project_data(
                    "models/" + humps.depascalize(obj_type),
                    ObjectModel).model()
            except IOError:
                models[obj_type] = None

        type_defs: TypesDict = {}

        for scene_obj_type in self.scene.object_types:  # get all type-defs

            assert scene_obj_type not in type_defs
            assert scene_obj_type not in built_in_types_names()

            module = importlib.import_module(CUSTOM_OBJECT_TYPES_MODULE + "." +
                                             humps.depascalize(scene_obj_type))

            cls = getattr(module, scene_obj_type)
            patch_object_actions(cls)
            type_defs[cls.__name__] = cls

            if apply_action_mapping:
                patch_with_action_mapping(cls, self.scene, self.project)

        action.start_paused, action.breakpoints = parse_args()

        if action.breakpoints:
            ap_ids = self.project.action_points_ids
            for bp in action.breakpoints:
                if bp not in ap_ids:
                    raise ResourcesException(f"Breakpoint ID unknown: {bp}.")

        # orientations / joints have to be monkey-patched with AP's ID in order to make breakpoints work in @action
        for ap in self.project.action_points:

            setattr(ap.position, AP_ID_ATTR, ap.id)

            for joints in self.project.ap_joints(ap.id):
                setattr(joints, AP_ID_ATTR, ap.id)

        package_id = os.path.basename(os.getcwd())
        package_meta = package.read_package_meta(package_id)
        package_info_event = PackageInfo(
            PackageInfo.Data(package_id, package_meta.name, scene, project))

        for model in models.values():

            if not model:
                continue

            if isinstance(model, Box):
                package_info_event.data.collision_models.boxes.append(model)
            elif isinstance(model, Sphere):
                package_info_event.data.collision_models.spheres.append(model)
            elif isinstance(model, Cylinder):
                package_info_event.data.collision_models.cylinders.append(model)
            elif isinstance(model, Mesh):
                package_info_event.data.collision_models.meshes.append(model)

        # following steps might take some time, so let UIs know about the package as a first thing
        print_event(package_info_event)

        # in order to prepare a clean environment (clears all configurations and all collisions)
        scene_service.stop()

        self.executor = concurrent.futures.ThreadPoolExecutor()
        futures: list[concurrent.futures.Future] = []

        for scene_obj in self.scene.objects:

            cls = type_defs[scene_obj.type]
            settings = settings_from_params(
                cls, scene_obj.parameters,
                self.project.overrides.get(scene_obj.id, None))

            if issubclass(cls, Robot):
                futures.append(
                    self.executor.submit(cls, scene_obj.id, scene_obj.name,
                                         scene_obj.pose, settings))
            elif issubclass(cls, CollisionObject):
                futures.append(
                    self.executor.submit(cls, scene_obj.id, scene_obj.name,
                                         scene_obj.pose,
                                         models[scene_obj.type], settings))
            elif issubclass(cls, GenericWithPose):
                futures.append(
                    self.executor.submit(cls, scene_obj.id, scene_obj.name,
                                         scene_obj.pose, settings))
            elif issubclass(cls, Generic):
                futures.append(
                    self.executor.submit(cls, scene_obj.id, scene_obj.name,
                                         settings))
            else:
                raise Arcor2Exception(
                    f"{cls.__name__} has unknown base class.")

        exceptions: list[Arcor2Exception] = []

        self.objects: dict[str, Generic] = {}

        for f in concurrent.futures.as_completed(futures):
            try:
                # if an object creation resulted in exception, it will be raised here
                inst = f.result()
            except Arcor2Exception as e:
                print_exception(e)
                exceptions.append(e)
            else:
                self.objects[inst.id] = inst  # successfully initialized objects

        if exceptions:  # if something failed, tear down those that succeeded and stop
            self.cleanup_all_objects()
            raise ResourcesException(" ".join([str(e) for e in exceptions]),
                                     exceptions)

        scene_service.start()

        self._stream_futures: list[concurrent.futures.Future] = []
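
A minimal sketch of the "construct in parallel, collect failures" pattern used above, with dummy constructors standing in for the ObjectType classes:

import concurrent.futures

def make(name: str) -> str:
    # stand-in for an ObjectType constructor; "broken" simulates a failing __init__
    if name == "broken":
        raise RuntimeError(f"cannot init {name}")
    return name

with concurrent.futures.ThreadPoolExecutor() as executor:
    futures = [executor.submit(make, n) for n in ("camera", "broken", "gripper")]

    created, errors = [], []
    for f in concurrent.futures.as_completed(futures):
        try:
            created.append(f.result())  # re-raises the constructor's exception, if any
        except RuntimeError as e:
            errors.append(str(e))

print(sorted(created), errors)  # ['camera', 'gripper'] ['cannot init broken']
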
Example 11
    def __init__(self, scene: Scene, project: Project,
                 models: Dict[str, Optional[Models]]) -> None:

        self.project = CachedProject(project)
        self.scene = CachedScene(scene)

        if self.project.scene_id != self.scene.id:
            raise ResourcesException("Project/scene not consistent!")

        self.objects: Dict[str, Generic] = {}

        self.type_defs: TypesDict = {}

        built_in = built_in_types_names()

        if scene_service.started():
            scene_service.stop()

        scene_service.delete_all_collisions()

        package_id = os.path.basename(os.getcwd())
        package_meta = package.read_package_meta(package_id)
        package_info_event = PackageInfo(
            PackageInfo.Data(package_id, package_meta.name, scene, project))

        for scene_obj_type in self.scene.object_types:  # get all type-defs

            assert scene_obj_type not in self.type_defs

            if scene_obj_type in built_in:
                module = importlib.import_module(
                    arcor2.object_types.__name__ + "." +
                    humps.depascalize(scene_obj_type))
            else:
                module = importlib.import_module(
                    Resources.CUSTOM_OBJECT_TYPES_MODULE + "." +
                    humps.depascalize(scene_obj_type))

            cls = getattr(module, scene_obj_type)
            patch_object_actions(
                cls,
                get_action_name_to_id(self.scene, self.project, cls.__name__))
            self.type_defs[cls.__name__] = cls

        scene_objects = list(self.scene.objects)

        # sort according to OT initialization priority (highest is initialized first)
        scene_objects.sort(key=lambda x: self.type_defs[x.type].INIT_PRIORITY,
                           reverse=True)

        for scene_obj in scene_objects:

            cls = self.type_defs[scene_obj.type]

            assert scene_obj.id not in self.objects, f"Duplicate object id {scene_obj.id}!"

            settings = settings_from_params(
                cls, scene_obj.parameters,
                self.project.overrides.get(scene_obj.id, None))

            if issubclass(cls, Robot):
                self.objects[scene_obj.id] = cls(scene_obj.id, scene_obj.name,
                                                 scene_obj.pose, settings)
            elif issubclass(cls, GenericWithPose):
                self.objects[scene_obj.id] = cls(scene_obj.id, scene_obj.name,
                                                 scene_obj.pose,
                                                 models[scene_obj.type],
                                                 settings)
            elif issubclass(cls, Generic):
                self.objects[scene_obj.id] = cls(scene_obj.id, scene_obj.name,
                                                 settings)
            else:
                raise Arcor2Exception("Unknown base class.")

        for model in models.values():

            if not model:
                continue

            if isinstance(model, Box):
                package_info_event.data.collision_models.boxes.append(model)
            elif isinstance(model, Sphere):
                package_info_event.data.collision_models.spheres.append(model)
            elif isinstance(model, Cylinder):
                package_info_event.data.collision_models.cylinders.append(model)
            elif isinstance(model, Mesh):
                package_info_event.data.collision_models.meshes.append(model)

        scene_service.start()

        print_event(package_info_event)

        # make all poses absolute
        for aps in self.project.action_points_with_parent:
            # Action point pose is relative to its parent object/AP pose in scene but is absolute during runtime.
            tr.make_relative_ap_global(self.scene, self.project, aps)
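
A toy sketch (made-up classes) of the INIT_PRIORITY ordering used above: scene objects whose class declares a higher priority are constructed first:

class Base:
    INIT_PRIORITY = 0

class Camera(Base):
    INIT_PRIORITY = 10

class Gripper(Base):
    INIT_PRIORITY = 5

type_defs = {"Camera": Camera, "Gripper": Gripper, "Base": Base}
scene_objects = ["Base", "Gripper", "Camera"]  # stand-ins for scene_obj.type values
scene_objects.sort(key=lambda t: type_defs[t].INIT_PRIORITY, reverse=True)
print(scene_objects)  # ['Camera', 'Gripper', 'Base']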