def check_object_type(type_def: Type[Generic]) -> None:
    """Validate that the given class is a usable ObjectType.

    :param type_def: candidate ObjectType class.
    :raises ObjectTypeException: when the class is not derived from Generic.
    """

    if not issubclass(type_def, Generic):
        raise ObjectTypeException("Not a subclass of Generic.")

    # The class definition itself may be fine while its module is not
    # parseable (e.g. unsupported placement of a comment), so the whole
    # containing module is parsed, not just the class.
    parse(get_containing_module_sources(type_def))
def base_from_source(source: Union[str, ast.AST], cls_name: str) -> List[str]:
    """Collect base class names of the given class.

    The first returned element is the actual base class, the remaining ones
    are mixins (i.e. the bases of the class definition in reversed order).

    :param source: module source code or an already parsed AST.
    :param cls_name: name of the class to inspect.
    :return: base name first, then mixin names; empty list when there are no bases.
    """

    tree = parse(source) if isinstance(source, str) else source
    cls_def = find_class_def(cls_name, tree)

    names: List[str] = []
    # the real base is expected to be listed last, so reverse the order
    for base in reversed(cls_def.bases):
        assert isinstance(base, ast.Name)
        names.append(base.id)
    return names
def base_from_source(source: str, cls_name: str) -> Optional[str]:
    """Return the name of the base class of the given class.

    Mixins are expected to precede the actual base in the class definition
    (e.g. ``class MyType(Mixin, Generic)``), therefore the last listed base
    is returned.

    :param source: module source code.
    :param cls_name: name of the class to inspect.
    :return: base class name, or None when the class has no bases.
    """

    cls_def = find_class_def(cls_name, parse(source))
    if not cls_def.bases:
        return None

    # allow usage of mixins - the real base comes last
    last_base = cls_def.bases[-1]
    assert isinstance(last_base, ast.Name)
    return last_base.id
def get_base_from_project_service(
    types_dict: TypesDict,
    tmp_dir: str,
    scene_object_types: set[str],
    obj_type: ObjectType,
    zf: zipfile.ZipFile,
    ot_path: str,
    ast: ast.AST,
) -> None:
    """Recursively fetch bases (and mixins) of an ObjectType from the Project service.

    Each newly discovered base is parsed, imported, recorded in ``types_dict`` /
    ``scene_object_types`` (both updated in place) and written into the package zip.

    :param types_dict: already processed ObjectType definitions.
    :param tmp_dir: directory used by save_and_import_type_def.
    :param scene_object_types: names of types already handled for this scene.
    :param obj_type: ObjectType whose bases should be resolved.
    :param zf: target zip of the execution package.
    :param ot_path: path of the object_types folder inside the zip.
    :param ast: parsed source of ``obj_type``.
        NOTE(review): this parameter shadows the ``ast`` module inside the function.
    :raises FlaskException: when the source of a base cannot be parsed (401).
    """

    # first item yielded by base_from_source is the real base, the rest are mixins
    for idx, base in enumerate(base_from_source(ast, obj_type.id)):

        # skip types that are already processed, built-in, or part of the scene
        if base in types_dict.keys() | built_in_types_names() | scene_object_types:
            continue

        logger.debug(f"Getting {base} as base of {obj_type.id}.")
        base_obj_type = ps.get_object_type(base)

        # first try if the code is valid
        try:
            base_ast = parse(base_obj_type.source)
        except Arcor2Exception:
            raise FlaskException(f"Invalid code of the {base_obj_type.id} (base of {obj_type.id}).", error_code=401)

        # try to get base of the base
        get_base_from_project_service(types_dict, tmp_dir, scene_object_types, base_obj_type, zf, ot_path, base_ast)

        if idx == 0:  # this is the base ObjectType
            types_dict[base_obj_type.id] = save_and_import_type_def(
                base_obj_type.source, base_obj_type.id, Generic, tmp_dir, OBJECT_TYPE_MODULE
            )
        else:  # these are potential mixins (just try to import them, no need to store them)
            save_and_import_type_def(base_obj_type.source, base_obj_type.id, object, tmp_dir, OBJECT_TYPE_MODULE)

        scene_object_types.add(base_obj_type.id)

        zf.writestr(os.path.join(ot_path, humps.depascalize(base_obj_type.id)) + ".py", base_obj_type.source)
def get_base_from_imported_package(
    obj_type: ObjectType, types_dict: dict[str, ObjectType], zip_file: zipfile.ZipFile, tmp_dir: str, ast: ast.AST
) -> None:
    """Recursively load bases (and mixins) of an ObjectType from an execution package.

    Each newly discovered base is read from the zip, parsed, stored into
    ``types_dict`` (updated in place) and imported as a sanity check.

    :param obj_type: ObjectType whose bases should be resolved.
    :param types_dict: already processed ObjectType definitions.
    :param zip_file: source execution package.
    :param tmp_dir: directory used by save_and_import_type_def.
    :param ast: parsed source of ``obj_type``.
        NOTE(review): this parameter shadows the ``ast`` module inside the function.
    :raises FlaskException: when a base is missing in the package or its code is invalid (401).
    """

    # first item yielded by base_from_source is the real base, the rest are mixins
    for idx, base in enumerate(base_from_source(ast, obj_type.id)):

        # skip types that are already processed or built-in
        if base in types_dict.keys() | built_in_types_names():
            continue

        logger.debug(f"Getting {base} as base of {obj_type.id}.")

        try:
            base_obj_type_src = read_str_from_zip(zip_file, f"object_types/{humps.depascalize(base)}.py")
        except KeyError:
            raise FlaskException(f"Could not find {base} object type (base of {obj_type.id}).", error_code=401)

        # first try if the code is valid
        try:
            base_ast = parse(base_obj_type_src)
        except Arcor2Exception:
            raise FlaskException(f"Invalid code of the {base} (base of {obj_type.id}).", error_code=401)

        types_dict[base] = ObjectType(base, base_obj_type_src)

        # try to get base of the base
        get_base_from_imported_package(types_dict[base], types_dict, zip_file, tmp_dir, base_ast)

        # then, try to import it (no need to store the result)
        if idx == 0:  # this is the base ObjectType
            save_and_import_type_def(base_obj_type_src, base, Generic, tmp_dir, OBJECT_TYPE_MODULE)
        else:  # these are potential mixins
            save_and_import_type_def(base_obj_type_src, base, object, tmp_dir, OBJECT_TYPE_MODULE)
def project_import() -> RespT:
    """Imports a project from execution package.
    ---
    put:
        tags:
            - Build
        summary: Imports a project from execution package.
        operationId: ProjectImport
        parameters:
            - in: query
              name: overwriteScene
              schema:
                type: boolean
                default: false
              description: overwrite Scene
            - in: query
              name: overwriteProject
              schema:
                type: boolean
                default: false
              description: overwrite Project
            - in: query
              name: overwriteObjectTypes
              schema:
                type: boolean
                default: false
              description: overwrite ObjectTypes
            - in: query
              name: overwriteProjectSources
              schema:
                type: boolean
                default: false
              description: overwrite ProjectSources
            - in: query
              name: overwriteCollisionModels
              schema:
                type: boolean
                default: false
              description: overwrite collision models
        requestBody:
              content:
                multipart/form-data:
                  schema:
                    type: object
                    required:
                        - executionPackage
                    properties:
                      executionPackage:
                        type: string
                        format: binary
        responses:
            200:
                description: Ok
                content:
                  application/json:
                    schema:
                      $ref: ImportResult
            400:
                description: Some other error occurred.
                content:
                  application/json:
                    schema:
                      type: string
            401:
                description: Invalid execution package.
                content:
                  application/json:
                    schema:
                      type: string
            402:
                description: A difference between package/project service detected (overwrite needed).
                content:
                  application/json:
                    schema:
                      type: string
            404:
                description: Something is missing.
                content:
                  application/json:
                    schema:
                      type: string
    """

    file = request.files["executionPackage"]

    # query flags default to False unless the parameter is exactly "true"
    overwrite_scene = request.args.get("overwriteScene", default="false") == "true"
    overwrite_project = request.args.get("overwriteProject", default="false") == "true"
    overwrite_object_types = request.args.get("overwriteObjectTypes", default="false") == "true"
    overwrite_project_sources = request.args.get("overwriteProjectSources", default="false") == "true"
    overwrite_collision_models = request.args.get("overwriteCollisionModels", default="false") == "true"

    objects: dict[str, ObjectType] = {}
    models: dict[str, Models] = {}

    # Overall flow:
    # 1) get and validate all data from zip
    # 2) check what is already on the Project service
    # 3) do updates

    # BytesIO + stream.read() = workaround for a Python bug (SpooledTemporaryFile not seekable)
    with zipfile.ZipFile(BytesIO(file.stream.read())) as zip_file:

        try:
            project = read_dc_from_zip(zip_file, "data/project.json", Project)
        except KeyError:
            raise FlaskException("Could not find project.json.", error_code=404)
        except (json.JsonException, ValidationError) as e:
            raise FlaskException(f"Failed to process project.json. {str(e)}", error_code=401)

        try:
            scene = read_dc_from_zip(zip_file, "data/scene.json", Scene)
        except KeyError:
            raise FlaskException("Could not find scene.json.", error_code=404)
        except (json.JsonException, ValidationError) as e:
            # fix: raise FlaskException like every other error path here instead of
            # returning a bare (json.dumps(...), 401) tuple - keeps error handling consistent
            raise FlaskException(f"Failed to process scene.json. {str(e)}", error_code=401)

        if project.scene_id != scene.id:
            raise FlaskException("Project assigned to different scene id.", error_code=401)

        with tempfile.TemporaryDirectory() as tmp_dir:

            # restore original environment
            sys.path = list(original_sys_path)
            sys.modules = dict(original_sys_modules)

            prepare_object_types_dir(tmp_dir, OBJECT_TYPE_MODULE)

            for scene_obj in scene.objects:

                obj_type_name = scene_obj.type

                if obj_type_name in objects:  # there might be more instances of the same type
                    continue

                logger.debug(f"Importing {obj_type_name}.")

                try:
                    obj_type_src = read_str_from_zip(zip_file, f"object_types/{humps.depascalize(obj_type_name)}.py")
                except KeyError:
                    raise FlaskException(f"Object type {obj_type_name} is missing in the package.", error_code=404)

                try:
                    ast = parse(obj_type_src)
                except Arcor2Exception:
                    raise FlaskException(f"Invalid code of the {obj_type_name} object type.", error_code=401)

                # TODO fill in OT description (is it used somewhere?)
                objects[obj_type_name] = ObjectType(obj_type_name, obj_type_src)
                get_base_from_imported_package(objects[obj_type_name], objects, zip_file, tmp_dir, ast)
                type_def = save_and_import_type_def(obj_type_src, obj_type_name, Generic, tmp_dir, OBJECT_TYPE_MODULE)

                assert obj_type_name == type_def.__name__
                if type_def.abstract():
                    raise FlaskException(f"Scene contains abstract object type: {obj_type_name}.", error_code=401)

        for obj_type in objects.values():  # handle models

            # TODO rather iterate on content of data/models?
            try:
                model = read_dc_from_zip(
                    zip_file, f"data/models/{humps.depascalize(obj_type.id)}.json", ObjectModel
                ).model()
            except KeyError:
                continue

            logger.debug(f"Found model {model.id} of type {model.type}.")

            obj_type.model = model.metamodel()
            if obj_type.id != obj_type.model.id:
                raise FlaskException(
                    f"Model id ({obj_type.model.id}) has to be the same as ObjectType id ({obj_type.id}).",
                    error_code=401,
                )
            models[obj_type.id] = model

        if not project.has_logic:
            logger.debug("Importing the main script.")

            try:
                script = zip_file.read("script.py").decode("UTF-8")
            except KeyError:
                raise FlaskException("Could not find script.py.", error_code=404)

            try:
                parse(script)
            except Arcor2Exception:
                raise FlaskException("Invalid code of the main script.", error_code=401)

    # check that we are not going to overwrite something
    if not overwrite_scene:

        try:
            ps_scene = ps.get_scene(scene.id)
        except ps.ProjectServiceException:
            pass
        else:
            # do not take created / modified into account
            ps_scene.created = scene.created = None
            ps_scene.modified = scene.modified = None

            if ps_scene != scene:
                raise FlaskException("Scene difference detected. Overwrite needed.", error_code=402)

    if not overwrite_project:

        try:
            ps_project = ps.get_project(project.id)
        except ps.ProjectServiceException:
            pass
        else:
            # do not take created / modified into account
            ps_project.created = project.created = None
            ps_project.modified = project.modified = None

            if ps_project != project:
                raise FlaskException("Project difference detected. Overwrite needed.", error_code=402)

    if not overwrite_object_types:

        for obj_type in objects.values():
            try:
                ot = ps.get_object_type(obj_type.id)

                # ignore changes in description (no one cares)
                if ot.source != obj_type.source or ot.model != obj_type.model:
                    raise FlaskException(
                        f"Difference detected for {obj_type.id} object type. Overwrite needed.", error_code=402
                    )
            except ps.ProjectServiceException:
                pass

    if not overwrite_project_sources and not project.has_logic:

        try:
            if ps.get_project_sources(project.id).script != script:
                raise FlaskException("Script difference detected. Overwrite needed.", error_code=402)
        except ps.ProjectServiceException:
            pass

    if not overwrite_collision_models:

        for model in models.values():
            try:
                if model != ps.get_model(model.id, model.type()):
                    raise FlaskException("Collision model difference detected. Overwrite needed.", error_code=402)
            except ps.ProjectServiceException:
                pass

    # everything is validated, do the updates
    for model in models.values():
        ps.put_model(model)

    for obj_type in objects.values():
        ps.update_object_type(obj_type)

    ps.update_scene(scene)
    ps.update_project(project)
    if not project.has_logic:
        ps.update_project_sources(ProjectSources(project.id, script))

    logger.info(
        f"Imported project {project.name} (scene {scene.name}), with {len(objects)} "
        f"object type(s) and {len(models)} model(s)."
    )

    return ImportResult(scene.id, project.id).to_json(), 200
def _publish(project_id: str, package_name: str) -> RespT:
    """Build an execution-package zip for the given project and send it as a download.

    :param package_name: name written into package.json; falls back to the project
        name when empty.
    :raises FlaskException: 404 when the package content cannot be prepared,
        501 when the main script cannot be generated or is invalid.
    """

    mem_zip = BytesIO()

    logger.debug(f"Generating package {package_name} for project_id: {project_id}.")

    types_dict: TypesDict = {}

    # restore original environment
    sys.path = list(original_sys_path)
    sys.modules = dict(original_sys_modules)

    with tempfile.TemporaryDirectory() as tmp_dir:

        prepare_object_types_dir(tmp_dir, OBJECT_TYPE_MODULE)

        with zipfile.ZipFile(mem_zip, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:

            try:
                logger.debug("Getting scene and project.")
                project = ps.get_project(project_id)
                cached_project = CachedProject(project)
                scene = ps.get_scene(project.scene_id)
                cached_scene = CachedScene(scene)

                if not package_name:
                    package_name = project.name

                data_path = "data"
                ot_path = "object_types"

                zf.writestr(os.path.join(ot_path, "__init__.py"), "")
                zf.writestr(os.path.join(data_path, "project.json"), project.to_json())
                zf.writestr(os.path.join(data_path, "scene.json"), scene.to_json())

                obj_types = set(cached_scene.object_types)
                obj_types_with_models: set[str] = set()

                if __debug__:
                    # this should uncover potential problems with order in which ObjectTypes are processed
                    import random

                    random.shuffle(scene.objects)

                for scene_obj in scene.objects:

                    # multiple instances of the same type - process it only once
                    if scene_obj.type in types_dict:
                        continue

                    logger.debug(f"Getting scene object type {scene_obj.type}.")
                    obj_type = ps.get_object_type(scene_obj.type)

                    if obj_type.model and obj_type.id not in obj_types_with_models:
                        obj_types_with_models.add(obj_type.id)

                        model = ps.get_model(obj_type.model.id, obj_type.model.type)
                        obj_model = ObjectModel(
                            obj_type.model.type, **{model.type().value.lower(): model}  # type: ignore
                        )

                        zf.writestr(
                            os.path.join(data_path, "models", humps.depascalize(obj_type.id) + ".json"),
                            obj_model.to_json(),
                        )

                    zf.writestr(os.path.join(ot_path, humps.depascalize(obj_type.id)) + ".py", obj_type.source)

                    # handle inheritance
                    get_base_from_project_service(
                        types_dict, tmp_dir, obj_types, obj_type, zf, ot_path, parse(obj_type.source)
                    )

                    types_dict[scene_obj.type] = save_and_import_type_def(
                        obj_type.source, scene_obj.type, Generic, tmp_dir, OBJECT_TYPE_MODULE
                    )

            except Arcor2Exception as e:
                logger.exception(f"Failed to prepare package content. {str(e)}")
                raise FlaskException(str(e), error_code=404)

            script_path = "script.py"

            try:
                if project.has_logic:
                    logger.debug("Generating script from project logic.")
                    zf.writestr(script_path, program_src(types_dict, cached_project, cached_scene, True))
                else:
                    try:
                        logger.debug("Getting project sources.")
                        script = ps.get_project_sources(project.id).script

                        # check if it is a valid Python code
                        try:
                            parse(script)
                        except SourceException:
                            logger.exception("Failed to parse code of the uploaded script.")
                            raise FlaskException("Invalid code.", error_code=501)

                        zf.writestr(script_path, script)
                    except ps.ProjectServiceException:
                        logger.info("Script not found on project service, creating one from scratch.")

                        # write script without the main loop
                        zf.writestr(script_path, program_src(types_dict, cached_project, cached_scene, False))

                logger.debug("Generating supplementary files.")

                logger.debug("action_points.py")
                zf.writestr("action_points.py", global_action_points_class(cached_project))

                logger.debug("package.json")
                zf.writestr("package.json", PackageMeta(package_name, datetime.now(tz=timezone.utc)).to_json())

            except Arcor2Exception as e:
                logger.exception("Failed to generate script.")
                raise FlaskException(str(e), error_code=501)

    logger.info(f"Done with {package_name} (scene {scene.name}, project {project.name}).")

    mem_zip.seek(0)
    return send_file(mem_zip, as_attachment=True, max_age=0, download_name=f"{package_name}_package.zip")
def test_branched_output_2() -> None:
    """Checks code generated for a project with nested boolean branching.

    Builds a project whose logic branches on ``ac1``'s output and, inside the
    False branch, branches again on ``ac4``'s output (with one path ending the
    loop via ``continue``), then verifies ordering and indentation of the
    generated statements.
    """

    scene = Scene("s1")
    obj = SceneObject("test_name", Test.__name__)
    scene.objects.append(obj)
    project = Project("p1", "s1")
    ap1 = ActionPoint("ap1", Position())
    project.action_points.append(ap1)

    ac1 = Action("ac1", f"{obj.id}/test", flows=[Flow(outputs=["bool_res"])])
    ap1.actions.append(ac1)

    ac2 = Action("ac2", f"{obj.id}/test", flows=[Flow()])
    ap1.actions.append(ac2)

    ac3 = Action("ac3", f"{obj.id}/test", flows=[Flow()])
    ap1.actions.append(ac3)

    ac4 = Action("ac4", f"{obj.id}/test", flows=[Flow(outputs=["bool2_res"])])
    ap1.actions.append(ac4)

    ac5 = Action("ac5", f"{obj.id}/test", flows=[Flow()])
    ap1.actions.append(ac5)

    ac6 = Action("ac6", f"{obj.id}/test", flows=[Flow()])
    ap1.actions.append(ac6)

    project.logic.append(LogicItem(LogicItem.START, ac1.id))
    project.logic.append(LogicItem(ac1.id, ac2.id, ProjectLogicIf(f"{ac1.id}/default/0", json.dumps(True))))
    project.logic.append(LogicItem(ac1.id, ac4.id, ProjectLogicIf(f"{ac1.id}/default/0", json.dumps(False))))
    project.logic.append(LogicItem(ac2.id, ac3.id))
    project.logic.append(LogicItem(ac3.id, ac6.id))
    project.logic.append(LogicItem(ac4.id, ac5.id, ProjectLogicIf(f"{ac4.id}/default/0", json.dumps(True))))
    project.logic.append(LogicItem(ac5.id, ac6.id))
    project.logic.append(LogicItem(ac6.id, LogicItem.END))
    project.logic.append(LogicItem(ac4.id, LogicItem.END, ProjectLogicIf(f"{ac4.id}/default/0", json.dumps(False))))

    src = program_src({Test.__name__: Test}, CachedProject(project), CachedScene(scene))
    parse(src)

    """
    bool_res = test_name.test(res.ac1)
    if (bool_res == False):
        bool2_res = test_name.test(res.ac4)
        if (bool2_res == False):
            continue
        elif (bool2_res == True):
            test_name.test(res.ac5)
    elif (bool_res == True):
        test_name.test(res.ac2)
        test_name.test(res.ac3)
    test_name.test(res.ac6)
    """

    spl = src.splitlines()

    # it has to be robust against changed order of blocks
    ac1_idx = subs_index(spl, "bool_res = test_name.test(an='ac1')")

    if_bool_res_false_idx = subs_index(spl, "if (bool_res == False):")
    assert if_bool_res_false_idx > ac1_idx
    assert cntsp(spl[ac1_idx]) == cntsp(spl[if_bool_res_false_idx])

    bool2_res_idx = subs_index(spl, "bool2_res = test_name.test(an='ac4')")
    assert bool2_res_idx > if_bool_res_false_idx
    assert cntsp(spl[if_bool_res_false_idx]) == cntsp(spl[bool2_res_idx]) - TAB

    if_bool_2_res_false_idx = subs_index(spl, "if (bool2_res == False):")
    assert cntsp(spl[if_bool_2_res_false_idx]) == cntsp(spl[bool2_res_idx])
    assert if_bool_2_res_false_idx > bool2_res_idx
    assert "continue" in spl[if_bool_2_res_false_idx + 1]
    assert cntsp(spl[bool2_res_idx]) == cntsp(spl[if_bool_2_res_false_idx + 1]) - TAB

    if_bool_2_res_true_idx = subs_index(spl, "if (bool2_res == True):")
    assert if_bool_2_res_true_idx > bool2_res_idx
    assert "test_name.test(an='ac5')" in spl[if_bool_2_res_true_idx + 1]
    assert cntsp(spl[if_bool_2_res_true_idx]) == cntsp(spl[if_bool_2_res_true_idx + 1]) - TAB

    if_bool_res_true_idx = subs_index(spl, "if (bool_res == True):")
    assert if_bool_res_true_idx > ac1_idx
    assert cntsp(spl[ac1_idx]) == cntsp(spl[if_bool_res_true_idx])
    assert "test_name.test(an='ac2')" in spl[if_bool_res_true_idx + 1]
    assert cntsp(spl[if_bool_res_true_idx]) == cntsp(spl[if_bool_res_true_idx + 1]) - TAB
    assert "test_name.test(an='ac3')" in spl[if_bool_res_true_idx + 2]
    assert cntsp(spl[if_bool_res_true_idx]) == cntsp(spl[if_bool_res_true_idx + 2]) - TAB

    # ac6 joins both top-level branches again
    ac6_idx = subs_index(spl, "test_name.test(an='ac6')")
    assert cntsp(spl[ac1_idx]) == cntsp(spl[ac6_idx])
    assert ac6_idx > if_bool_2_res_false_idx
    assert ac6_idx > if_bool_2_res_true_idx
async def get_object_data(object_types: ObjectTypeDict, obj_id: str) -> None:
    """Fetch one ObjectType, recursively resolve its bases/mixins and cache the result.

    On success, ``object_types[obj_id]`` is set to a full ObjectTypeData; on
    failure, it is set to a disabled-meta placeholder describing the problem.
    Already up-to-date types (same 'modified' as the cached meta) are skipped.

    :param object_types: cache of processed types, updated in place.
    :param obj_id: id of the ObjectType to process.
    """

    logger.debug(f"Processing {obj_id}.")

    if obj_id in object_types:
        logger.debug(f"{obj_id} already processed, skipping...")
        return

    obj_iddesc = await storage.get_object_type_iddesc(obj_id)

    if obj_id in glob.OBJECT_TYPES:

        assert obj_iddesc.modified
        assert glob.OBJECT_TYPES[obj_id].meta.modified, f"Object {obj_id} does not have 'modified' in its meta."

        # 'modified' timestamps match -> cached version is current
        if obj_iddesc.modified == glob.OBJECT_TYPES[obj_id].meta.modified:
            logger.debug(f"No need to update {obj_id}.")
            return

    obj = await storage.get_object_type(obj_id)

    try:
        bases = otu.base_from_source(obj.source, obj_id)

        if not bases:
            logger.debug(f"{obj_id} is definitely not an ObjectType (subclass of {object.__name__}), maybe mixin?")
            return

        # bases[0] is the real base class, process it first (recursively)
        if bases[0] not in object_types.keys() | built_in_types_names():
            logger.debug(f"Getting base class {bases[0]} for {obj_id}.")
            await get_object_data(object_types, bases[0])

        # the remaining bases are mixins - just import them to verify they work
        for mixin in bases[1:]:
            mixin_obj = await storage.get_object_type(mixin)
            await hlp.run_in_executor(
                hlp.save_and_import_type_def,
                mixin_obj.source,
                mixin_obj.id,
                object,
                settings.OBJECT_TYPE_PATH,
                settings.OBJECT_TYPE_MODULE,
            )

    except Arcor2Exception as e:
        logger.error(f"Disabling ObjectType {obj.id}: can't get a base. {str(e)}")
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "ObjectType disabled.", disabled=True, problem="Can't get base.", modified=obj.modified)
        )
        return

    logger.debug(f"Updating {obj_id}.")

    try:
        type_def = await hlp.run_in_executor(
            hlp.save_and_import_type_def,
            obj.source,
            obj.id,
            Generic,
            settings.OBJECT_TYPE_PATH,
            settings.OBJECT_TYPE_MODULE,
        )
    except Arcor2Exception as e:
        logger.debug(f"{obj.id} is probably not an ObjectType. {str(e)}")
        return

    assert issubclass(type_def, Generic)

    try:
        meta = meta_from_def(type_def)
    except Arcor2Exception as e:
        logger.error(f"Disabling ObjectType {obj.id}.")
        logger.debug(e, exc_info=True)
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "ObjectType disabled.", disabled=True, problem=str(e), modified=obj.modified)
        )
        return

    meta.modified = obj.modified

    if obj.model:
        try:
            model = await storage.get_model(obj.model.id, obj.model.type)
        except Arcor2Exception as e:
            logger.error(f"{obj.model.id}: failed to get collision model of type {obj.model.type}. {str(e)}")
            meta.disabled = True
            meta.problem = "Can't get collision model."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        # a mesh model also needs its data file to be present
        if isinstance(model, Mesh) and model.data_id not in await storage.files_ids():
            logger.error(f"Disabling {meta.type} as its mesh file {model.data_id} does not exist.")
            meta.disabled = True
            meta.problem = "Mesh file does not exist."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        kwargs = {model.type().value.lower(): model}
        meta.object_model = ObjectModel(model.type(), **kwargs)  # type: ignore

    ast = parse(obj.source)
    otd = ObjectTypeData(meta, type_def, object_actions(type_def, ast), ast)

    object_types[obj_id] = otd
def project_import() -> RespT:
    """Imports a project from execution package.
    ---
    put:
        description: Imports a project from execution package.
        parameters:
            - in: query
              name: overwriteScene
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteProject
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteObjectTypes
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteProjectSources
              schema:
                type: boolean
                default: false
            - in: query
              name: overwriteCollisionModels
              schema:
                type: boolean
                default: false
        requestBody:
              content:
                multipart/form-data:
                  schema:
                    type: object
                    required:
                        - executionPackage
                    properties:
                      executionPackage:
                        type: string
                        format: binary
        responses:
            200:
                description: Ok
                content:
                  application/json:
                    schema:
                      $ref: ImportResult
            400:
                description: Some other error occurred.
                content:
                  application/json:
                    schema:
                      type: string
            401:
                description: Invalid execution package.
                content:
                  application/json:
                    schema:
                      type: string
            402:
                description: A difference between package/project service detected (overwrite needed).
                content:
                  application/json:
                    schema:
                      type: string
            404:
                description: Something is missing.
                content:
                  application/json:
                    schema:
                      type: string
    """

    file = request.files["executionPackage"]

    # query flags default to False unless the parameter is exactly "true"
    overwrite_scene = request.args.get("overwriteScene", default="false") == "true"
    overwrite_project = request.args.get("overwriteProject", default="false") == "true"
    overwrite_object_types = request.args.get("overwriteObjectTypes", default="false") == "true"
    overwrite_project_sources = request.args.get("overwriteProjectSources", default="false") == "true"
    overwrite_collision_models = request.args.get("overwriteCollisionModels", default="false") == "true"

    objects: Dict[str, ObjectType] = {}
    models: Dict[str, Models] = {}

    # Overall flow:
    # 1) get and validate all data from zip
    # 2) check what is already on the Project service
    # 3) do updates

    # BytesIO + stream.read() = workaround for a Python bug (SpooledTemporaryFile not seekable)
    with zipfile.ZipFile(BytesIO(file.stream.read())) as zip_file:

        try:
            project = read_dc_from_zip(zip_file, "data/project.json", Project)
        except KeyError:
            raise FlaskException("Could not find project.json.", error_code=404)
        except (JSONDecodeError, ValidationError) as e:
            raise FlaskException(f"Failed to process project.json. {str(e)}", error_code=401)

        try:
            scene = read_dc_from_zip(zip_file, "data/scene.json", Scene)
        except KeyError:
            raise FlaskException("Could not find scene.json.", error_code=404)
        except (JSONDecodeError, ValidationError) as e:
            # fix: raise FlaskException like the sibling error paths instead of
            # returning a bare (json.dumps(...), 401) tuple
            raise FlaskException(f"Failed to process scene.json. {str(e)}", error_code=401)

        if project.scene_id != scene.id:
            raise FlaskException("Project assigned to different scene id.", error_code=401)

        for scene_obj in scene.objects:

            obj_type_name = scene_obj.type

            if obj_type_name in objects:  # already imported (more instances of the same type)
                continue

            try:
                obj_type_src = read_str_from_zip(zip_file, f"object_types/{humps.depascalize(obj_type_name)}.py")
            except KeyError:
                raise FlaskException(f"Object type {obj_type_name} is missing in the package.", error_code=404)

            try:
                parse(obj_type_src)
            except Arcor2Exception:
                raise FlaskException(f"Invalid code of the {obj_type_name} object type.", error_code=401)

            # TODO description (is it used somewhere?)
            objects[obj_type_name] = ObjectType(obj_type_name, obj_type_src)
            logger.debug(f"Just imported {obj_type_name}.")

            # walk up the inheritance chain until a known or built-in type is reached
            while True:

                base = base_from_source(obj_type_src, obj_type_name)

                if not base:
                    # fix: unified on FlaskException (was: return json.dumps(...), 401)
                    raise FlaskException(f"Could not determine base class for {scene_obj.type}.", error_code=401)

                if base in objects.keys() | built_in_types_names():
                    break

                logger.debug(f"Importing {base} as a base of {obj_type_name}.")

                try:
                    base_obj_type_src = read_str_from_zip(zip_file, f"object_types/{humps.depascalize(base)}.py")
                except KeyError:
                    # fix: unified on FlaskException (was: return json.dumps(...), 404)
                    raise FlaskException(f"Could not find {base} object type (base of {obj_type_name}).", error_code=404)

                try:
                    parse(base_obj_type_src)
                except Arcor2Exception:
                    # fix: unified on FlaskException (was: return json.dumps(...), 401)
                    raise FlaskException(
                        f"Invalid code of the {base} object type (base of {obj_type_name}).", error_code=401
                    )

                objects[base] = ObjectType(base, base_obj_type_src)

                # continue the walk with the just-imported base
                obj_type_name = base
                obj_type_src = base_obj_type_src

        for obj_type in objects.values():  # handle models

            try:
                model = read_dc_from_zip(
                    zip_file, f"data/models/{humps.depascalize(obj_type.id)}.json", ObjectModel
                ).model()
            except KeyError:
                continue

            logger.debug(f"Found model {model.id} of type {model.type}.")

            obj_type.model = model.metamodel()
            if obj_type.id != obj_type.model.id:
                raise FlaskException(
                    f"Model id ({obj_type.model.id}) has to be the same as ObjectType id ({obj_type.id}).",
                    error_code=401,
                )
            models[obj_type.id] = model

        if not project.has_logic:
            logger.debug("Importing the main script.")

            try:
                script = zip_file.read("script.py").decode("UTF-8")
            except KeyError:
                raise FlaskException("Could not find script.py.", error_code=404)

            try:
                parse(script)
            except Arcor2Exception:
                raise FlaskException("Invalid code of the main script.", error_code=401)

    # check that we are not going to overwrite something
    if not overwrite_scene:

        try:
            ps_scene = ps.get_scene(scene.id)
        except ps.ProjectServiceException:
            pass
        else:
            ps_scene.modified = scene.modified  # modified is updated with each PUT

            if ps_scene != scene:
                raise FlaskException("Scene difference detected. Overwrite needed.", error_code=402)

    if not overwrite_project:

        try:
            ps_project = ps.get_project(project.id)
        except ps.ProjectServiceException:
            pass
        else:
            ps_project.modified = project.modified

            if ps_project != project:
                raise FlaskException("Project difference detected. Overwrite needed.", error_code=402)

    if not overwrite_object_types:

        for obj_type in objects.values():
            try:
                if ps.get_object_type(obj_type.id) != obj_type:
                    raise FlaskException(
                        f"Difference detected for {obj_type.id} object type. Overwrite needed.", error_code=402
                    )
            except ps.ProjectServiceException:
                pass

    if not overwrite_project_sources and not project.has_logic:

        try:
            if ps.get_project_sources(project.id).script != script:
                raise FlaskException("Script difference detected. Overwrite needed.", error_code=402)
        except ps.ProjectServiceException:
            pass

    if not overwrite_collision_models:

        for model in models.values():
            try:
                if model != ps.get_model(model.id, model.type()):
                    raise FlaskException("Collision model difference detected. Overwrite needed.", error_code=402)
            except ps.ProjectServiceException:
                pass

    # everything is validated, do the updates
    for model in models.values():
        ps.put_model(model)

    for obj_type in objects.values():
        ps.update_object_type(obj_type)

    ps.update_scene(scene)
    ps.update_project(project)
    if not project.has_logic:
        ps.update_project_sources(ProjectSources(project.id, script))

    logger.info(
        f"Imported project {project.name} (scene {scene.name}), with {len(objects)} "
        f"object type(s) and {len(models)} model(s)."
    )

    return ImportResult(scene.id, project.id).to_json(), 200
async def get_object_data(object_types: ObjectTypeDict, obj_id: str) -> None:
    """Fetch one object type, recursively resolve its bases/mixins and cache the result.

    On success, ``object_types[obj_id]`` is set to a full ObjectTypeData; on
    failure, it is set to a disabled-meta placeholder describing the problem.
    A cached type whose module source matches the stored source is skipped.

    :param object_types: cache of processed types, updated in place.
    :param obj_id: id of the object type to process.
    """

    glob.logger.debug(f"Processing {obj_id}.")

    if obj_id in object_types:
        glob.logger.debug(f"{obj_id} already processed, skipping...")
        return

    obj = await storage.get_object_type(obj_id)

    if obj_id in glob.OBJECT_TYPES and glob.OBJECT_TYPES[obj_id].type_def is not None:

        stored_type_def = glob.OBJECT_TYPES[obj_id].type_def
        assert stored_type_def

        # TODO do not compare sources but 'modified`
        # the code we get from type_def has Unix line endings, while the code from Project service might have Windows...
        obj.source = convert_line_endings_to_unix(obj.source)

        if get_containing_module_sources(stored_type_def) == obj.source:
            glob.logger.debug(f"No need to update {obj_id}.")
            return

    try:
        bases = otu.base_from_source(obj.source, obj_id)

        # bases[0] is the real base class, process it first (recursively)
        if bases and bases[0] not in object_types.keys() | built_in_types_names():
            glob.logger.debug(f"Getting base class {bases[0]} for {obj_id}.")
            await get_object_data(object_types, bases[0])

        # the remaining bases are mixins - just import them to verify they work
        for mixin in bases[1:]:
            mixin_obj = await storage.get_object_type(mixin)
            await hlp.run_in_executor(
                hlp.save_and_import_type_def,
                mixin_obj.source,
                mixin_obj.id,
                object,
                settings.OBJECT_TYPE_PATH,
                settings.OBJECT_TYPE_MODULE,
            )

    except Arcor2Exception as e:
        # fix: logger.warn is a deprecated alias of logger.warning (and the rest of
        # this function already uses warning)
        glob.logger.warning(f"Disabling object type {obj.id}: can't get a base. {str(e)}")
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "Object type disabled.", disabled=True, problem="Can't get base.")
        )
        return

    glob.logger.debug(f"Updating {obj_id}.")

    try:
        type_def = await hlp.run_in_executor(
            hlp.save_and_import_type_def,
            obj.source,
            obj.id,
            Generic,
            settings.OBJECT_TYPE_PATH,
            settings.OBJECT_TYPE_MODULE,
        )
    except Arcor2Exception as e:
        glob.logger.debug(f"{obj.id} is probably not an object type. {str(e)}")
        return

    assert issubclass(type_def, Generic)

    try:
        meta = meta_from_def(type_def)
        otu.get_settings_def(type_def)  # just to check if settings are ok
    except Arcor2Exception as e:
        glob.logger.warning(f"Disabling object type {obj.id}.")
        glob.logger.debug(e, exc_info=True)
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "Object type disabled.", disabled=True, problem=str(e))
        )
        return

    if obj.model:
        try:
            model = await storage.get_model(obj.model.id, obj.model.type)
        except Arcor2Exception:
            glob.logger.error(f"{obj.model.id}: failed to get collision model of type {obj.model.type}.")
            meta.disabled = True
            meta.problem = "Can't get collision model."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        kwargs = {model.type().value.lower(): model}
        meta.object_model = ObjectModel(model.type(), **kwargs)  # type: ignore

    ast = parse(obj.source)
    otd = ObjectTypeData(meta, type_def, object_actions(type_def, ast), ast)

    object_types[obj_id] = otd
async def get_object_data(object_types: ObjectTypeDict, obj_id: str) -> None:
    """Fetch one object type, resolve its base and cache the result.

    On success, ``object_types[obj_id]`` is set to a full ObjectTypeData; on
    failure, it is set to a disabled-meta placeholder describing the problem.
    A cached type whose module source matches the stored source is skipped.

    :param object_types: cache of processed types, updated in place.
    :param obj_id: id of the object type to process.
    """

    glob.logger.debug(f"Processing {obj_id}.")

    if obj_id in object_types:
        glob.logger.debug(f"{obj_id} already processed, skipping...")
        return

    obj = await storage.get_object_type(obj_id)

    if obj_id in glob.OBJECT_TYPES and glob.OBJECT_TYPES[obj_id].type_def is not None:

        stored_type_def = glob.OBJECT_TYPES[obj_id].type_def
        assert stored_type_def

        # fix: compare the sources directly instead of comparing hash() values -
        # a (rare) hash collision would silently skip a needed update, and string
        # equality is no more expensive than hashing both strings anyway
        if get_containing_module_sources(stored_type_def) == obj.source:
            glob.logger.debug(f"No need to update {obj_id}.")
            return

    try:
        base = otu.base_from_source(obj.source, obj_id)

        # process the base first (recursively), unless it is known or built-in
        if base and base not in object_types.keys() | built_in_types_names():
            glob.logger.debug(f"Getting base class {base} for {obj_id}.")
            await get_object_data(object_types, base)

    except Arcor2Exception:
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "Object type disabled.", disabled=True, problem="Can't get base.")
        )
        return

    glob.logger.debug(f"Updating {obj_id}.")

    try:
        type_def = await hlp.run_in_executor(
            hlp.save_and_import_type_def,
            obj.source,
            obj.id,
            Generic,
            settings.OBJECT_TYPE_PATH,
            settings.OBJECT_TYPE_MODULE,
        )
        assert issubclass(type_def, Generic)
        meta = meta_from_def(type_def)
        otu.get_settings_def(type_def)  # just to check if settings are ok
    except Arcor2Exception as e:
        glob.logger.warning(f"Disabling object type {obj.id}.")
        glob.logger.debug(e, exc_info=True)
        object_types[obj_id] = ObjectTypeData(
            ObjectTypeMeta(obj_id, "Object type disabled.", disabled=True, problem=str(e))
        )
        return

    if obj.model:
        try:
            model = await storage.get_model(obj.model.id, obj.model.type)
        except Arcor2Exception:
            glob.logger.error(f"{obj.model.id}: failed to get collision model of type {obj.model.type}.")
            meta.disabled = True
            meta.problem = "Can't get collision model."
            object_types[obj_id] = ObjectTypeData(meta)
            return

        kwargs = {model.type().value.lower(): model}
        meta.object_model = ObjectModel(model.type(), **kwargs)  # type: ignore

    ast = parse(obj.source)
    otd = ObjectTypeData(meta, type_def, object_actions(type_def, ast), ast)

    object_types[obj_id] = otd
def test_branched_output() -> None:
    """Checks code generated for a project with a single boolean branching.

    Builds a project whose logic branches on ``ac1``'s output and joins again
    at ``ac4``, then verifies ordering and indentation of the generated
    statements.
    """

    scene = Scene("s1")
    obj = SceneObject("test_name", "Test")
    scene.objects.append(obj)
    project = Project("p1", "s1")
    ap1 = ActionPoint("ap1", Position())
    project.action_points.append(ap1)

    ac1 = Action("ac1", f"{obj.id}/test", flows=[Flow(outputs=["bool_res"])])
    ap1.actions.append(ac1)

    ac2 = Action("ac2", f"{obj.id}/test", flows=[Flow()])
    ap1.actions.append(ac2)

    ac3 = Action("ac3", f"{obj.id}/test", flows=[Flow()])
    ap1.actions.append(ac3)

    ac4 = Action("ac4", f"{obj.id}/test", flows=[Flow()])
    ap1.actions.append(ac4)

    project.logic.append(LogicItem(LogicItem.START, ac1.id))
    project.logic.append(LogicItem(ac1.id, ac2.id, ProjectLogicIf(f"{ac1.id}/default/0", json.dumps(True))))
    project.logic.append(LogicItem(ac1.id, ac3.id, ProjectLogicIf(f"{ac1.id}/default/0", json.dumps(False))))
    project.logic.append(LogicItem(ac2.id, ac4.id))
    project.logic.append(LogicItem(ac3.id, ac4.id))
    project.logic.append(LogicItem(ac4.id, LogicItem.END))

    src = program_src({Test.__name__: Test}, CachedProject(project), CachedScene(scene))
    parse(src)

    """
    bool_res = test_name.test(res.ac1)
    if bool_res == False:
        test_name.test(res.ac3)
    elif bool_res == True:
        test_name.test(res.ac2)
    test_name.test(res.ac4)
    """

    spl = src.splitlines()

    ac1_idx = subs_index(spl, "bool_res = test_name.test(an='ac1')")

    if_bool_res_false_idx = subs_index(spl, "if bool_res == False:")
    assert if_bool_res_false_idx > ac1_idx
    assert cntsp(spl[if_bool_res_false_idx]) == cntsp(spl[ac1_idx])
    assert "test_name.test(an='ac3')" in spl[if_bool_res_false_idx + 1]
    assert cntsp(spl[if_bool_res_false_idx]) == cntsp(spl[if_bool_res_false_idx + 1]) - TAB

    if_bool_res_true_idx = subs_index(spl, "if bool_res == True:")
    assert if_bool_res_true_idx > ac1_idx
    assert cntsp(spl[if_bool_res_true_idx]) == cntsp(spl[ac1_idx])
    assert "test_name.test(an='ac2')" in spl[if_bool_res_true_idx + 1]
    assert cntsp(spl[if_bool_res_true_idx]) == cntsp(spl[if_bool_res_true_idx + 1]) - TAB

    # ac4 joins both branches again
    ac4_idx = subs_index(spl, "test_name.test(an='ac4')")
    assert ac4_idx > if_bool_res_false_idx
    assert ac4_idx > if_bool_res_true_idx
    assert cntsp(spl[ac4_idx]) == cntsp(spl[ac1_idx])