def get_base(
    types_dict: TypesDict,
    tmp_dir: str,
    scene_object_types: Set[str],
    obj_type: ObjectType,
    zf: zipfile.ZipFile,
    ot_path: str,
) -> None:
    """Resolve the base class of *obj_type*, store it and recurse.

    The base's source is fetched from the Project service, written into the
    package zip under *ot_path*, saved/imported into *types_dict*, and the
    procedure repeats for the base of the base.

    :raises Arcor2Exception: when no base class can be determined.
    """

    base = base_from_source(obj_type.source, obj_type.id)

    if base is None:
        raise Arcor2Exception(f"Could not determine base class for {obj_type.id}.")

    # nothing to do for already processed, built-in or scene-level types
    known_types = types_dict.keys() | built_in_types_names() | scene_object_types
    if base in known_types:
        return

    logger.debug(f"Getting {base} as base of {obj_type.id}.")
    base_obj_type = ps.get_object_type(base)

    zf.writestr(
        os.path.join(ot_path, humps.depascalize(base_obj_type.id)) + ".py",
        base_obj_type.source,
    )
    types_dict[base_obj_type.id] = save_and_import_type_def(
        base_obj_type.source, base_obj_type.id, Generic, tmp_dir, OBJECT_TYPE_MODULE
    )

    # try to get base of the base
    get_base(types_dict, tmp_dir, scene_object_types, base_obj_type, zf, ot_path)
def get_baseballdatabank_files():
    """Convert Baseballdatabank CSVs to snake_case files and compress them.

    For every ``*.csv`` under BASEBALLDATABANK_PATH: depascalize the file
    name (after normalizing the "OF" fielding-file prefix), drop the header
    line, write the remainder next to OUTPUT_PATH, delete the original and
    compress the result into OUTPUT_PATH.
    """
    for csv_file in BASEBALLDATABANK_PATH.glob("*.csv"):
        # Just need to change from PascalCase to snake_case to match table names.
        # The OF fielding files are renamed first so depascalize treats them
        # like the rest of the databank filenames.
        normalized_name = csv_file.name.replace("OFs", "OfS").replace("OF", "Of")
        target = OUTPUT_PATH.with_name(humps.depascalize(normalized_name))

        with open(csv_file, 'r') as src, open(target, 'w') as dst:
            src.readline()  # skip the CSV header row
            dst.write(src.read())

        csv_file.unlink()
        compress(target, OUTPUT_PATH)
def program_src(type_defs: TypesDict, project: CProject, scene: CScene, add_logic: bool = True) -> str: tree = empty_script_tree(project.id, add_main_loop=add_logic) # get object instances from resources object main = find_function("main", tree) last_assign = find_last_assign(main) for obj in scene.objects: add_import(tree, "object_types." + humps.depascalize(obj.type), obj.type, try_to_import=False) last_assign += 1 main.body.insert(last_assign, object_instance_from_res(obj.name, obj.id, obj.type)) # TODO temporary solution - should be (probably) handled by plugin(s) from arcor2 import json # TODO should we put there even unused parameters? for param in project.parameters: val = json.loads(param.value) aval: Optional[expr] = None if isinstance(val, bool): # subclass of int aval = NameConstant(value=val, kind=None) elif isinstance(val, (int, float)): aval = Num(n=val, kind=None) elif isinstance(val, str): aval = Str(s=val, kind="") if not aval: raise Arcor2Exception( f"Unsupported project parameter type ({param.type}) or value ({val})." ) last_assign += 1 main.body.insert( last_assign, Assign( # TODO use rather AnnAssign? targets=[Name(id=param.name, ctx=Store())], value=aval, type_comment=None), ) if add_logic: add_logic_to_loop(type_defs, tree, scene, project) return SCRIPT_HEADER + tree_to_str(tree)
def import_type_def(type_name: str, output_type: Type[T], path: str, module_name: str) -> Type[T]:
    """Import a previously saved type definition and return its class.

    :param type_name: PascalCase name of the class to import.
    :param output_type: expected base class; the imported class must subclass it.
    :param path: directory containing the module package (appended to sys.path).
    :param module_name: package the type's module lives in.
    :return: the imported class.
    :raises ImportClsException: when the module/class cannot be imported or the
        class is not a subclass of *output_type*.
    """

    if path not in sys.path:
        sys.path.append(path)

    type_file = humps.depascalize(type_name)

    importlib.invalidate_caches()  # otherwise import might fail randomly (not sure why exactly)

    try:
        module = importlib.import_module(f"{module_name}.{type_file}")
        # reload is necessary for cases when the module is already loaded
        path_to_file = os.path.abspath(module.__file__)
        assert os.path.exists(path_to_file), f"Path {path_to_file} does not exist."
        module = importlib.reload(module)  # TODO does this really solve anything?
    except ImportError as e:
        raise ImportClsException(
            f"Failed to import '{module_name}.{type_file}'. {str(e).capitalize()}."
        ) from e

    try:
        cls = getattr(module, type_name)
    except AttributeError as e:
        # chain the original error so the real cause is preserved
        raise ImportClsException(
            f"Class {type_name} not found in module '{module_name}'.") from e

    if not issubclass(cls, output_type):
        raise ImportClsException(
            f"{cls.__name__} is not subclass of {output_type.__name__}.")

    return cls
def save_and_import_type_def(source: str, type_name: str, output_type: Type[T], path: str, module_name: str) -> Type[T]:
    """Save *source* into the module package directory and import the type.

    :param source: Python source code of the type.
    :param type_name: PascalCase name of the class defined in *source*.
    :param output_type: expected base class of the imported type.
    :param path: directory containing the module package.
    :param module_name: package the module file is written into.
    :return: the imported class.
    :raises ImportClsException: when the saved source cannot be imported.
    """

    type_file = humps.depascalize(type_name)
    full_path = os.path.join(path, module_name, type_file) + ".py"

    with open(full_path, "w") as file:
        file.write(source)

    try:
        return import_type_def(type_name, output_type, path, module_name)
    except ImportClsException:
        # do not leave a broken source file behind - a stale file would make
        # later imports of the same module fail confusingly
        os.remove(full_path)
        raise
def get_base_from_project_service(
    types_dict: TypesDict,
    tmp_dir: str,
    scene_object_types: set[str],
    obj_type: ObjectType,
    zf: zipfile.ZipFile,
    ot_path: str,
    ast: ast.AST,  # parsed source of obj_type; NOTE: parameter shadows the ast module
) -> None:
    """Recursively resolve the bases (and mixins) of *obj_type*.

    Every base named in the parsed source that is not yet known is fetched
    from the Project service, syntax-checked, recursively processed,
    imported and written into the package zip. The first base (idx == 0)
    becomes the real base ObjectType stored in *types_dict*; the rest are
    treated as mixins and only import-checked.

    :param types_dict: already imported type definitions (updated in place).
    :param tmp_dir: directory where sources are saved before importing.
    :param scene_object_types: ObjectType ids already handled (updated in place).
    :param obj_type: ObjectType whose bases should be resolved.
    :param zf: package zip the sources are written into.
    :param ot_path: path of the object_types package inside the zip.
    :raises FlaskException: when a base's source code is not valid Python.
    """

    for idx, base in enumerate(base_from_source(ast, obj_type.id)):

        # skip bases that are already processed, built-in, or scene types
        if base in types_dict.keys() | built_in_types_names() | scene_object_types:
            continue

        logger.debug(f"Getting {base} as base of {obj_type.id}.")
        base_obj_type = ps.get_object_type(base)

        # first try if the code is valid
        try:
            base_ast = parse(base_obj_type.source)
        except Arcor2Exception:
            raise FlaskException(
                f"Invalid code of the {base_obj_type.id} (base of {obj_type.id}).",
                error_code=401)

        # try to get base of the base
        get_base_from_project_service(types_dict, tmp_dir, scene_object_types,
                                      base_obj_type, zf, ot_path, base_ast)

        if idx == 0:  # this is the base ObjectType
            types_dict[base_obj_type.id] = save_and_import_type_def(
                base_obj_type.source, base_obj_type.id, Generic, tmp_dir,
                OBJECT_TYPE_MODULE)
        else:  # these are potential mixins (just try to import them, no need to store them)
            save_and_import_type_def(base_obj_type.source, base_obj_type.id,
                                     object, tmp_dir, OBJECT_TYPE_MODULE)
            # mixins are not recorded in types_dict, so mark them here to
            # avoid re-processing
            scene_object_types.add(base_obj_type.id)

        zf.writestr(
            os.path.join(ot_path, humps.depascalize(base_obj_type.id)) + ".py",
            base_obj_type.source)
def save_and_import_type_def(source: str, type_name: str, output_type: type[T], path: str, module_name: str) -> type[T]:
    """Write *source* into the module package and import the defined type.

    When the import fails, the freshly written file is removed again so no
    broken source stays behind on disk, and the error is re-raised.
    """

    module_stem = os.path.join(path, module_name, humps.depascalize(type_name))
    full_path = module_stem + ".py"

    with open(full_path, "w") as file:
        file.write(source)

    try:
        return import_type_def(type_name, output_type, path, module_name)
    except Arcor2Exception:
        os.remove(full_path)
        raise
def __init__(self, project_id: str) -> None:
    """Load scene/project data and per-type collision models, then delegate
    to the parent constructor.

    :param project_id: must match the id of the stored project data.
    :raises ResourcesException: when the stored project has a different id.
    """

    scene = self.read_project_data(Scene.__name__.lower(), Scene)
    project = self.read_project_data(Project.__name__.lower(), Project)

    if project_id != project.id:
        raise ResourcesException("Resources were generated for different project!")

    models: Dict[str, Optional[Models]] = {}

    for obj in scene.objects:
        if obj.type in models:  # each type is loaded only once
            continue
        try:
            data = self.read_project_data("models/" + humps.depascalize(obj.type), ObjectModel)
            models[obj.type] = data.model()
        except IOError:
            # missing model file -> object type has no collision model
            models[obj.type] = None

    super().__init__(scene, project, models)
def __init__(self, apply_action_mapping: bool = True) -> None:
    """Load the package data, import ObjectTypes and instantiate all scene objects.

    Reads scene/project data, makes AP poses absolute, loads collision
    models, imports and patches each scene ObjectType, announces the
    package via a PackageInfo event and finally creates all object
    instances concurrently. On any instance-creation failure, already
    created objects are cleaned up and a ResourcesException is raised.

    :param apply_action_mapping: when True, patch type-defs with the
        project's action mapping.
    :raises ResourcesException: inconsistent scene/project, unknown
        breakpoint id, or failed object initialization.
    """

    models: dict[str, Optional[Models]] = {}

    scene = self.read_project_data(Scene.__name__.lower(), Scene)
    project = self.read_project_data(Project.__name__.lower(), Project)

    self.scene = CachedScene(scene)
    self.project = CachedProject(project)

    if self.project.scene_id != self.scene.id:
        raise ResourcesException("Project/scene not consistent!")

    # make all poses absolute
    for aps in self.project.action_points_with_parent:
        # Action point pose is relative to its parent object/AP pose in scene but is absolute during runtime.
        tr.make_relative_ap_global(self.scene, self.project, aps)

    # load collision models; a missing file means the type has no model
    for obj_type in self.scene.object_types:
        try:
            models[obj_type] = self.read_project_data("models/" + humps.depascalize(obj_type), ObjectModel).model()
        except IOError:
            models[obj_type] = None

    type_defs: TypesDict = {}

    for scene_obj_type in self.scene.object_types:  # get all type-defs
        assert scene_obj_type not in type_defs
        assert scene_obj_type not in built_in_types_names()

        module = importlib.import_module(CUSTOM_OBJECT_TYPES_MODULE + "." + humps.depascalize(scene_obj_type))
        cls = getattr(module, scene_obj_type)
        patch_object_actions(cls)
        type_defs[cls.__name__] = cls

        if apply_action_mapping:
            patch_with_action_mapping(cls, self.scene, self.project)

    action.start_paused, action.breakpoints = parse_args()

    if action.breakpoints:
        ap_ids = self.project.action_points_ids
        for bp in action.breakpoints:
            if bp not in ap_ids:
                raise ResourcesException(f"Breakpoint ID unknown: {bp}.")

    # orientations / joints have to be monkey-patched with AP's ID in order to make breakpoints work in @action
    for ap in self.project.action_points:
        setattr(ap.position, AP_ID_ATTR, ap.id)
        for joints in self.project.ap_joints(ap.id):
            setattr(joints, AP_ID_ATTR, ap.id)

    package_id = os.path.basename(os.getcwd())
    package_meta = package.read_package_meta(package_id)
    package_info_event = PackageInfo(PackageInfo.Data(package_id, package_meta.name, scene, project))

    # sort loaded models into the PackageInfo event by concrete model type
    for model in models.values():
        if not model:
            continue
        if isinstance(model, Box):
            package_info_event.data.collision_models.boxes.append(model)
        elif isinstance(model, Sphere):
            package_info_event.data.collision_models.spheres.append(model)
        elif isinstance(model, Cylinder):
            package_info_event.data.collision_models.cylinders.append(model)
        elif isinstance(model, Mesh):
            package_info_event.data.collision_models.meshes.append(model)

    # following steps might take some time, so let UIs know about the package as a first thing
    print_event(package_info_event)

    # in order to prepare a clean environment (clears all configurations and all collisions)
    scene_service.stop()

    self.executor = concurrent.futures.ThreadPoolExecutor()
    futures: list[concurrent.futures.Future] = []

    # create object instances concurrently; constructor signature depends on base class
    for scene_obj in self.scene.objects:
        cls = type_defs[scene_obj.type]
        settings = settings_from_params(cls, scene_obj.parameters, self.project.overrides.get(scene_obj.id, None))

        if issubclass(cls, Robot):
            futures.append(self.executor.submit(cls, scene_obj.id, scene_obj.name, scene_obj.pose, settings))
        elif issubclass(cls, CollisionObject):
            futures.append(
                self.executor.submit(cls, scene_obj.id, scene_obj.name, scene_obj.pose, models[scene_obj.type], settings))
        elif issubclass(cls, GenericWithPose):
            futures.append(self.executor.submit(cls, scene_obj.id, scene_obj.name, scene_obj.pose, settings))
        elif issubclass(cls, Generic):
            futures.append(self.executor.submit(cls, scene_obj.id, scene_obj.name, settings))
        else:
            raise Arcor2Exception(f"{cls.__name__} has unknown base class.")

    exceptions: list[Arcor2Exception] = []

    self.objects: dict[str, Generic] = {}

    for f in concurrent.futures.as_completed(futures):
        try:
            inst = f.result()  # if an object creation resulted in exception, it will be raised here
        except Arcor2Exception as e:
            print_exception(e)
            exceptions.append(e)
        else:
            self.objects[inst.id] = inst  # successfully initialized objects

    if exceptions:
        # if something failed, tear down those that succeeded and stop
        self.cleanup_all_objects()
        raise ResourcesException(" ".join([str(e) for e in exceptions]), exceptions)

    scene_service.start()

    self._stream_futures: list[concurrent.futures.Future] = []
def type_name(cls) -> str:
    """Returns parameter type as string used in JSON."""
    pascal_name = cls.type().__name__
    return humps.depascalize(pascal_name)
def _publish(project_id: str, package_name: str) -> RespT:
    """Build an execution package zip for the given project and send it.

    Collects the project/scene JSON, all ObjectType sources (including
    bases resolved via the Project service), collision models, the main
    script (generated from logic or fetched from the Project service) and
    package metadata into an in-memory zip.

    :param project_id: id of the project to package.
    :param package_name: name of the package; falls back to the project name
        when empty.
    :return: Flask file response with the zip as an attachment.
    :raises FlaskException: error_code 404 when package content cannot be
        prepared, 501 when script generation/validation fails.
    """

    mem_zip = BytesIO()

    logger.debug(f"Generating package {package_name} for project_id: {project_id}.")

    types_dict: TypesDict = {}

    # restore original environment
    sys.path = list(original_sys_path)
    sys.modules = dict(original_sys_modules)

    with tempfile.TemporaryDirectory() as tmp_dir:

        prepare_object_types_dir(tmp_dir, OBJECT_TYPE_MODULE)

        with zipfile.ZipFile(mem_zip, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:

            try:
                logger.debug("Getting scene and project.")
                project = ps.get_project(project_id)
                cached_project = CachedProject(project)
                scene = ps.get_scene(project.scene_id)
                cached_scene = CachedScene(scene)

                if not package_name:
                    package_name = project.name

                data_path = "data"
                ot_path = "object_types"

                zf.writestr(os.path.join(ot_path, "__init__.py"), "")
                zf.writestr(os.path.join(data_path, "project.json"), project.to_json())
                zf.writestr(os.path.join(data_path, "scene.json"), scene.to_json())

                obj_types = set(cached_scene.object_types)
                obj_types_with_models: set[str] = set()

                if __debug__:
                    # this should uncover potential problems with order in which ObjectTypes are processed
                    import random
                    random.shuffle(scene.objects)

                for scene_obj in scene.objects:

                    if scene_obj.type in types_dict:
                        continue

                    logger.debug(f"Getting scene object type {scene_obj.type}.")
                    obj_type = ps.get_object_type(scene_obj.type)

                    # fetch each type's collision model at most once
                    if obj_type.model and obj_type.id not in obj_types_with_models:
                        obj_types_with_models.add(obj_type.id)

                        model = ps.get_model(obj_type.model.id, obj_type.model.type)
                        obj_model = ObjectModel(
                            obj_type.model.type, **{model.type().value.lower(): model}  # type: ignore
                        )

                        zf.writestr(
                            os.path.join(data_path, "models", humps.depascalize(obj_type.id) + ".json"),
                            obj_model.to_json(),
                        )

                    zf.writestr(os.path.join(ot_path, humps.depascalize(obj_type.id)) + ".py", obj_type.source)

                    # handle inheritance
                    get_base_from_project_service(types_dict, tmp_dir, obj_types, obj_type, zf, ot_path,
                                                  parse(obj_type.source))

                    types_dict[scene_obj.type] = save_and_import_type_def(
                        obj_type.source, scene_obj.type, Generic, tmp_dir, OBJECT_TYPE_MODULE)

            except Arcor2Exception as e:
                logger.exception(f"Failed to prepare package content. {str(e)}")
                raise FlaskException(str(e), error_code=404)

            script_path = "script.py"

            try:

                if project.has_logic:
                    logger.debug("Generating script from project logic.")
                    zf.writestr(script_path, program_src(types_dict, cached_project, cached_scene, True))
                else:
                    try:
                        logger.debug("Getting project sources.")
                        script = ps.get_project_sources(project.id).script

                        # check if it is a valid Python code
                        try:
                            parse(script)
                        except SourceException:
                            logger.exception("Failed to parse code of the uploaded script.")
                            raise FlaskException("Invalid code.", error_code=501)

                        zf.writestr(script_path, script)
                    except ps.ProjectServiceException:
                        logger.info("Script not found on project service, creating one from scratch.")

                        # write script without the main loop
                        zf.writestr(script_path, program_src(types_dict, cached_project, cached_scene, False))

                logger.debug("Generating supplementary files.")

                logger.debug("action_points.py")
                zf.writestr("action_points.py", global_action_points_class(cached_project))

                logger.debug("package.json")
                zf.writestr("package.json", PackageMeta(package_name, datetime.now(tz=timezone.utc)).to_json())

            except Arcor2Exception as e:
                logger.exception("Failed to generate script.")
                raise FlaskException(str(e), error_code=501)

    logger.info(f"Done with {package_name} (scene {scene.name}, project {project.name}).")
    mem_zip.seek(0)

    return send_file(mem_zip, as_attachment=True, max_age=0, download_name=f"{package_name}_package.zip")
def test_depascalize():
    """Dicts with PascalCase keys are converted to snake_case; leading and
    trailing underscores as well as all-upper acronym keys survive intact."""
    samples = [
        {
            'Symbol': 'AAL',
            'LastPrice': 31.78,
            'ChangePct': 2.8146,
            'ImpliedVolatality': 0.482,
        },
        {
            'Symbol': 'LBTYA',
            'LastPrice': 25.95,
            'ChangePct': 2.6503,
            'ImpliedVolatality': 0.7287,
        },
        {
            '_Symbol': 'LBTYK',
            'ChangePct_': 2.5827,
            '_LastPrice__': 25.42,
            '__ImpliedVolatality_': 0.4454,
        },
        {
            'API': 'test_upper',
            '_API_': 'test_upper',
            '__API__': 'test_upper',
            'APIResponse': 'test_acronym',
            '_APIResponse_': 'test_acronym',
            '__APIResponse__': 'test_acronym',
            'ruby_tuesdays': 'ruby_tuesdays',
        },
    ]
    want = [
        {
            'symbol': 'AAL',
            'last_price': 31.78,
            'change_pct': 2.8146,
            'implied_volatality': 0.482,
        },
        {
            'symbol': 'LBTYA',
            'last_price': 25.95,
            'change_pct': 2.6503,
            'implied_volatality': 0.7287,
        },
        {
            '_symbol': 'LBTYK',
            'change_pct_': 2.5827,
            '_last_price__': 25.42,
            '__implied_volatality_': 0.4454,
        },
        {
            'API': 'test_upper',
            '_API_': 'test_upper',
            '__API__': 'test_upper',
            'api_response': 'test_acronym',
            '_api_response_': 'test_acronym',
            '__api_response__': 'test_acronym',
            'ruby_tuesdays': 'ruby_tuesdays',
        },
    ]
    assert humps.depascalize(samples) == want
def __tablename__(cls) -> str:
    """Derive the table name as the snake_case form of the class name."""
    class_name = cls.__name__
    return humps.depascalize(class_name)
def test_converting_strings():
    """Each converter transforms a representative string to the expected case."""
    cases = [
        (humps.camelize, 'jack_in_the_box', 'jackInTheBox'),
        (humps.decamelize, 'rubyTuesdays', 'ruby_tuesdays'),
        (humps.depascalize, 'UnosPizza', 'unos_pizza'),
        (humps.pascalize, 'red_robin', 'RedRobin'),
    ]
    for convert, given, want in cases:
        assert convert(given) == want
def test_upper():
    """An all-uppercase acronym is left untouched by every converter."""
    for convert in (humps.camelize, humps.decamelize, humps.pascalize, humps.depascalize):
        assert convert("API") == "API"
def test_converting_strings():
    """Each converter transforms a representative string to the expected case."""
    cases = [
        (humps.camelize, "jack_in_the_box", "jackInTheBox"),
        (humps.decamelize, "rubyTuesdays", "ruby_tuesdays"),
        (humps.depascalize, "UnosPizza", "unos_pizza"),
        (humps.pascalize, "red_robin", "RedRobin"),
    ]
    for convert, given, want in cases:
        assert convert(given) == want
def test_depascalize():
    """Dicts with PascalCase keys are converted to snake_case; leading and
    trailing underscores as well as all-upper acronym keys survive intact."""
    samples = [
        {
            "Symbol": "AAL",
            "LastPrice": 31.78,
            "ChangePct": 2.8146,
            "ImpliedVolatality": 0.482,
        },
        {
            "Symbol": "LBTYA",
            "LastPrice": 25.95,
            "ChangePct": 2.6503,
            "ImpliedVolatality": 0.7287,
        },
        {
            "_Symbol": "LBTYK",
            "ChangePct_": 2.5827,
            "_LastPrice__": 25.42,
            "__ImpliedVolatality_": 0.4454,
        },
        {
            "API": "test_upper",
            "_API_": "test_upper",
            "__API__": "test_upper",
            "APIResponse": "test_acronym",
            "_APIResponse_": "test_acronym",
            "__APIResponse__": "test_acronym",
            "ruby_tuesdays": "ruby_tuesdays",
        },
    ]
    want = [
        {
            "symbol": "AAL",
            "last_price": 31.78,
            "change_pct": 2.8146,
            "implied_volatality": 0.482,
        },
        {
            "symbol": "LBTYA",
            "last_price": 25.95,
            "change_pct": 2.6503,
            "implied_volatality": 0.7287,
        },
        {
            "_symbol": "LBTYK",
            "change_pct_": 2.5827,
            "_last_price__": 25.42,
            "__implied_volatality_": 0.4454,
        },
        {
            "API": "test_upper",
            "_API_": "test_upper",
            "__API__": "test_upper",
            "api_response": "test_acronym",
            "_api_response_": "test_acronym",
            "__api_response__": "test_acronym",
            "ruby_tuesdays": "ruby_tuesdays",
        },
    ]
    assert humps.depascalize(samples) == want
def __init__(self, scene: Scene, project: Project, models: Dict[str, Optional[Models]]) -> None:
    """Import ObjectTypes, instantiate all scene objects and start the scene.

    Validates scene/project consistency, imports (built-in or custom) type
    definitions, patches their actions with the project's action mapping,
    creates instances in INIT_PRIORITY order, fills and publishes a
    PackageInfo event and finally makes AP poses absolute.

    :param scene: scene data the instances are created from.
    :param project: project data belonging to *scene*.
    :param models: collision model per object type (None when there is none).
    :raises ResourcesException: project/scene mismatch.
    :raises Arcor2Exception: when a type-def has an unknown base class.
    """

    self.project = CachedProject(project)
    self.scene = CachedScene(scene)

    if self.project.scene_id != self.scene.id:
        raise ResourcesException("Project/scene not consistent!")

    self.objects: Dict[str, Generic] = {}
    self.type_defs: TypesDict = {}

    built_in = built_in_types_names()

    if scene_service.started():
        scene_service.stop()
    # start from a clean collision state
    # NOTE(review): in the original layout delete_all_collisions() appears
    # right after the stop() call; assumed to run unconditionally - confirm
    scene_service.delete_all_collisions()

    package_id = os.path.basename(os.getcwd())
    package_meta = package.read_package_meta(package_id)
    package_info_event = PackageInfo(PackageInfo.Data(package_id, package_meta.name, scene, project))

    for scene_obj_type in self.scene.object_types:  # get all type-defs

        assert scene_obj_type not in self.type_defs

        # built-in types come from arcor2.object_types, the rest from the package's custom module
        if scene_obj_type in built_in:
            module = importlib.import_module(
                arcor2.object_types.__name__ + "." + humps.depascalize(scene_obj_type))
        else:
            module = importlib.import_module(
                Resources.CUSTOM_OBJECT_TYPES_MODULE + "." + humps.depascalize(scene_obj_type))

        cls = getattr(module, scene_obj_type)
        patch_object_actions(cls, get_action_name_to_id(self.scene, self.project, cls.__name__))
        self.type_defs[cls.__name__] = cls

    scene_objects = list(self.scene.objects)

    # sort according to OT initialization priority (highest is initialized first)
    scene_objects.sort(key=lambda x: self.type_defs[x.type].INIT_PRIORITY, reverse=True)

    for scene_obj in scene_objects:

        cls = self.type_defs[scene_obj.type]

        assert scene_obj.id not in self.objects, "Duplicate object id {}!".format(scene_obj.id)

        settings = settings_from_params(cls, scene_obj.parameters, self.project.overrides.get(scene_obj.id, None))

        # constructor signature depends on the type's base class
        if issubclass(cls, Robot):
            self.objects[scene_obj.id] = cls(scene_obj.id, scene_obj.name, scene_obj.pose, settings)
        elif issubclass(cls, GenericWithPose):
            self.objects[scene_obj.id] = cls(scene_obj.id, scene_obj.name, scene_obj.pose,
                                             models[scene_obj.type], settings)
        elif issubclass(cls, Generic):
            self.objects[scene_obj.id] = cls(scene_obj.id, scene_obj.name, settings)
        else:
            raise Arcor2Exception("Unknown base class.")

    # sort collision models into the PackageInfo event by concrete model type
    for model in models.values():
        if not model:
            continue
        if isinstance(model, Box):
            package_info_event.data.collision_models.boxes.append(model)
        elif isinstance(model, Sphere):
            package_info_event.data.collision_models.spheres.append(model)
        elif isinstance(model, Cylinder):
            package_info_event.data.collision_models.cylinders.append(model)
        elif isinstance(model, Mesh):
            package_info_event.data.collision_models.meshes.append(model)

    scene_service.start()

    print_event(package_info_event)

    # make all poses absolute
    for aps in self.project.action_points_with_parent:
        # Action point pose is relative to its parent object/AP pose in scene but is absolute during runtime.
        tr.make_relative_ap_global(self.scene, self.project, aps)
def test_upper():
    """An all-uppercase acronym is left untouched by every converter."""
    for convert in (humps.camelize, humps.decamelize, humps.pascalize, humps.depascalize):
        assert convert('API') == 'API'