def main() -> None:
    """Entry point: parse CLI options, set the log level, and start the service loop."""
    # Environment variable provides the default; the -d flag forces DEBUG.
    default_level = LogLevel.DEBUG if env.get_bool("ARCOR2_LOGGER_DEBUG") else LogLevel.INFO

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "-d",
        "--debug",
        help="Set logging level to debug.",
        action="store_const",
        const=LogLevel.DEBUG,
        default=default_level,
    )
    arg_parser.add_argument("--version", action="version", version=version(), help="Shows version and exits.")
    arg_parser.add_argument(
        "-a",
        "--asyncio_debug",
        help="Turn on asyncio debug mode.",
        action="store_const",
        const=True,
        default=env.get_bool("ARCOR2_LOGGER_ASYNCIO_DEBUG"),
    )

    parsed = arg_parser.parse_args()
    logger.level = parsed.debug

    # Fail fast on any unhandled exception raised inside the event loop.
    run(aio_main(), stop_on_unhandled_errors=True)
def main() -> None:
    """Entry point for the Dobot service: parse CLI args and run the REST app."""
    global _mock

    arg_parser = argparse.ArgumentParser(description=SERVICE_NAME)
    arg_parser.add_argument("-s", "--swagger", action="store_true", default=False)
    arg_parser.add_argument("-m", "--mock", action="store_true", default=env.get_bool("ARCOR2_DOBOT_MOCK"))
    arg_parser.add_argument(
        "-d",
        "--debug",
        help="Set logging level to debug.",
        action="store_const",
        const=logging.DEBUG,
        default=logging.DEBUG if env.get_bool("ARCOR2_DOBOT_DEBUG") else logging.INFO,
    )
    parsed = arg_parser.parse_args()

    logger.setLevel(parsed.debug)

    _mock = parsed.mock
    if _mock:
        logger.info("Starting as a mock!")

    # When only the OpenAPI spec is requested, do not block on the Scene service.
    if not parsed.swagger:
        scene_service.wait_for()

    run_app(app, SERVICE_NAME, version(), port_from_url(URL), [Pose, Joint], parsed.swagger)

    # Release the device if one was actually opened (None when mocked / never used).
    if _dobot:
        _dobot.cleanup()
def main() -> None:
    """Entry point for the Build service REST API."""
    cli = argparse.ArgumentParser(description=SERVICE_NAME)
    cli.add_argument("-s", "--swagger", action="store_true", default=False)
    cli.add_argument(
        "-d",
        "--debug",
        help="Set logging level to debug.",
        action="store_const",
        const=logging.DEBUG,
        default=logging.DEBUG if env.get_bool("ARCOR2_BUILD_DEBUG") else logging.INFO,
    )
    options = cli.parse_args()

    logger.setLevel(options.debug)

    run_app(
        app,
        SERVICE_NAME,
        arcor2_build.version(),
        port_from_url(URL),
        [ImportResult],
        print_spec=options.swagger,
    )
def run_app(
    app: Flask,
    name: str,
    version: str,
    api_version: str,
    port: int,
    dataclasses: Optional[List[Type[JsonSchemaMixin]]] = None,
    print_spec: bool = False,
) -> None:
    """Configure and run a Flask-based service with an auto-generated OpenAPI spec.

    Builds an APISpec from the app's registered routes (and optional dataclass
    schemas), then either prints the spec and exits (print_spec=True) or
    registers the Swagger UI / error handlers and serves the app.

    :param app: Flask application with all routes already registered.
    :param name: Human-readable service name (used in the spec title).
    :param version: Service version shown in the spec title.
    :param api_version: OpenAPI "info.version" value.
    :param port: TCP port to listen on.
    :param dataclasses: Optional JsonSchemaMixin classes to expose as schemas.
    :param print_spec: When True, print the YAML spec to stdout and return
        without starting the server.
    """
    spec = APISpec(
        title=f"{name} ({version})",
        version=api_version,
        openapi_version="3.0.2",
        plugins=[FlaskPlugin(), DataclassesPlugin()],
    )

    # Register dataclass-based component schemas first so paths can reference them.
    if dataclasses is not None:
        for dc in dataclasses:
            spec.components.schema(dc.__name__, schema=dc)

    # A request context is required for FlaskPlugin to introspect the views.
    with app.test_request_context():
        for rule in app.url_map.iter_rules():
            if rule.endpoint != "static":
                spec.path(view=app.view_functions[rule.endpoint])

    if print_spec:
        # Spec-only mode: dump YAML and skip server startup entirely.
        print(spec.to_yaml())
        return

    @app.route("/swagger/api/swagger.json", methods=["GET"])
    def get_swagger() -> str:
        # NOTE(review): jsonify returns a Response object, so the "-> str"
        # annotation looks inaccurate — confirm before changing.
        return jsonify(spec.to_dict())

    @app.errorhandler(Arcor2Exception)
    def handle_bad_request_general(e: Arcor2Exception) -> Tuple[str, int]:
        # Generic project exceptions map to HTTP 400 with the message as JSON.
        return json.dumps(str(e)), 400

    @app.errorhandler(FlaskException)
    def handle_bad_request_intentional(e: FlaskException) -> Tuple[str, int]:
        # Intentional errors carry their own HTTP status code.
        return json.dumps(str(e)), e.error_code

    SWAGGER_URL = "/swagger"

    swaggerui_blueprint = get_swaggerui_blueprint(
        SWAGGER_URL, "./api/swagger.json"  # Swagger UI static files will be mapped to '{SWAGGER_URL}/dist/'
    )

    # Register blueprint at URL
    app.register_blueprint(swaggerui_blueprint, url_prefix=SWAGGER_URL)

    if not env.get_bool("ARCOR2_REST_API_DEBUG", False):  # turn off logging each endpoint call by default
        log = logging.getLogger("werkzeug")
        log.setLevel(logging.ERROR)

    app.run(host="0.0.0.0", port=port)
def main() -> None:
    """Entry point for the Execution service: set up logging and the asyncio loop, then run."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "-d",
        "--debug",
        help="Set logging level to debug.",
        action="store_const",
        const=LogLevel.DEBUG,
        default=LogLevel.DEBUG if env.get_bool("ARCOR2_EXECUTION_DEBUG") else LogLevel.INFO,
    )
    arg_parser.add_argument(
        "--version", action="version", version=arcor2_execution.version(), help="Shows version and exits."
    )
    arg_parser.add_argument(
        "--api_version", action="version", version=arcor2_execution_data.version(), help="Shows API version and exits."
    )
    arg_parser.add_argument(
        "-a",
        "--asyncio_debug",
        help="Turn on asyncio debug mode.",
        action="store_const",
        const=True,
        default=env.get_bool("ARCOR2_EXECUTION_ASYNCIO_DEBUG"),
    )
    parsed = arg_parser.parse_args()

    logger.level = parsed.debug

    event_loop = asyncio.get_event_loop()
    event_loop.set_debug(enabled=parsed.asyncio_debug)
    # Route unhandled task exceptions through the websocket server's handler.
    event_loop.set_exception_handler(ws_server.custom_exception_handler)

    compile_json_schemas()

    run(aio_main(), loop=event_loop)
def main() -> None:
    """Entry point for the Scene service: parse CLI args and launch the REST app."""
    global inflation

    cli = argparse.ArgumentParser(description=SCENE_SERVICE_NAME)
    cli.add_argument("-s", "--swagger", action="store_true", default=False)
    cli.add_argument(
        "-d",
        "--debug",
        help="Set logging level to debug.",
        action="store_const",
        const=logging.DEBUG,
        default=logging.DEBUG if env.get_bool("ARCOR2_SCENE_DEBUG") else logging.INFO,
    )
    cli.add_argument(
        "-i",
        "--inflation",
        help="How much to inflate collision objects (meters).",
        nargs="?",
        default=env.get_float("ARCOR2_SCENE_INFLATION", 0.01),
        type=float,
    )
    options = cli.parse_args()

    logger.setLevel(options.debug)
    inflation = options.inflation

    # Models exposed as OpenAPI component schemas.
    exposed_models = [
        common.Pose,
        object_type.Box,
        object_type.Cylinder,
        object_type.Sphere,
        object_type.Mesh,
        scene.MeshFocusAction,
        scene.LineCheck,
        scene.LineCheckResult,
    ]

    run_app(
        app,
        SCENE_SERVICE_NAME,
        version(),
        SCENE_PORT,
        exposed_models,
        options.swagger,
        api_version="0.5.0",
    )
def main() -> None:
    """Entry point for the Kinect Azure service: parse CLI args and run the REST app."""
    global _mock

    cli = argparse.ArgumentParser(description=SERVICE_NAME)
    cli.add_argument("-s", "--swagger", action="store_true", default=False)
    cli.add_argument("-m", "--mock", action="store_true", default=env.get_bool("ARCOR2_KINECT_AZURE_MOCK"))
    options = cli.parse_args()

    _mock = options.mock
    if _mock:
        logger.info("Starting as a mock!")

    run_app(app, SERVICE_NAME, version(), port_from_url(URL), [CameraParameters], options.swagger)

    # Release the device if it was actually opened (None when mocked / never used).
    if _kinect:
        _kinect.cleanup()
def main() -> None:
    """Entry point for the Calibration service.

    Parses CLI arguments, optionally loads a YAML configuration file with the
    marker layout, and runs the REST app (possibly in mock mode).
    """
    parser = argparse.ArgumentParser(description=SERVICE_NAME)
    parser.add_argument(
        "-d",
        "--debug",
        help="Set logging level to debug.",
        action="store_const",
        const=logging.DEBUG,
        default=logging.DEBUG if env.get_bool("ARCOR2_CALIBRATION_DEBUG") else logging.INFO,
    )

    run_as_mock = env.get_bool("ARCOR2_CALIBRATION_MOCK")

    # argparse has no support for env vars so this is kind of a workaround:
    # the --config-file/--swagger/--mock options only exist when the mock env var is unset.
    # TODO maybe it could be solved using a custom action like https://gist.github.com/orls/51525c86ee77a56ad396
    if not run_as_mock:
        # Exactly one of --config-file or (--swagger | --mock) must be given.
        group = parser.add_mutually_exclusive_group(required=True)
        group.add_argument(
            "--config-file",
            "-c",
            type=argparse.FileType("r"),
            help="Config file name containing a valid YAML configuration.",
        )
        sub_group = group.add_mutually_exclusive_group()
        sub_group.add_argument("-s", "--swagger", action="store_true", default=False)
        sub_group.add_argument(
            "-m",
            "--mock",
            action="store_true",
            default=False,
            help="Run the service in a mock mode. The same can be done by setting ARCOR2_CALIBRATION_MOCK.",
        )

    args = parser.parse_args()
    logger.setLevel(args.debug)

    # NOTE: when run_as_mock is True, 'args' has no 'swagger'/'mock' attributes;
    # the 'or' below short-circuits so they are never accessed in that case.
    if not (run_as_mock or args.swagger or args.mock):
        data = args.config_file.read()

        global MARKER_SIZE
        global MIN_DIST
        global MAX_DIST

        try:
            config = yaml.safe_load(data)
            # Each value falls back to the module-level default when missing.
            MARKER_SIZE = float(config.get("marker_size", MARKER_SIZE))
            MIN_DIST = float(config.get("min_dist", MIN_DIST))
            MAX_DIST = float(config.get("max_dist", MAX_DIST))
            calibration.BLUR_THRESHOLD = float(config.get("blur_threshold", calibration.BLUR_THRESHOLD))
            for marker_id, marker in config["markers"].items():
                MARKERS[int(marker_id)] = Pose.from_dict(marker["pose"])

            logger.info(
                f"Loaded configuration id '{config['id']}' with {len(MARKERS)} marker(s) of size {MARKER_SIZE}."
            )
        except (KeyError, ValueError, TypeError, ValidationError):
            # Any malformed config is fatal — log the traceback and exit.
            logger.exception("Failed to load the configuration file.")
            sys.exit(1)

        if not (MAX_DIST > MIN_DIST):
            logger.error("'max_dist' have to be bigger than 'min_dist'.")
            sys.exit(1)

    global _mock
    # Short-circuit again protects access to args.mock when run_as_mock is True.
    _mock = run_as_mock or args.mock
    if _mock:
        logger.info("Starting as a mock!")

    run_app(
        app,
        SERVICE_NAME,
        arcor2_calibration.version(),
        port_from_url(CALIBRATION_URL),
        [Pose, CalibrateRobotArgs, MarkerCorners, EstimatedPose],
        # 'swagger' may be absent from args (mock env var set) — default to False.
        getattr(args, "swagger", False),
    )
from arcor2 import env from arcor2.cached import CachedProject as CProject from arcor2.cached import CachedScene as CScene from arcor2.data.common import Action, ActionParameter, FlowTypes from arcor2.exceptions import Arcor2Exception from arcor2.logging import get_logger from arcor2.parameter_plugins.base import TypesDict from arcor2.parameter_plugins.utils import plugin_from_type_name from arcor2.source import SCRIPT_HEADER, SourceException from arcor2.source.utils import add_import, add_method_call, tree_to_str from arcor2_build.source.object_types import object_instance_from_res from arcor2_build.source.utils import empty_script_tree, find_function, find_last_assign, main_loop logger = get_logger( __name__, logging.DEBUG if env.get_bool("ARCOR2_LOGIC_DEBUG", False) else logging.INFO) def program_src(type_defs: TypesDict, project: CProject, scene: CScene, add_logic: bool = True) -> str: tree = empty_script_tree(project.id, add_main_loop=add_logic) # get object instances from resources object main = find_function("main", tree) last_assign = find_last_assign(main) for obj in scene.objects: add_import(tree, "object_types." + humps.depascalize(obj.type),
the read timeout is the number of seconds the client will wait for the server to send a response. (Specifically, it’s the number of seconds that the client will wait between bytes sent from the server. In 99.9% of cases, this is the time before the server sends the first byte). Source: https://requests.readthedocs.io/en/master/user/advanced/#timeouts """

    # Tail of a Timeout dataclass whose header is outside this chunk — TODO confirm.
    # Defaults in seconds; connect slightly above a multiple of 3 per requests docs.
    connect: float = 3.05
    read: float = 20.0


# Convenience alias for an optional timeout argument.
OptTimeout = Optional[Timeout]

# module-level variables
debug = env.get_bool("ARCOR2_REST_DEBUG", False)  # enables DEBUG-level logging for this module
headers = {"accept": "application/json", "content-type": "application/json"}  # default JSON headers
session = requests.session()  # shared session so connections are reused
logger = get_logger(__name__, logging.DEBUG if debug else logging.INFO)


def dataclass_from_json(resp_json: Dict[str, Any], return_type: Type[DataClass]) -> DataClass:
    """Deserialize a parsed-JSON dict into the given dataclass type.

    :param resp_json: Parsed JSON body (already a dict).
    :param return_type: Target dataclass (dataclasses-jsonschema mixin).
    :raises RestException: When validation of the payload fails.
    """
    try:
        return return_type.from_dict(resp_json)
    except ValidationError as e:
        # Log the full payload at debug level, but surface only a generic error.
        logger.debug(f'{return_type.__name__}: validation error "{e}" while parsing "{resp_json}".')
        raise RestException("Invalid data.", str(e)) from e
import humps from arcor2 import env from arcor2.cached import CachedProject as CProject from arcor2.cached import CachedScene as CScene from arcor2.data.common import Action, ActionParameter, FlowTypes from arcor2.exceptions import Arcor2Exception from arcor2.logging import get_logger from arcor2.parameter_plugins.base import TypesDict from arcor2.parameter_plugins.utils import plugin_from_type_name from arcor2.source import SCRIPT_HEADER, SourceException from arcor2.source.utils import add_import, add_method_call, tree_to_str from arcor2_build.source.object_types import object_instance_from_res from arcor2_build.source.utils import empty_script_tree, find_function, find_last_assign, main_loop logger = get_logger(__name__, logging.DEBUG if env.get_bool("ARCOR2_LOGIC_DEBUG", False) else logging.INFO) def program_src(type_defs: TypesDict, project: CProject, scene: CScene, add_logic: bool = True) -> str: tree = empty_script_tree(project.id, add_main_loop=add_logic) # get object instances from resources object main = find_function("main", tree) last_assign = find_last_assign(main) for obj in scene.objects: add_import(tree, "object_types." + humps.depascalize(obj.type), obj.type, try_to_import=False) last_assign += 1 main.body.insert(last_assign, object_instance_from_res(obj.name, obj.id, obj.type)) # TODO temporary solution - should be (probably) handled by plugin(s)
get_project_sources,
    put_model,
    update_object_type,
    update_project_sources,
)
from arcor2.clients.persistent_storage import ProjectServiceException
from arcor2.data.common import IdDesc, IdDescList, Project, Scene

"""
This module adds some caching capabilities to the aio version of persistent_storage. It should be only used by ARServer.

Caching can be disabled by setting respective environment variable - this is useful for environments where ARServer is not the only one who touches Project service.
"""

# Cache configuration — all overridable via environment variables.
_cache_enabled = env.get_bool("ARCOR2_ARSERVER_CACHE_ENABLED", True)
_cache_scenes = env.get_int("ARCOR2_ARSERVER_CACHE_SCENES", 16)  # max cached scenes
_cache_projects = env.get_int("ARCOR2_ARSERVER_CACHE_PROJECTS", 32)  # max cached projects

if _cache_enabled:
    # A zero/negative capacity would make the caches useless — enforce at least 1.
    _cache_scenes = max(_cache_scenes, 1)
    _cache_projects = max(_cache_projects, 1)

# here we need to know all the items
_scenes_list: Optional[Dict[str, IdDesc]] = None
_projects_list: Optional[Dict[str, IdDesc]] = None

# here we can forget least used items
if TYPE_CHECKING:
    _scenes: Optional[Dict[str, Scene]] = None
    _projects: Optional[Dict[str, Project]] = None