def walk(v):
    """Recursively traverse a parsed OpenAPI document and attach an
    ``x-go-name`` extension to every ``query``/``path`` parameter.

    The Go identifier is derived from the parameter name plus its
    location (``foo`` in ``query`` -> ``Foo_Query`` style via pascalize)
    so that generated names are unique across locations.

    :param v: any node of the ruamel.yaml document tree
    """
    pp(type(v))
    pp(v)
    if isinstance(v, ruamel.yaml.comments.CommentedSeq):
        for d in v:
            if "in" in d:
                in_value = d["in"]
                # The original had two byte-identical branches for
                # "query" and "path"; collapsed into one.
                if in_value in ("query", "path") and "name" in d:
                    name = d["name"] + "." + in_value
                    go_variable_name = humps.pascalize(
                        name.replace(".", "_"))
                    d["x-go-name"] = dq(go_variable_name)
    # NOTE: CommentedSeq subclasses list, so a sequence handled above is
    # also recursed into here — same as the original control flow.
    if isinstance(v, list):
        for e in v:
            walk(e)
    if isinstance(v, dict):
        for key in list(v.keys()):
            walk(v[key])
def ssm_document_name(template_name: str, name: str, is_logical: bool) -> str:
    """Build a PascalCase, alphanumeric-only SSM document identifier.

    Logical names get the ``ssm_doc`` prefix; imported ones get ``imp``.
    """
    prefix = "ssm_doc" if is_logical else "imp"
    raw = humps.pascalize(f"{prefix}_{template_name}_{name}")
    return re.sub(r'[\W_]', '', raw)
def __init__(
        self,
        replication_task_arn,
        start_replication_task_type,
        polling_interval=10,
        *args,
        **kwargs
):
    """
    Trigger AWS Data Migration Services Replication Task function

    :param replication_task_arn (string) [REQUIRED] -- The Amazon Resource
        Name (ARN) of the replication task to be started
    :param start_replication_task_type (string) [REQUIRED] -- The type of
        replication task. Possible Values include start-replication,
        resume-processing, reload-target
    :param polling_interval (integer) (default: 10) -- time interval, in
        seconds, to check the status of the job
    :param cdc_start_time (datetime) -- Indicates the start time for a change
        data capture (CDC) operation. Use either CdcStartTime or
        CdcStartPosition to specify when you want a CDC operation to start.
        Specifying both values results in an error.
    :param cdc_start_position (string) -- Indicates when you want a change
        data capture (CDC) operation to start. Use either CdcStartPosition or
        CdcStartTime to specify when you want a CDC operation to start.
        Specifying both values results in an error.
    :param cdc_stop_position (string) -- Indicates when you want a change
        data capture (CDC) operation to stop. The value can be either server
        time or commit time.

    Reference:
    https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dms.html#DatabaseMigrationService.Client.start_replication_task
    """
    super(StartDMSReplicationTaskOperator, self).__init__(*args, **kwargs)
    self.replication_task_arn = replication_task_arn
    self.start_replication_task_type = start_replication_task_type
    self.polling_interval = polling_interval
    # Merge the optional CDC kwargs straight onto the instance so the
    # pascalize pass below can pick them up generically.
    self.__dict__.update(kwargs)
    self.dms_client = boto3.client('dms')
    # Snapshot ALL instance attributes with PascalCase keys (boto3's
    # start_replication_task expects PascalCase argument names).  This
    # snapshot also contains non-boto3 attributes (e.g. DmsClient,
    # PollingInterval); they are filtered out by the intersection below,
    # which is why the order "update -> client -> pascalize" matters.
    self.pascalized_args = {humps.pascalize(
        k): v for k, v in self.__dict__.items()}
    # The only keyword arguments start_replication_task accepts.
    boto3_dms_arguments = ['ReplicationTaskArn', 'StartReplicationTaskType',
                           'CdcStartTime', 'CdcStartPosition',
                           'CdcStopPosition']
    self.func_args = {key: self.pascalized_args[key] for key in set(
        boto3_dms_arguments).intersection(self.pascalized_args.keys())}
def MemberExpression(syntax, info=None):
    """Translate a JS MemberExpression AST node into Go source text.

    :param syntax: the JS AST node (has ``object`` and ``property``)
    :param info: translation context dict; ``error_check`` is written as a
        side effect for the caller, ``pre``/``arg_str`` are read if present
    :return: Go expression string
    """
    # BUG FIX: the default was a mutable dict (``info={}``) that was
    # mutated below, so state leaked between calls made without an
    # explicit ``info`` argument.
    if info is None:
        info = {}
    info['error_check'] = False
    method_name = call_func_by_syntax(syntax.property)
    if (syntax.object.type == 'ThisExpression'
            and syntax.property.type == 'Identifier'
            and method_need_check_err(method_name)):
        info['error_check'] = True
    if syntax.object.type == 'ThisExpression':
        # Methods on `this` become exported (PascalCase) Go methods.
        method_name = humps.pascalize(method_name)
    obj = call_func_by_syntax(syntax.object)
    if syntax.object.type == 'ThisExpression':
        return f'{obj}.{method_name}'
    else:
        # Special-cased JS members with direct Go equivalents.
        m = {
            'toString': f'fmt.Sprintf("%v", {obj})',
            'length': f'self.Length({obj})',
        }
        default = f'{obj}[{method_name}]'
        if syntax.property.type == 'Identifier' and syntax.property.name == 'push':
            if 'arg_str' in info:
                return f'{obj} = append({obj}, {info["arg_str"]})'
        if syntax.property.type == 'Identifier' and syntax.property.name == 'split':
            if 'arg_str' in info:
                return f'strings.Split({obj}, {info["arg_str"]})'
        # if info.get('pre') in ['right', 'init', 'test']:
        if info.get('pre') != 'left':
            # Reading a member -> runtime Member() lookup.
            default = f'self.Member({obj}, {method_name})'
        if info.get('pre') in ['left']:
            # Assignment target -> runtime SetValue().
            default = f'self.SetValue({obj}, {method_name})'
        return m.get(syntax.property.name, default)
def load_class_from_file(_file_path):
    """
    A loader utility, which takes an experiment directory path, and loads
    necessary things into the ModelRegistry.

    This imposes an opinionated directory structure on the users, which
    looks something like :
    - envs/
        - my_env_1.py
        - my_env_2.py
        ....
        - my_env_N.py
    - models/
        - my_model_1.py
        - my_model_2.py
        .....
        - my_model_N.py

    The module is expected to define a class whose name is the PascalCase
    form of the (snake_case) file name.

    :param _file_path: path to the ``.py`` file to load
    :return: (filename, class_name, class object)
    :raises Exception: if the expected class is missing from the module
    """
    basename = os.path.basename(_file_path)
    filename = basename.replace(".py", "")
    class_name = humps.pascalize(filename)
    # Load the module
    loader = importlib.machinery.SourceFileLoader(filename, _file_path)
    mod = types.ModuleType(loader.name)
    loader.exec_module(mod)
    try:
        _class = getattr(mod, class_name)
    except AttributeError:
        # BUG FIX: getattr raises AttributeError, not KeyError, so the
        # friendly message below was unreachable.  Also fixed the swapped
        # format arguments (class name vs file) and the missing space
        # between the two sentences.
        raise Exception("Looking for a class named {} in the file {}. "
                        "Did you name the class correctly ?".format(
                            class_name, _file_path))
    return filename, class_name, _class
def test_pascalize(input_str, expected_output):
    """
    :param input_str: String that will be transformed.
    :param expected_output: The expected transformation.
    """
    result = humps.pascalize(input_str)
    assert result == expected_output, "%s != %s" % (result, expected_output)
def test_pascalize():
    """Pascalizing a nested structure converts keys at every depth while
    preserving leading/trailing underscores and existing acronyms."""
    result = humps.pascalize({
        'videos': [{
            'fallback_url': 'https://media.io/video',
            'scrubber_media_url': 'https://media.io/video',
            'dash_url': 'https://media.io/video',
        }],
        'images': [{
            'fallback_url': 'https://media.io/image',
            'scrubber_media_url': 'https://media.io/image',
            'url': 'https://media.io/image',
        }],
        'other': [
            {
                '_fallback_url': 'https://media.io/image',
                '__scrubber_media___url_': 'https://media.io/image',
                '_url__': 'https://media.io/image',
            },
            {
                'API': 'test_upper',
                '_API_': 'test_upper',
                '__API__': 'test_upper',
                'APIResponse': 'test_acronym',
                '_APIResponse_': 'test_acronym',
                '__APIResponse__': 'test_acronym',
            },
        ],
    })
    expected = {
        'Videos': [{
            'FallbackUrl': 'https://media.io/video',
            'ScrubberMediaUrl': 'https://media.io/video',
            'DashUrl': 'https://media.io/video',
        }],
        'Images': [{
            'FallbackUrl': 'https://media.io/image',
            'ScrubberMediaUrl': 'https://media.io/image',
            'Url': 'https://media.io/image',
        }],
        'Other': [
            {
                '_FallbackUrl': 'https://media.io/image',
                '__ScrubberMediaUrl_': 'https://media.io/image',
                '_Url__': 'https://media.io/image',
            },
            {
                'API': 'test_upper',
                '_API_': 'test_upper',
                '__API__': 'test_upper',
                'APIResponse': 'test_acronym',
                '_APIResponse_': 'test_acronym',
                '__APIResponse__': 'test_acronym',
            },
        ],
    }
    assert result == expected
def to_camel(self):
    """Return both camelCase and PascalCase forms of ``self.str_to_convert``.

    Input such as ``ab_cd_ef`` or ``AB_CD_EF`` is lower-cased first, then
    converted, e.g. -> ``["abCdEf", "AbCdEf"]``.
    """
    lowered = self.str_to_convert.lower()
    camel = humps.camelize(lowered)
    pascal = humps.pascalize(lowered)
    return [camel, pascal]
def pascalize(s):
    """Convert a dotted/underscored field path to a Go-style PascalCase
    identifier, upper-casing well-known acronyms (URL, UUID, IP, ...).

    Removed a leftover debug ``print`` and collapsed the copy-pasted
    ``replace`` chain into two ordered passes (same replacement order as
    before, so behavior is unchanged).
    """
    # Order matters: longer tokens precede their prefixes
    # (_uuid before _uid before _id; _ipv4/_ipv6 before _ip).
    acronyms = (
        "url", "uuid", "uid", "http", "id", "ipv4", "ipv6", "ip", "vm",
        "acl", "tcp", "udp", "dns", "uri", "tls", "ttl", "cpu",
        # "kdc",  # intentionally disabled in the original
    )
    s = s.replace(".", "_")
    # Suffix forms: foo_url -> foo_URL
    for a in acronyms:
        s = s.replace("_" + a, "_" + a.upper())
    # Prefix forms: url_foo -> URL_foo
    for a in acronyms:
        s = s.replace(a + "_", a.upper() + "_")
    # fix sas_ports.phy_1.state etc..
    for i in range(1, 5):
        s = s.replace(f"phy_{i}", f"phy{i}")
    s = humps.pascalize(s)
    # humps renders "IPV4"/"IPV6" as "IPv4"/"IPv6"; normalize back.
    s = s.replace("IPv4", "IPV4")
    s = s.replace("IPv6", "IPV6")
    return s
def load_class_from_file(_file_path):
    """Load a module from *_file_path* and return the class whose name is
    the PascalCase form of the (snake_case) file name.

    :param _file_path: path to the ``.py`` file to load
    :return: (filename, class_name, class object)
    :raises Exception: if the expected class is missing from the module
    """
    basename = os.path.basename(_file_path)
    filename = basename.replace(".py", "")
    class_name = humps.pascalize(filename)
    # TODO : Add validation here for env_name as being snake_case
    # Load the module
    loader = importlib.machinery.SourceFileLoader(filename, _file_path)
    mod = types.ModuleType(loader.name)
    loader.exec_module(mod)
    try:
        _class = getattr(mod, class_name)
    except AttributeError:
        # BUG FIX: getattr raises AttributeError, not KeyError, so this
        # handler never fired.  Also fixed the swapped format arguments
        # and the missing space between sentences.
        # TODO : Add a better error message
        raise Exception("Looking for a class named {} in the file {}. "
                        "Did you name the class correctly ?".format(
                            class_name, _file_path))
    return filename, class_name, _class
def __init__(self, job_name, polling_interval=10, *args, **kwargs):
    """
    Trigger AWS Glue Job function

    :param job_name (string) [REQUIRED] -- the name of the Glue job to start
        and monitor
    :param polling_interval (integer) (default: 10) -- time interval, in
        seconds, to check the status of the job
    :param job_run_id (string) -- The ID of a previous JobRun to retry.
    :param arguments (dict) -- The job arguments specifically for this run.
        For this job run, they replace the default arguments set in the job
        definition itself.
    :param timeout (integer) -- The JobRun timeout in minutes.
    :param max_capacity (float) -- The number of AWS Glue data processing
        units (DPUs) that can be allocated when this job runs.
    :param security_configuration (string) -- The name of the
        SecurityConfiguration structure to be used with this job run.
    :param notification_property (dict) -- Specifies configuration properties
        of a job run notification.
    :param worker_type (string) -- The type of predefined worker that is
        allocated when a job runs. Accepts a value of Standard, G.1X, or G.2X.
    :param number_of_workers (integer) -- The number of workers of a defined
        workerType that are allocated when a job runs.

    Reference:
    https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/glue.html#Glue.Client.start_job_run
    """
    super(StartGlueJobRunOperator, self).__init__(*args, **kwargs)
    self.job_name = job_name
    self.polling_interval = polling_interval
    # Merge every optional kwarg straight onto the instance so the
    # pascalize pass below can pick them up generically.
    self.__dict__.update(kwargs)
    self.glue_client = boto3.client('glue')
    # Snapshot ALL instance attributes with PascalCase keys (boto3's
    # start_job_run expects PascalCase argument names).  Non-boto3
    # attributes (e.g. GlueClient, PollingInterval) are filtered out by
    # the intersection below — hence the statement order above.
    self.pascalized_args = {
        humps.pascalize(k): v for k, v in self.__dict__.items()
    }
    # The only keyword arguments start_job_run accepts.
    boto3_glue_arguments = [
        'JobName', 'JobRunId', 'Arguments', 'Timeout', 'MaxCapacity',
        'SecurityConfiguration', 'NotificationProperty', 'WorkerType',
        'NumberOfWorkers'
    ]
    self.func_args = {
        key: self.pascalized_args[key]
        for key in set(boto3_glue_arguments).intersection(
            self.pascalized_args.keys())
    }
def test_upper():
    """An all-caps acronym must survive every conversion unchanged."""
    converters = (humps.camelize, humps.decamelize,
                  humps.pascalize, humps.depascalize)
    for convert in converters:
        assert convert("API") == "API"
def test_converting_strings():
    """Spot-check each of the four basic string conversions."""
    cases = [
        (humps.camelize, "jack_in_the_box", "jackInTheBox"),
        (humps.decamelize, "rubyTuesdays", "ruby_tuesdays"),
        (humps.depascalize, "UnosPizza", "unos_pizza"),
        (humps.pascalize, "red_robin", "RedRobin"),
    ]
    for convert, source, expected in cases:
        assert convert(source) == expected
def test_numeric():
    """Numeric (non-string) inputs must pass through unchanged."""
    cases = (
        (humps.camelize, 1234),
        (humps.decamelize, 123),
        (humps.pascalize, 123),
    )
    for convert, number in cases:
        assert convert(number) == number
def rule_name(name: str) -> str:
    """PascalCase, alphanumeric-only logical ID for a rule."""
    pascal = humps.pascalize("rule_" + name)
    return re.sub(r'[\W_]', '', pascal)
def fis_action_name(name: str) -> str:
    """PascalCase, alphanumeric-only logical ID for a FIS action."""
    pascal = humps.pascalize("fis_action_" + name)
    return re.sub(r'[\W_]', '', pascal)
def global_action_points_class(project: CachedProject) -> str:
    """Generate Python source code exposing a project's action points as
    classes.

    For every action point (AP) in *project* the generated module contains:
      * ``<ApName>Joints`` (only if the AP has joints) — one ``@property``
        per joints entry, each returning a ``deepcopy`` of the stored value,
      * ``<ApName>Poses`` (only if the AP has orientations) — analogous
        properties returning ``deepcopy`` of each pose,
      * ``<ApName>`` — holds ``_position`` plus the optional ``joints`` /
        ``poses`` sub-objects, and a ``position`` property (deep copy),
    and finally an ``ActionPoints`` class with one attribute per AP.
    The whole module is built as an ``ast`` tree and rendered to a string.
    """
    tree = Module(body=[])

    # -> from arcor2.data.common import ActionPoint, Position, Pose,
    #    ProjectRobotJoints
    tree.body.append(
        ImportFrom(
            module=arcor2.data.common.__name__,
            names=[
                alias(name=ActionPoint.__name__, asname=None),
                alias(name=Position.__name__, asname=None),
                alias(name=Pose.__name__, asname=None),
                alias(name=ProjectRobotJoints.__name__, asname=None),
            ],
            level=0,
        ))

    # -> from copy import deepcopy
    tree.body.append(
        ImportFrom(
            module=copy.__name__,
            names=[alias(name=copy.deepcopy.__name__, asname=None)],
            level=0,
        ))

    # -> from <resources module> import <Resources class>
    tree.body.append(
        ImportFrom(
            module=RES_MODULE,
            names=[alias(name=RES_CLS, asname=None)],
            level=0,
        ))

    # Statements of ActionPoints.__init__, filled in per AP below.
    aps_init_body: List[Union[Assign, Pass]] = []

    for ap in project.action_points:

        # -> self._position = res.project.bare_action_point("<ap.id>").position
        ap_cls_body: List[Assign] = [
            Assign(
                targets=[
                    Attribute(value=Name(id="self", ctx=Load()),
                              attr="_position",
                              ctx=Store())
                ],
                value=Attribute(
                    value=Call(
                        func=Attribute(
                            value=Attribute(value=Name(id="res", ctx=Load()),
                                            attr="project",
                                            ctx=Load()),
                            attr=CachedProject.bare_action_point.__name__,
                            ctx=Load(),
                        ),
                        args=[Str(s=ap.id, kind="")],
                        keywords=[],
                    ),
                    attr="position",
                    ctx=Load(),
                ),
                type_comment=None,
            )
        ]

        # Class name for this AP, e.g. "my_ap" -> "MyAp".
        ap_type_name = humps.pascalize(ap.name)

        # -> self._<joints.name> = res.project.joints("<joints.id>")
        ap_joints_init_body: List[Assign] = []
        for joints in project.ap_joints(ap.id):
            ap_joints_init_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr=f"_{joints.name}",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Attribute(
                            value=Attribute(value=Name(id="res", ctx=Load()),
                                            attr="project",
                                            ctx=Load()),
                            attr="joints",
                            ctx=Load(),
                        ),
                        args=[Str(s=joints.id, kind="")],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        if ap_joints_init_body:
            # -> class <ApName>Joints: def __init__(self, res: Resources): ...
            ap_joints_cls_def = ClassDef(
                name=f"{ap_type_name}Joints",
                bases=[],
                keywords=[],
                body=[
                    FunctionDef(
                        name="__init__",
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None,
                                    type_comment=None),
                                arg(arg="res",
                                    annotation=Name(id=RES_CLS, ctx=Load()),
                                    type_comment=None),
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=ap_joints_init_body,
                        decorator_list=[],
                        returns=None,
                        type_comment=None,
                    )
                ],
                decorator_list=[],
            )

            # One @property per joints entry, returning deepcopy(self._<name>)
            # annotated as ProjectRobotJoints.
            for joints in project.ap_joints(ap.id):
                ap_joints_cls_def.body.append(
                    FunctionDef(
                        name=joints.name,
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None,
                                    type_comment=None)
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=[
                            Return(value=Call(
                                func=Name(id=copy.deepcopy.__name__,
                                          ctx=Load()),
                                args=[
                                    Attribute(value=Name(id="self",
                                                         ctx=Load()),
                                              attr=f"_{joints.name}",
                                              ctx=Load())
                                ],
                                keywords=[],
                            ))
                        ],
                        decorator_list=[Name(id="property", ctx=Load())],
                        returns=Name(id=ProjectRobotJoints.__name__,
                                     ctx=Load()),
                        type_comment=None,
                    ))

            tree.body.append(ap_joints_cls_def)

            # -> self.joints = <ApName>Joints(res) inside the AP class.
            ap_cls_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr="joints",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Name(id=f"{ap_type_name}Joints", ctx=Load()),
                        args=[Name(id="res", ctx=Load())],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        # -> self._<ori.name> = res.project.pose("<ori.id>")
        ap_orientations_init_body: List[Assign] = []
        for ori in project.ap_orientations(ap.id):
            ap_orientations_init_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr=f"_{ori.name}",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Attribute(
                            value=Attribute(value=Name(id="res", ctx=Load()),
                                            attr="project",
                                            ctx=Load()),
                            attr="pose",
                            ctx=Load(),
                        ),
                        args=[Str(s=ori.id, kind="")],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        if ap_orientations_init_body:
            # -> class <ApName>Poses: def __init__(self, res: Resources): ...
            ap_orientations_cls_def = ClassDef(
                name=f"{ap_type_name}Poses",
                bases=[],
                keywords=[],
                body=[
                    FunctionDef(
                        name="__init__",
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None,
                                    type_comment=None),
                                arg(arg="res",
                                    annotation=Name(id=RES_CLS, ctx=Load()),
                                    type_comment=None),
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=ap_orientations_init_body,
                        decorator_list=[],
                        returns=None,
                        type_comment=None,
                    )
                ],
                decorator_list=[],
            )

            # One @property per orientation, returning deepcopy(self._<name>)
            # annotated as Pose.
            for ori in project.ap_orientations(ap.id):
                ap_orientations_cls_def.body.append(
                    FunctionDef(
                        name=ori.name,
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None,
                                    type_comment=None)
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=[
                            Return(value=Call(
                                func=Name(id=copy.deepcopy.__name__,
                                          ctx=Load()),
                                args=[
                                    Attribute(value=Name(id="self",
                                                         ctx=Load()),
                                              attr=f"_{ori.name}",
                                              ctx=Load())
                                ],
                                keywords=[],
                            ))
                        ],
                        decorator_list=[Name(id="property", ctx=Load())],
                        returns=Name(id=Pose.__name__, ctx=Load()),
                        type_comment=None,
                    ))

            tree.body.append(ap_orientations_cls_def)

            # -> self.poses = <ApName>Poses(res) inside the AP class.
            ap_cls_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr="poses",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Name(id=f"{ap_type_name}Poses", ctx=Load()),
                        args=[Name(id="res", ctx=Load())],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        # -> class <ApName>: def __init__(self, res: Resources): <ap_cls_body>
        ap_cls_def = ClassDef(
            name=ap_type_name,
            bases=[],
            keywords=[],
            body=[
                FunctionDef(
                    name="__init__",
                    args=arguments(
                        args=[
                            arg(arg="self", annotation=None,
                                type_comment=None),
                            arg(arg="res",
                                annotation=Name(id=RES_CLS, ctx=Load()),
                                type_comment=None),
                        ],
                        vararg=None,
                        kwonlyargs=[],
                        kw_defaults=[],
                        kwarg=None,
                        defaults=[],
                    ),
                    body=ap_cls_body,
                    decorator_list=[],
                    returns=None,
                    type_comment=None,
                )
            ],
            decorator_list=[],
        )

        # add copy property for position
        ap_cls_def.body.append(
            FunctionDef(
                name="position",
                args=arguments(
                    args=[arg(arg="self", annotation=None,
                              type_comment=None)],
                    vararg=None,
                    kwonlyargs=[],
                    kw_defaults=[],
                    kwarg=None,
                    defaults=[],
                ),
                body=[
                    Return(value=Call(
                        func=Name(id=copy.deepcopy.__name__, ctx=Load()),
                        args=[
                            Attribute(value=Name(id="self", ctx=Load()),
                                      attr="_position",
                                      ctx=Load())
                        ],
                        keywords=[],
                    ))
                ],
                decorator_list=[Name(id="property", ctx=Load())],
                returns=Name(id=Position.__name__, ctx=Load()),
                type_comment=None,
            ))

        tree.body.append(ap_cls_def)

        # -> self.<ap.name> = <ApName>(res) inside ActionPoints.__init__.
        aps_init_body.append(
            Assign(
                targets=[
                    Attribute(value=Name(id="self", ctx=Load()),
                              attr=ap.name,
                              ctx=Store())
                ],
                value=Call(func=Name(id=ap_type_name, ctx=Load()),
                           args=[Name(id="res", ctx=Load())],
                           keywords=[]),
                type_comment=None,
            ))

    if not aps_init_body:  # there are no action points
        aps_init_body.append(Pass())

    # -> class ActionPoints: def __init__(self, res: Resources): <aps_init_body>
    aps_cls_def = ClassDef(
        name="ActionPoints",
        bases=[],
        keywords=[],
        body=[
            FunctionDef(
                name="__init__",
                args=arguments(
                    args=[
                        arg(arg="self", annotation=None, type_comment=None),
                        arg(arg="res",
                            annotation=Name(id=RES_CLS, ctx=Load()),
                            type_comment=None),
                    ],
                    vararg=None,
                    kwonlyargs=[],
                    kw_defaults=[],
                    kwarg=None,
                    defaults=[],
                ),
                body=aps_init_body,
                decorator_list=[],
                returns=None,
                type_comment=None,
            )
        ],
        decorator_list=[],
    )

    tree.body.append(aps_cls_def)

    return tree_to_str(tree)
def test_upper():
    """An all-caps acronym must survive every conversion unchanged."""
    converters = (humps.camelize, humps.decamelize,
                  humps.pascalize, humps.depascalize)
    for convert in converters:
        assert convert('API') == 'API'
def fis_template_name(name: str, is_logical: bool) -> str:
    """PascalCase, alphanumeric-only FIS template identifier.

    Logical names get the ``fis_template`` prefix; imported ones ``imp``.
    """
    prefix = "fis_template_" if is_logical else "imp_"
    return re.sub(r'[\W_]', '', humps.pascalize(prefix + name))
def test_pascalize():
    """Pascalizing a nested structure converts keys at every depth while
    preserving leading/trailing underscores and existing acronyms."""
    result = humps.pascalize({
        "videos": [
            {
                "fallback_url": "https://media.io/video",
                "scrubber_media_url": "https://media.io/video",
                "dash_url": "https://media.io/video",
            },
        ],
        "images": [
            {
                "fallback_url": "https://media.io/image",
                "scrubber_media_url": "https://media.io/image",
                "url": "https://media.io/image",
            },
        ],
        "other": [
            {
                "_fallback_url": "https://media.io/image",
                "__scrubber_media___url_": "https://media.io/image",
                "_url__": "https://media.io/image",
            },
            {
                "API": "test_upper",
                "_API_": "test_upper",
                "__API__": "test_upper",
                "APIResponse": "test_acronym",
                "_APIResponse_": "test_acronym",
                "__APIResponse__": "test_acronym",
            },
        ],
    })
    expected = {
        "Videos": [
            {
                "FallbackUrl": "https://media.io/video",
                "ScrubberMediaUrl": "https://media.io/video",
                "DashUrl": "https://media.io/video",
            },
        ],
        "Images": [
            {
                "FallbackUrl": "https://media.io/image",
                "ScrubberMediaUrl": "https://media.io/image",
                "Url": "https://media.io/image",
            },
        ],
        "Other": [
            {
                "_FallbackUrl": "https://media.io/image",
                "__ScrubberMediaUrl_": "https://media.io/image",
                "_Url__": "https://media.io/image",
            },
            {
                "API": "test_upper",
                "_API_": "test_upper",
                "__API__": "test_upper",
                "APIResponse": "test_acronym",
                "_APIResponse_": "test_acronym",
                "__APIResponse__": "test_acronym",
            },
        ],
    }
    assert result == expected
def cf_template_name(name: str) -> str:
    """PascalCase, alphanumeric-only identifier for an imported template."""
    pascal = humps.pascalize("imp_template_" + name)
    return re.sub(r'[\W_]', '', pascal)
def cf_automation_name(name: str) -> str:
    """PascalCase, alphanumeric-only identifier for an imported automation."""
    pascal = humps.pascalize("imp_automation_" + name)
    return re.sub(r'[\W_]', '', pascal)
def fis_target_name(template_name: str, name: str) -> str:
    """PascalCase, alphanumeric-only identifier for a FIS target."""
    raw = "_".join(("fis_target", template_name, name))
    return re.sub(r'[\W_]', '', humps.pascalize(raw))
def pascalize(value):
    """Thin wrapper around :func:`humps.pascalize`.

    The parameter was renamed from ``str``, which shadowed the builtin
    type inside this function.
    """
    return humps.pascalize(value)
def lambda_permission_name(name: str) -> str:
    """PascalCase, alphanumeric-only logical ID for a Lambda permission."""
    pascal = humps.pascalize("lambda_permission_" + name)
    return re.sub(r'[\W_]', '', pascal)
def iam_assume_role_name(name: str) -> str:
    """PascalCase, alphanumeric-only logical ID for an IAM assume role."""
    pascal = humps.pascalize("assume_role_" + name)
    return re.sub(r'[\W_]', '', pascal)
from inspect import ismethoddescriptor

import torch
from torch import nn

import humps


def __init__(self, *args, **kwargs):
    # Shared __init__ injected into every generated wrapper class below:
    # stores the constructor arguments so forward() can forward them to
    # the wrapped Tensor method.
    self.args = args
    self.kwargs = kwargs
    nn.Module.__init__(self)


def _create_module_class(name, func):
    # Build an nn.Module subclass named *name* whose forward(x) applies
    # *func* to x along with the args/kwargs captured at construction.
    return type(
        name,
        (nn.Module, ),
        {
            "__init__": __init__,
            "forward": lambda self, x: func(x, *self.args, **self.kwargs)
        }
    )


# For every public, non-in-place method descriptor on torch.Tensor,
# expose a PascalCase nn.Module wrapper in this module's namespace
# (skipping names that would shadow something already in torch.nn).
for name in dir(torch.Tensor):
    if name.startswith('_') or name.endswith('_'):
        # private attributes and in-place ops (trailing underscore) skipped
        continue
    f = getattr(torch.Tensor, name)
    if not ismethoddescriptor(f):
        continue
    name = humps.pascalize(name)
    if hasattr(nn, name):
        # don't shadow an existing torch.nn attribute
        continue
    globals()[name] = _create_module_class(name, f)
def register_obs(name: str):
    """Class decorator: register an Observation subclass under *name*.

    :raises ValueError: if *name* is already registered, or the decorated
        class does not extend Observation.
    """
    def register_observation_cls(cls):
        if name in OBS_REGISTRY:
            # BUG FIX: corrected the "registred" typo in the message.
            raise ValueError(f'Observation "{name}" already registered.')
        if not issubclass(cls, Observation):
            raise ValueError(f'Observation "{name}" ({cls.__name__}) must extend the Observation base class.')
        OBS_REGISTRY[name] = cls
        return cls
    return register_observation_cls


def make_obs(name: str, config, *args, **kwargs) -> Observation:
    """Instantiate the observation class registered under *name*."""
    return OBS_REGISTRY[name](config, *args, **kwargs)


# automatically import any Python files in the obs/ directory
# (importing each module triggers its @register_obs decorators)
load_count = 0
for file in os.listdir(os.path.dirname(__file__)):
    if file.endswith('.py') and not file.startswith('_'):
        # Removed unused locals (basename/filename/class_name were
        # computed here but never read).
        importlib.import_module(f'.{file[:-3]}', package=__name__)
        load_count += 1
print("- Successfully loaded", load_count, "observation classes")
def test_converting_strings():
    """Spot-check each of the four basic string conversions."""
    cases = [
        (humps.camelize, 'jack_in_the_box', 'jackInTheBox'),
        (humps.decamelize, 'rubyTuesdays', 'ruby_tuesdays'),
        (humps.depascalize, 'UnosPizza', 'unos_pizza'),
        (humps.pascalize, 'red_robin', 'RedRobin'),
    ]
    for convert, source, expected in cases:
        assert convert(source) == expected
def fis_automated_experiment_name(name: str) -> str:
    """PascalCase, alphanumeric-only identifier for an automated experiment."""
    pascal = humps.pascalize("imp_automation_" + name)
    return re.sub(r'[\W_]', '', pascal)