def create_lexical_lists(set_name):
    """
    Create Clearswift lexical expression lists for any names not yet on disk.

    :type set_name: set
    """
    # Only create lists whose names are not already present among the
    # existing "TextualAnalysis" entries in DIR_LEXICAL.
    for name in set_name - get_names(DIR_LEXICAL, "TextualAnalysis"):
        # Draw fresh UUIDs until one maps to a file name that is not taken.
        while True:
            uuid = generate_uuid()
            file_lexical = DIR_LEXICAL.joinpath("{}.xml".format(uuid))
            if not file_lexical.exists():
                break
        try:
            with open(file_lexical, "w") as f:
                # A single placeholder "dummy" phrase keeps the list valid.
                f.write(
                    TEMPLATE_LEXICAL.substitute(
                        count="1",
                        name=quoteattr(name),
                        uuid=uuid,
                        phrases=TEMPLATE_PHRASE.substitute(
                            text=quoteattr("dummy"),
                            uuid=generate_uuid())))
            # Clearswift services must own the file to pick it up.
            chown(file_lexical, user=CS_USER, group=CS_GROUP)
        except Exception:
            raise Exception(
                "Cannot write lexical list file '{}'".format(file_lexical))
def to_internal_value(self, data):
    """Deserialize *data*, re-attaching identifying keys the base class drops."""
    values = super(QPropertyCustomizationSerializer, self).to_internal_value(data)
    # Reuse the incoming key as the guid so a brand-new key is not generated,
    # and keep category_key around so 'create' below can find the right category.
    values["guid"] = generate_uuid(data.get("key"))
    values["category_key"] = generate_uuid(data.get("category_key"))
    pk = data.get("id")
    if pk:
        # restore the pk so update/create works for QListSerializer
        values["id"] = pk
    return values
def to_internal_value(self, data):
    """Convert serialized data to internal values, preserving identifying keys."""
    internal_value = super(QPropertyCustomizationSerializer, self).to_internal_value(data)
    internal_value.update({
        # reuse the incoming key as the guid so a new one is not generated
        "guid": generate_uuid(data.get("key")),
        # put the category key back so that I can use it to locate the
        # correct category in 'create' and/or 'update' below
        "category_key": generate_uuid(data.get("category_key")),
    })
    pk = data.get("id")
    if pk:
        internal_value.update({
            "id": pk,  # put id back so that update/create will work for QListSerializer
        })
    return internal_value
def to_internal_value(self, data):
    """
    Deserialize *data*, restoring the original key (as guid) plus the
    vocabulary/component keys (converted back from their string form).
    """
    values = super(QScientificCategoryCustomizationSerializer, self).to_internal_value(data)
    values["guid"] = generate_uuid(data.get("key"))
    values["vocabulary_key"] = generate_uuid(data.get("vocabulary_key"))
    values["component_key"] = generate_uuid(data.get("component_key"))
    pk = data.get("id")
    if pk:
        # restore the pk so update/create works for QListSerializer
        values["id"] = pk
    return values
def to_internal_value(self, data): internal_value = super(QScientificPropertyCustomizationSerializer, self).to_internal_value(data) # put the category_key back so that I can use it to locate the correct category in "create" and/or "update" below internal_value.update({ "guid": generate_uuid(data.get("key")), "category_key": generate_uuid(data.get("category_key")), "vocabulary_key": generate_uuid(data.get("vocabulary_key")), "component_key": generate_uuid(data.get("component_key")), }) pk = data.get("id") if pk: internal_value.update({ "id": pk, # put id back so that update/create will work for QListSerializer }) return internal_value
def __init__(self, name, base_updates=None, interactions=None, **kwargs):
    """
    Initialize the object with a freshly generated uuid.

    :param name: display name for this instance
    :param base_updates: optional list of base updates
    :param interactions: optional list of interactions
    :param kwargs: extra attributes set directly on the instance
    """
    self._id = generate_uuid()
    self._name = name
    # BUG FIX: the previous defaults were mutable ([] evaluated once at
    # definition time), so every instance created without arguments shared
    # the same list objects; use None sentinels and fresh lists instead.
    self._base_updates = base_updates if base_updates is not None else []
    self._interactions = interactions if interactions is not None else []
    for attr, val in kwargs.items():
        self.__dict__[attr] = val
def from_dict(request_dict):
    """
    Build a TaskRequest from a dict.

    Accepts two shapes: an internal dict carrying 'pickup_pose'/'delivery_pose'
    sub-dicts, or a json-schema message using camelCase location fields.
    """
    # Reuse an existing id when present so round-tripping keeps identity.
    id = request_dict.get('id', generate_uuid())
    request = TaskRequest(id=id)
    request.load_type = request_dict["loadType"]
    request.load_id = request_dict["loadId"]
    request.user_id = request_dict["userId"]
    request.earliest_pickup_time = TimeStamp.from_str(
        request_dict["earliestPickupTime"])
    request.latest_pickup_time = TimeStamp.from_str(
        request_dict["latestPickupTime"])
    pickup_area_dict = request_dict.get('pickup_pose', None)
    if pickup_area_dict:
        request.pickup_pose = Area.from_dict(pickup_area_dict)
    else:  # when the provided dict is from json schema
        request.pickup_pose = Area()
        request.pickup_pose.name = request_dict.get("pickupLocation", '')
        request.pickup_pose.floor_number = request_dict.get(
            "pickupLocationLevel", 0)
    delivery_area_dict = request_dict.get('delivery_pose', None)
    if delivery_area_dict:
        request.delivery_pose = Area.from_dict(delivery_area_dict)
    else:  # when the provided dict is from json schema
        request.delivery_pose = Area()
        request.delivery_pose.name = request_dict.get(
            "deliveryLocation", '')
        request.delivery_pose.floor_number = request_dict.get(
            "deliveryLocationLevel", 0)
    request.priority = request_dict["priority"]
    return request
def __init__(self, robot_id, uuid=None, **kwargs):
    """
    Robot status holder.

    :param robot_id: identifier of the robot
    :param uuid: optional uuid; a fresh one is generated when omitted
    :param kwargs: optional status fields (position, availability,
        component_status, schedule, current_task, nickname, version)
    """
    self.robot_id = robot_id
    if uuid is None:
        self.uuid = generate_uuid()
    else:
        self.uuid = uuid
    self.last_update = dict()
    self.position = kwargs.get('position', None)
    self.availability = kwargs.get('availability', None)
    self.component_status = kwargs.get('component_status', None)
    self.schedule = kwargs.get('schedule', None)
    self.current_task = kwargs.get('current_task', None)
    self.nickname = kwargs.get('nickname', None)
    self.version = kwargs.get(
        'version', {
            'hardware': Hardware.full_version(),
            'software': Software.full_version(),
            'black_box': BlackBox.full_version()
        })
    # Record which of the tracked attributes were actually provided.
    # BUG FIX: the original looked up the literal string 'attr' instead of
    # the loop variable, so last_update was never populated.
    for attr in attributes:
        val = self.__dict__.get(attr, None)
        if val is not None:
            self.last_update[attr] = val
def __init__(self, value=None, valid=False, key=None, discard=False):
    """Store a value with validity/discard flags under an explicit or generated key."""
    self._value = value
    self._valid = valid
    self._discard = discard
    # fall back to a generated uuid only when no key was supplied
    self._key = key if key is not None else generate_uuid()
def full_version(uuid=None):
    """Return the full software version record, generating a uuid when none is given."""
    return {
        'uuid': generate_uuid() if uuid is None else uuid,
        'logger': Software.ropod_sw(),
        'fault_detection': Software.ropod_sw()
    }
def to_internal_value(self, data):
    """
    Deserialize *data*; re-attach the original key as guid and, for new
    instances only, the category_key needed by "create".
    """
    values = super(QPropertyRealizationSerializer, self).to_internal_value(data)
    # reuse the incoming key so a brand-new guid is not generated
    values["guid"] = generate_uuid(data.get("key"))
    pk = data.get("id")
    if pk:
        # restore the pk so update/create works for QListSerializer
        values["id"] = pk
    else:
        # "category_key" is only needed to locate the correct category during
        # "create" of a brand-new instance (it's not needed for "update")
        values["category_key"] = generate_uuid(data.get("category_key"))
    return values
def update_msg_id(message, id=None):
    """
    Set a message's header msgId to *id*, or to a freshly generated uuid.

    If the message has no header yet, a default header is created first.
    (Previously a caller-supplied *id* was silently ignored in that case.)

    :param message: message dict, mutated in place
    :param id: explicit message id; when falsy a new uuid is used
    """
    header = message.get('header')
    if header:
        header.update(msgId=id if id else generate_uuid())
    else:
        # get_header returns {"header": {...}} with a fresh msgId inside
        header = MessageFactoryBase.get_header(None)
        if id:
            # honor the requested id on the newly built header as well
            header['header'].update(msgId=id)
        message.update(header)
def temporary_cnf_file(base_path: Path = Path('.')) -> Iterator[Path]:
    """Returns a `Path` to a new, local file in the directory of the given path
    with a .cnf suffix.

    When used as a context manager (recommended), the file will be deleted when
    it leaves the context scope.
    """
    # NOTE(review): the Iterator return annotation plus the yield suggest this
    # is wrapped with @contextlib.contextmanager at the definition site —
    # confirm; the decorator is not visible here.
    cnf_file = base_path / Path(str(generate_uuid())).with_suffix('.cnf')
    try:
        yield cnf_file
    finally:
        # best-effort cleanup; the file may never have been created by the caller
        if cnf_file.exists():
            cnf_file.unlink()
def get_by_key(self, key):
    """Return the first customization whose guid matches *key*, or None.

    *key* may be a string (converted via generate_uuid) or a ready-made uuid.
    """
    if isinstance(key, basestring):  # py2 idiom (str/unicode)
        key = generate_uuid(key)
    # TODO: THERE IS THE CHANCE OF MULTIPLE CUSTOMIZATIONS W/ THE SAME KEY B/C OF RECURSION
    # TODO: THIS MAKES SURE TO ONLY EVER RETURN THE 1ST ONE
    # TODO: IN THE LONG-TERM, I SHOULD FIX THIS FROM HAPPENING
    # return self.get(guid=key)
    matching_category_customizations = self.filter(guid=key)
    if matching_category_customizations:
        return matching_category_customizations[0]
    return None
def create_wim_account(self, wim, tenant, properties):
    """Associate a wim to a tenant using the ``wim_nfvo_tenants`` table and
    create a ``wim_account`` to store credentials and configurations.

    For the sake of simplification, we assume that each NFVO tenant can be
    attached to a WIM using only one WIM account. This is automatically
    guaranteed via database constraints.
    For corner cases, the same WIM can be registered twice using another
    name.

    Arguments:
        wim (str): name or uuid of the WIM related to the account being
            created
        tenant (str): name or uuid of the nfvo tenant to which the account
            will be created
        properties (dict): properties of the account
            (eg. user, password, ...)
    """
    wim_id = self.get_by_name_or_uuid('wims', wim, SELECT=['uuid'])['uuid']
    tenant = self.get_by_name_or_uuid('nfvo_tenants', tenant,
                                      SELECT=['uuid', 'name'])
    # default the account name to the tenant name
    account = properties.setdefault('name', tenant['name'])
    wim_account = self.query_one('wim_accounts',
                                 WHERE={'wim_id': wim_id, 'name': account},
                                 error_if_none=False)
    transaction = []
    used_uuids = []
    if wim_account is None:
        # If a row for the wim account doesn't exist yet, we need to
        # create one, otherwise we can just re-use it.
        account_id = str(generate_uuid())
        used_uuids.append(account_id)
        row = merge_dicts(properties, wim_id=wim_id, uuid=account_id)
        transaction.append({'wim_accounts': _preprocess_wim_account(row)})
    else:
        account_id = wim_account['uuid']
        properties.pop('config', None)  # Config is too complex to compare
        # refuse to silently overwrite an existing account's differing fields
        diff = {k: v for k, v in properties.items() if v != wim_account[k]}
        if diff:
            tip = 'Edit the account first, and then attach it to a tenant'
            raise WimAccountOverwrite(wim_account, diff, tip)
    transaction.append({
        'wim_nfvo_tenants': {'nfvo_tenant_id': tenant['uuid'],
                             'wim_id': wim_id,
                             'wim_account_id': account_id}})
    # both rows are inserted atomically under the association guard
    with self._associate(wim_id, tenant['uuid']):
        self.db.new_rows(transaction, used_uuids, confidential_data=True)
    return account_id
def to_internal_value(self, data):
    """Deserialize *data*, keeping the original key as guid so a new one is not generated."""
    values = super(QCategoryCustomizationSerializer, self).to_internal_value(data)
    values["guid"] = generate_uuid(data.get("key"))
    pk = data.get("id")
    if pk:
        # restore the pk so update/create works for QListSerializer
        values["id"] = pk
    return values
def __init__(self, id=''):
    """Create an empty task request, generating an id when none is supplied."""
    self.id = id if id else generate_uuid()
    self.pickup_pose = Area()
    self.delivery_pose = Area()
    self.earliest_pickup_time = TimeStamp()
    self.latest_pickup_time = TimeStamp()
    self.user_id = ''
    self.load_type = ''
    self.load_id = ''
    self.priority = -1
def get_header(message_type, meta_model='msg', recipients=[]):
    """Build a ropod message header dict for *message_type*.

    :param message_type: value for the header 'type' field
    :param meta_model: infix used in the metamodel schema name
    :param recipients: list of recipient id strings (or None)
    """
    if recipients is not None and not isinstance(recipients, list):
        raise Exception("Recipients must be a list of strings")
    header = {
        'type': message_type,
        'metamodel': 'ropod-%s-schema.json' % meta_model,
        'msgId': generate_uuid(),
        'timestamp': TimeStamp().to_str(),
        'receiverIds': recipients,
    }
    return {"header": header}
def to_internal_value(self, data):
    """Deserialize *data*, keeping the original key as guid so a new one is not generated."""
    values = super(QPropertyRealizationSerializer, self).to_internal_value(data)
    values["guid"] = generate_uuid(data.get("key"))
    pk = data.get("id")
    if pk:
        # restore the pk so update/create works for QListSerializer
        values["id"] = pk
    return values
def to_internal_value(self, data):
    """Convert serialized data to internal values, preserving identifying keys."""
    internal_value = super(QStandardPropertyCustomizationSerializer, self).to_internal_value(data)
    internal_value.update({
        # reuse the incoming key as the guid so a new one is not generated
        "guid": generate_uuid(data.get("key")),
    })
    # put the category_key back so that I can use it to locate the correct
    # category in "create" and/or "update" below
    try:
        internal_value.update({
            "category_key": generate_uuid(data.get("category_key")),
        })
    except TypeError:
        # note that this may not work for standard_properties of subforms
        # (b/c they have not necessarily been categorized)
        # that's okay... hence this try/except block
        pass
    pk = data.get("id")
    if pk:
        internal_value.update({
            "id": pk,  # put id back so that update/create will work for QListSerializer
        })
    return internal_value
def admin_maintenance_api_post():
    """Handle a maintenance POST: create, register, and start an admin session.

    Returns a JSON reply carrying the new session id plus an ACK of the
    requested state, with HTTP 200.
    """
    data = json.loads(request.data.decode('utf8'))
    self.log.info('maintenance message: %s' % data)
    session_id = str(generate_uuid())
    self.log.info('creating session: %s' % session_id)
    # NOTE(review): 'trasport_url' looks like a typo for 'transport_url',
    # but it must match the attribute defined elsewhere — confirm before renaming.
    self.maint_sessions[session_id] = (AdminMain(
        self.trasport_url, session_id, data, self, self.conf,
        self.log))
    self.maint_sessions[session_id].start()
    reply = json.dumps({
        'session_id': session_id,
        'state': 'ACK_%s' % data['state']
    })
    self.log.debug('reply: %s' % reply)
    return reply, 200, None
def admin_create_session(self, ctx, data):
    """Create maintenance workflow session thread.

    :param ctx: request context (not used here)
    :param data: session payload; may name a workflow plugin under "workflow"
    :returns: dict with the new session id, or None when at capacity
    """
    LOG.info("EngineEndpoint: admin_create_session")
    LOG.info("data: %s" % data)
    # Use >= so the cap holds even if the count ever overshoots;
    # len() on the dict itself avoids building a keys view.
    if len(self.workflow_sessions) >= MAX_SESSIONS:
        LOG.error("Too many sessions: %d" % MAX_SESSIONS)
        return None
    session_id = str(generate_uuid())
    # fall back to the default workflow plugin unless one is named
    if "workflow" not in data:
        workflow = "fenix.workflow.workflows.default"
    else:
        workflow = "fenix.workflow.workflows.%s" % data["workflow"]
    LOG.info("Workflow plugin module: %s" % workflow)
    wf_plugin = getattr(import_module(workflow), 'Workflow')
    self.workflow_sessions[session_id] = (wf_plugin(CONF,
                                                    session_id,
                                                    data))
    self.workflow_sessions[session_id].start()
    return {'session_id': session_id}
def to_internal_value(self, data):
    """Deserialize a nested model customization, preserving its key as guid."""
    if not data:
        return None
    values = QModelCustomizationSerializer().to_internal_value(data)
    values["guid"] = generate_uuid(data.get("key"))
    pk = data.get("id")
    if pk:
        # restore the pk so update/create works for QListSerializer
        values["id"] = pk
    return values
def to_internal_value(self, data):
    """Deserialize a nested model realization, preserving its key as guid."""
    if not data:
        return None
    # import here to avoid a circular dependency at module load time
    from .serializers_realizations_models import QModelRealizationSerializer
    values = QModelRealizationSerializer().to_internal_value(data)
    values["guid"] = generate_uuid(data.get("key"))
    pk = data.get("id")
    if pk:
        # restore the pk so update/create works for QListSerializer
        values["id"] = pk
    return values
def create_lists(set_name, template, directory, tag):
    """
    Create Clearswift item lists for any names not yet on disk.

    :type set_name: set
    :type template: Template
    :type directory: Path
    :type tag: str
    """
    # Only create lists whose names are not already present under *tag*.
    for name in set_name - get_names(directory, tag):
        # Draw fresh UUIDs until one maps to a file name that is not taken.
        while True:
            uuid = generate_uuid()
            file_list = directory.joinpath("{}.xml".format(uuid))
            if not file_list.exists():
                break
        try:
            with open(file_list, "w") as f:
                f.write(template.substitute(name=quoteattr(name), uuid=uuid))
            # Clearswift services must own the file to pick it up.
            chown(file_list, user=CS_USER, group=CS_GROUP)
        except Exception:
            raise Exception("Cannot write list file '{}'".format(file_list))
def __init__(self, uuid: str) -> None:
    """Set up channels and an LSL stream outlet for this device.

    :param uuid: stream uuid; note that despite the ``str`` annotation,
        falsy values (``None``, ``''``) are accepted and replaced with a
        generated uuid — consider ``Optional[str]``.
    """
    self._uuid = uuid or str(generate_uuid())
    self.channels = self.setup_channels()
    stream_info = self.construct_streaminfo()
    self._wrapped_outlet = lsl.StreamOutlet(stream_info)
def command_install(args, command_description):
    """
    Install external commands.

    Downloads each command's config, installs its OS/Python dependencies,
    materializes the disposal actions, item lists, and lexical lists it
    needs, writes the policy-rule XML, flips the Clearswift "changesMade"
    flag, and optionally restarts Tomcat.

    :type args: argparse.Namespace
    :type command_description: dict
    """
    dict_media_type = get_media_types()
    dict_disposal_action = get_disposal_actions()
    command_update(args, command_description)
    for command in args.command:
        url_config = "{}/{}/{}".format(URL_REPO, command, FILE_CONFIG)
        try:
            config = urlopen(url_config).read().decode("utf-8")
        except Exception:
            raise Exception(
                "Cannot download config file '{}'".format(url_config))
        config = parse_config(config)
        # refuse to overwrite policy rules that already exist on the box
        set_rule = get_names(DIR_RULES, "ExecutablePolicyRule")
        duplicate_rules = config.keys() & set_rule
        if duplicate_rules:
            raise Exception("Policy rules {} already exist".format(
                str(duplicate_rules)[1:-1]))
        download_script(command)
        for (name, rule) in config.items():
            # install OS package dependencies
            if rule.packages:
                for package in rule.packages:
                    try:
                        run(["/usr/bin/yum", "install", "-y", package],
                            stdout=DEVNULL, stderr=DEVNULL, check=True)
                    except Exception:
                        raise Exception(
                            "Cannot install package '{}'".format(package))
            # install Python module dependencies
            if rule.modules:
                for module in rule.modules:
                    try:
                        run([sys.executable, "-m", "pip", "install", module],
                            stdout=DEVNULL, stderr=DEVNULL, check=True)
                    except Exception:
                        raise Exception(
                            "Cannot install Python module '{}'".format(module))
            # make sure every referenced disposal action exists, appending
            # new areas into the disposal-actions file just before its
            # closing tag (hence the seek to 21 bytes before EOF)
            for disposal_action in rule.disposal_actions:
                for action in disposal_action:
                    if not action in dict_disposal_action:
                        while True:
                            uuid = generate_uuid()
                            if uuid not in dict_disposal_action.values():
                                break
                        try:
                            with open(FILE_DISPOSAL, "r+b") as f:
                                f.seek(-21, SEEK_END)
                                f.write(
                                    TEMPLATE_AREA.substitute(
                                        name=quoteattr(action[5:]),
                                        uuid=uuid).encode("utf-8"))
                        except Exception:
                            raise Exception(
                                "Cannot write disposal actions file '{}'".
                                format(FILE_DISPOSAL))
                        dict_disposal_action[action] = uuid
            # create whatever item lists the rule references
            if rule.list_address:
                create_lists(rule.list_address, TEMPLATE_ADDRESS, DIR_ADDRESS,
                             "AddressList")
            if rule.list_filename:
                create_lists(rule.list_filename, TEMPLATE_FILENAME,
                             DIR_FILENAME, "FilenameList")
            if rule.list_url:
                create_lists(rule.list_url, TEMPLATE_URL, DIR_URL, "UrlList")
            if rule.list_lexical:
                create_lexical_lists(rule.list_lexical)
            # write the rule's config parameters as a lexical list
            if rule.config:
                while True:
                    uuid = generate_uuid()
                    file_lexical = DIR_LEXICAL.joinpath("{}.xml".format(uuid))
                    if not file_lexical.exists():
                        break
                try:
                    with open(file_lexical, "w") as f:
                        f.write(
                            TEMPLATE_LEXICAL.substitute(
                                count=str(len(rule.config)),
                                name=quoteattr("Config - {}".format(name)),
                                uuid=uuid,
                                phrases="".join([
                                    TEMPLATE_PHRASE.substitute(
                                        text=quoteattr(
                                            TEMPLATE_PARAMETER.substitute(
                                                name=parameter,
                                                type=rule.config[parameter].type,
                                                description=rule.config[parameter].description,
                                                value=rule.config[parameter].value)),
                                        uuid=generate_uuid())
                                    for parameter in sorted(rule.config.keys())
                                ])))
                    chown(file_lexical, user=CS_USER, group=CS_GROUP)
                except Exception:
                    raise Exception(
                        "Cannot write config lexical list file '{}'".format(
                            file_lexical))
            # write the policy-rule XML itself
            while True:
                uuid = generate_uuid()
                file_rule = DIR_RULES.joinpath("{}.xml".format(uuid))
                if not file_rule.exists():
                    break
            try:
                with open(file_rule, "w") as f:
                    f.write(
                        TEMPLATE_RULE.substitute(
                            name=quoteattr(name),
                            uuid_rule=uuid,
                            # per media type, emit only the sub-type flags
                            # supported by the type AND requested by the rule
                            media_types="".join([
                                TEMPLATE_MEDIA.substitute(
                                    uuid=dict_media_type[mnemonic].uuid,
                                    sub_types="{}{}{}{}{}".format(
                                        ' enc="true"' if MediaSubtype.ENCRYPTED in dict_media_type[mnemonic].sub_types and MediaSubtype.ENCRYPTED in sub_types else "",
                                        ' digsign="true"' if MediaSubtype.SIGNED in dict_media_type[mnemonic].sub_types and MediaSubtype.SIGNED in sub_types else "",
                                        ' digsignenc="true"' if MediaSubtype.SIGNED_ENCRYPTED in dict_media_type[mnemonic].sub_types and MediaSubtype.SIGNED_ENCRYPTED in sub_types else "",
                                        ' drm="true"' if MediaSubtype.DRM in dict_media_type[mnemonic].sub_types and MediaSubtype.DRM in sub_types else "",
                                        ' notprotect="true"' if MediaSubtype.NOT_PROTECTED in dict_media_type[mnemonic].sub_types and MediaSubtype.NOT_PROTECTED in sub_types else ""))
                                for (mnemonic, sub_types) in rule.media_types.items()
                            ]),
                            uuid_media=generate_uuid(),
                            uuid_direction=generate_uuid(),
                            uuid_command=generate_uuid(),
                            command=DIR_SCRIPTS.joinpath(
                                "{}.py".format(command)),
                            parameters=escape(rule.parameters),
                            return_codes="".join([
                                TEMPLATE_RETURN.substitute(
                                    action=result.action,
                                    return_code=return_code,
                                    description=result.description)
                                for (return_code, result) in rule.return_codes.items()
                            ]),
                            timeout=rule.timeout,
                            uuid_deliver=dict_disposal_action["deliver"],
                            uuid_none=dict_disposal_action["none"],
                            uuid_deliver_action=generate_uuid(),
                            uuid_deliver_web=generate_uuid(),
                            uuid_modified_primary=dict_disposal_action[
                                rule.disposal_actions.modified.primary],
                            uuid_modified_secondary=dict_disposal_action[
                                rule.disposal_actions.modified.secondary],
                            uuid_modified_action=generate_uuid(),
                            uuid_modified_web=generate_uuid(),
                            uuid_detected_primary=dict_disposal_action[
                                rule.disposal_actions.detected.primary],
                            uuid_detected_secondary=dict_disposal_action[
                                rule.disposal_actions.detected.secondary],
                            uuid_detected_action=generate_uuid(),
                            uuid_detected_web=generate_uuid()))
                chown(file_rule, user=CS_USER, group=CS_GROUP)
            except Exception:
                raise Exception(
                    "Cannot write policy rule file '{}'".format(file_rule))
    # flip the UI's "changes made" flag so the admin is prompted to apply
    try:
        with open(FILE_STATUS, "r") as f:
            content = f.read()
    except Exception:
        raise Exception("Cannot read status file '{}'".format(FILE_STATUS))
    try:
        with open(FILE_STATUS, "w") as f:
            f.write(
                content.replace(' changesMade="false" ',
                                ' changesMade="true" '))
    except Exception:
        raise Exception("Cannot write status file '{}'".format(FILE_STATUS))
    if args.reload:
        try:
            run("source /etc/profile.d/cs-vars.sh; /opt/cs-gateway/bin/cs-servicecontrol restart tomcat",
                shell=True, stdout=DEVNULL, stderr=DEVNULL, check=True)
        except Exception:
            raise Exception("Cannot restart Tomcat service")
def __init__(self, id=None, robot_actions=None, team_robot_ids=None,
             earliest_start_time=-1, latest_start_time=-1,
             estimated_duration=-1, pickup_pose=None, delivery_pose=None,
             **kwargs):
    """Constructor for the Task object

    Args:
        id (str): A string of the format UUID
        robot_actions (dict): A dictionary with robot IDs as keys, and the
            list of actions to execute as values
        loadType (str): Valid values are "MobiDik", "Sickbed". Defaults to MobiDik
        loadId (str): A string of the format UUID
        team_robot_ids (list): A list of strings containing the UUIDs of the
            robots in the task
        earliest_start_time (TimeStamp): The earliest a task can start
        latest_start_time (TimeStamp): The latest a task can start
        estimated_duration (timedelta): A timedelta object specifying the duration
        pickup_pose (Area): The location where the robot should collect the load
        delivery_pose (Area): The location where the robot must drop off its load
        priority (constant): The task priority as defined by the constants
            EMERGENCY, HIGH, NORMAL, LOW
        hard_constraints (bool): False if the task can be scheduled ASAP,
            True if the task is not flexible. Defaults to True
    """
    # BUG FIX: the previous defaults (dict(), list(), Area(), Area()) were
    # evaluated once at definition time, so every Task created with the
    # defaults shared the same mutable objects; use None sentinels and
    # build fresh objects per call instead.
    if not id:
        self.id = generate_uuid()
    else:
        self.id = id
    self.robot_actions = robot_actions if robot_actions is not None else {}
    self.loadType = kwargs.get('loadType', 'MobiDik')
    self.loadId = kwargs.get('loadId', generate_uuid())
    self.team_robot_ids = team_robot_ids if team_robot_ids is not None else []
    self.earliest_start_time = earliest_start_time
    self.latest_start_time = latest_start_time
    self.estimated_duration = estimated_duration
    # finish-time bounds follow directly from the start bounds plus duration
    self.earliest_finish_time = earliest_start_time + estimated_duration
    self.latest_finish_time = latest_start_time + estimated_duration
    self.start_time = kwargs.get('start_time', None)
    self.finish_time = kwargs.get('finish_time', None)
    self.hard_constraints = kwargs.get('hard_constraints', True)
    if pickup_pose is None:
        pickup_pose = Area()
    if delivery_pose is None:
        delivery_pose = Area()
    if isinstance(pickup_pose, Area):
        self.pickup_pose = pickup_pose
    else:
        raise Exception('pickup_pose must be an object of type Area')
    if isinstance(delivery_pose, Area):
        self.delivery_pose = delivery_pose
    else:
        raise Exception('delivery_pose must be an object of type Area')
    self.status = TaskStatus(self.id)
    priority = kwargs.get('priority', self.NORMAL)
    if priority in (self.EMERGENCY, self.NORMAL, self.HIGH, self.LOW):
        self.priority = priority
    else:
        raise Exception("Priority must have one of the following values:\n"
                        "0) Urgent\n"
                        "1) High\n"
                        "2) Normal\n"
                        "3) Low")
def get_by_key(self, key):
    """Return the single record whose guid matches *key* (string keys are converted first)."""
    guid = generate_uuid(key) if isinstance(key, basestring) else key
    return self.get(guid=guid)
def get_active_scientific_categories_by_key(model_customization, model_key):
    """Return the scientific category customizations active for the
    vocabulary/component pair encoded in *model_key*, honoring the
    customization's 'show all categories' setting.
    """
    vocabulary_key = get_vocabulary_key_from_model_key(model_key)
    component_key = get_component_key_from_model_key(model_key)
    # the default vocabulary/component pair carries no scientific categories
    if vocabulary_key == DEFAULT_VOCABULARY_KEY and component_key == DEFAULT_COMPONENT_KEY:
        return []
    if model_customization.model_show_all_categories:
        return model_customization.scientific_category_customizations.filter(
            vocabulary_key=generate_uuid(vocabulary_key),
            component_key=generate_uuid(component_key))
    else:
        # only categories with at least one displayed property;
        # distinct() because the join can yield duplicate rows
        return model_customization.scientific_category_customizations.filter(
            vocabulary_key=generate_uuid(vocabulary_key),
            component_key=generate_uuid(component_key),
            scientific_properties__displayed=True).distinct()
def setup_job():
    """Create a new job id plus an owner-only working directory for it.

    Returns:
        tuple: (job uuid, path of the job directory under config.inputdir)
    """
    job = generate_uuid()
    jobdir = path.join(config.inputdir, str(job))
    # 0o700: owner-only access; the modern octal literal is equivalent to the
    # old `0700` form but also remains valid syntax under Python 3.
    os.mkdir(jobdir, 0o700)
    return job, jobdir
def get_id(self):
    """Generate, store, and return a fresh uuid as this object's id.

    The previous version assigned ``self.id`` but implicitly returned None,
    which is surprising for a getter; returning the new value is
    backward-compatible for callers that ignored the result.
    """
    self.id = generate_uuid()
    return self.id
def random_test_data() -> str:
    """Return a random test string made of two generated uuids concatenated."""
    return "{}{}".format(generate_uuid(), generate_uuid())
def validate_edit_section_key(section_key):
    """Validate a '|'-separated edit-section key, resolving each segment.

    section_key format is:
    [ <version_key> |
      <model_key> |
      <vocabulary_key> |
      <component_key> |
      'standard_properties' or 'scientific_properties' |
      <category_key> |
      <property_key> ]

    Returns a 9-tuple:
    (validity, version, model_proxy, vocabulary, component_proxy,
     property_type, category_proxy, property_proxy, msg)
    with validity False and msg describing the failure as soon as any
    segment cannot be resolved.  Segments from the property type onward
    are optional.
    """
    (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg) = \
        (True, None, None, None, None, None, None, None, "")
    section_keys = section_key.split('|')
    # try to get the version...
    try:
        version_key = section_keys[0]
        version = MetadataVersion.objects.get(key=version_key, is_registered=True)
    except IndexError:
        msg = "Invalid section key; did not specify version"
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    except MetadataVersion.DoesNotExist:
        msg = "Invalid section key; unable to find a registered version w/ key=%s" % version_key
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    # try to get the model proxy...
    try:
        model_proxy_key = section_keys[1]
        model_proxy = MetadataModelProxy.objects.get(ontology=version, name__iexact=model_proxy_key)
    except IndexError:
        msg = "Invalid section key; did not specify model"
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    except MetadataModelProxy.DoesNotExist:
        msg = "Invalid section key; unable to find a model w/ name=%s" % model_proxy_key
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    # try to get the vocabulary...
    # NOTE(review): unlike segments 0-1, a missing segment 2 would raise an
    # uncaught IndexError here — confirm callers always pass >= 3 segments.
    vocabulary_key = section_keys[2]
    if not vocabulary_key:
        msg = "Invalid section key; did not specify vocabulary"
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    if vocabulary_key == DEFAULT_VOCABULARY_KEY:
        # the default vocabulary is represented as None
        vocabulary = None
    else:
        try:
            vocabulary = MetadataVocabulary.objects.get(guid=generate_uuid(vocabulary_key))
        except MetadataVocabulary.DoesNotExist:
            msg = "Invalid section key; unable to find a vocabulary w/ guid=%s" % vocabulary_key
            validity = False
            return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    # try to get the component_proxy...
    component_proxy_key = section_keys[3]
    if not component_proxy_key:
        msg = "Invalid section key; did not specify component"
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    if component_proxy_key == DEFAULT_COMPONENT_KEY:
        # the default component is represented as None
        component_proxy = None
    else:
        try:
            component_proxy = MetadataComponentProxy.objects.get(guid=generate_uuid(component_proxy_key), vocabulary=vocabulary)
        except MetadataComponentProxy.DoesNotExist:
            msg = "Invalid section key; unable to find a component w/ key=%s" % component_proxy_key
            validity = False
            return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    # from this point on the keys don't need to be present
    # (so I can just return on IndexError)
    # try to get the property type...
    try:
        property_type = section_keys[4]
        if property_type not in VALID_PROPERTY_TYPES:
            msg = "Invalid section key; must specify a property type from %s" % ", ".join(VALID_PROPERTY_TYPES)
            validity = False
            return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    except IndexError:
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    # try to get the category_proxy...
    try:
        category_proxy_key = section_keys[5]
        if property_type == STANDARD_PROPERTY_TYPE:
            category_proxy = MetadataStandardCategoryProxy.objects.get(key=category_proxy_key, categorization=version.categorization)
        else:  # property_type == SCIENTIFIC_PROPERTY_TYPES
            # scientific properties can only come from CVs, so I am sure that there will be a component
            category_proxy = MetadataScientificCategoryProxy.objects.get(key=category_proxy_key, component=component_proxy)
    except IndexError:
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    except MetadataStandardCategoryProxy.DoesNotExist:
        msg = "Invalid section key; cannot find a standard property category w/ key=%s from version %s" % (category_proxy_key, version)
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    except MetadataScientificCategoryProxy.DoesNotExist:
        msg = "Invalid section key; cannot find a scientific property category w/ key=%s from component %s" % (category_proxy_key, component_proxy)
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    # try to get the property_proxy...
    try:
        property_proxy_key = section_keys[6]
        if property_type == STANDARD_PROPERTY_TYPE:
            property_proxy = MetadataStandardPropertyProxy.objects.get(name__iexact=property_proxy_key, model_proxy=model_proxy, category=category_proxy)
        else:  # property_type == SCIENTIFIC_PROPERTY_TYPE
            property_proxy = MetadataScientificPropertyProxy.objects.get(name__iexact=property_proxy_key, component=component_proxy, category=category_proxy)
    except IndexError:
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    except MetadataStandardPropertyProxy.DoesNotExist:
        msg = "Invalid section key; cannot find a standard property w/ name=%s from model %s with category %s" % (property_proxy_key, model_proxy, category_proxy)
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    except MetadataScientificPropertyProxy.DoesNotExist:
        msg = "Invalid section key; cannot find a scientific property w/ name=%s from component %s with category %s" % (property_proxy_key, model_proxy, category_proxy)
        validity = False
        return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)
    # TODO: DEAL W/ SUBFORMS IN SECTION_KEY
    return (validity, version, model_proxy, vocabulary, component_proxy, property_type, category_proxy, property_proxy, msg)