def wql_parse_pull_response(self):
    """Parse a WS-Enumeration PullResponse and collect the returned items.

    Each item becomes a dict of child-tag -> value appended to
    ``self.values``.  ``Datetime`` sub-elements are converted with
    ``date_parse``; plain leaf elements are stored as-is.  Afterwards the
    enumeration context is re-checked to decide whether more pulls follow.
    """
    item_fragment_path = objectify.ObjectPath(
        "Envelope.Body.{%s}PullResponse.Items.{%s}XmlFragment"
        % (WSMAN_Constants.WSEN, WSMAN_Constants.WSMAN))
    item_full_path = objectify.ObjectPath(
        "Envelope.Body.{%s}PullResponse.Items" % (WSMAN_Constants.WSEN))
    xml = objectify.fromstring(self.response.content)
    # Fix: default to an empty list so a response without any Items element
    # does not leave ``items`` as None and crash the iteration below.
    items = []
    try:
        items = item_fragment_path.find(xml)
    except AttributeError:
        try:
            items = item_full_path.find(xml)
        except AttributeError:
            pass
    for item in items:
        entry = {}
        for i in item.iterchildren():
            if hasattr(i, "countchildren") and i.countchildren() == 1:
                for subchild in i.iterchildren():
                    if subchild.tag == "Datetime":
                        entry[i.tag] = date_parse(str(subchild))
            else:
                # Leaf property: store the element directly.
                entry[i.tag] = i
        if entry:
            self.values.append(entry)
    self.wql_check_enum_context()
def parse_shell_output(self):
    """Accumulate stdout/stderr from Receive responses until state is Done.

    Keeps requesting further output chunks (``shell_fetch_output``) while the
    reported CommandState is not ``Done``; the decoded streams end up in
    ``self.output``.
    """
    stdout = ""
    stderr = ""
    state = ""
    sequence = 0
    while state != "Done":
        xml = objectify.fromstring(self.response.content)
        stream_path = objectify.ObjectPath(WSMAN_Constants.STREAM)
        stream_el = stream_path.find(xml)
        for stream in stream_el:
            # Fix: base64.b64decode returns bytes on Python 3; decode before
            # concatenating to the str accumulators (str += bytes raises).
            if stream.attrib["Name"] == "stdout" and stream.text:
                stdout += base64.b64decode(stream.text).decode(
                    "utf-8", errors="replace")
            if stream.attrib["Name"] == "stderr" and stream.text:
                stderr += base64.b64decode(stream.text).decode(
                    "utf-8", errors="replace")
        command_state_path = objectify.ObjectPath(
            WSMAN_Constants.COMMAND_STATE)
        command_state_el = command_state_path.find(xml)
        # State attribute is a URI; the final path segment carries the value.
        state = command_state_el.attrib["State"].split("/")[-1]
        sequence += 1
        if state != "Done":
            self.shell_fetch_output(sequence)
    self.output = {"stdout": stdout, "stderr": stderr}
    self.current_action = "ReceiveDone"
    self.next_state()
def load_meta_data(meta_data_file: Readable) -> EPTSMetaData:
    """Parse an EPTS metadata XML file into an ``EPTSMetaData`` object."""
    root = objectify.fromstring(meta_data_file.read())
    meta_data = root.find("Metadata")
    # The Score element carries the team-id attributes used to map ids to sides.
    score_path = objectify.ObjectPath(
        "Metadata.Sessions.Session[0].MatchParameters.Score")
    score_elm = score_path.find(meta_data)
    _team_map = {
        score_elm.attrib["idLocalTeam"]: Team.HOME,
        score_elm.attrib["idVisitingTeam"]: Team.AWAY,
    }
    players = _load_players(meta_data.find("Players"), _team_map)
    data_format_specifications = _load_data_format_specifications(
        root.find("DataFormatSpecifications"))
    # Sensors are read from the first declared device only.
    device_path = objectify.ObjectPath("Metadata.Devices.Device[0].Sensors")
    sensors = _load_sensors(device_path.find(meta_data))
    # Lookup tables used to resolve PlayerChannel references below.
    _channel_map = {
        channel.channel_id: channel
        for sensor in sensors for channel in sensor.channels
    }
    _player_map = {player.player_id: player for player in players}
    player_channels = [
        PlayerChannel(
            player_channel_id=player_channel_elm.attrib["id"],
            player=_player_map[player_channel_elm.attrib["playerId"]],
            channel=_channel_map[player_channel_elm.attrib["channelId"]],
        ) for player_channel_elm in meta_data.find(
            "PlayerChannels").iterchildren(tag="PlayerChannel")
    ]
    team_name_map = {
        _team_map[team_elm.attrib["id"]]: str(team_elm.find("Name"))
        for team_elm in meta_data.find("Teams").iterchildren(tag="Team")
    }
    frame_rate = int(meta_data.find("GlobalConfig").find("FrameRate"))
    periods = _load_periods(meta_data.find("GlobalConfig"), frame_rate)
    pitch_dimensions = _load_pitch_dimensions(meta_data, sensors)
    return EPTSMetaData(
        home_team_name=team_name_map[Team.HOME],
        away_team_name=team_name_map[Team.AWAY],
        players=players,
        periods=periods,
        pitch_dimensions=pitch_dimensions,
        data_format_specifications=data_format_specifications,
        player_channels=player_channels,
        frame_rate=frame_rate,
        sensors=sensors,
    )
def shell_fetch_output(self, sequence):
    """Send a WS-Man Receive request asking for stdout/stderr of the command.

    sequence: SequenceId to put on the Receive message (increments per fetch).
    Caches the CommandId from the previous response on first use.
    """
    commandId_path = WSMAN_Constants.COMMAND_ID
    # Idiom fix: identity comparison against None instead of ``!= None``.
    if self.commandId is not None:
        commandId = self.commandId
    else:
        commandId = objectify.ObjectPath(commandId_path).find(
            objectify.fromstring(self.response.content)).text
        self.commandId = commandId
    sub_body = ET.Element(WSMAN_Constants.RECEIVE_TAG,
                          nsmap={"rsp": WSMAN_Constants.SHELL})
    sub_body.attrib["SequenceId"] = str(sequence)
    desiredStream = ET.SubElement(sub_body,
                                  WSMAN_Constants.DESIRED_STREAM_TAG)
    desiredStream.attrib["CommandId"] = commandId
    desiredStream.text = "stdout stderr"
    root = self.generate_message(WSMAN_Constants.RECEIVE_ACTION, self.host,
                                 WSMAN_Constants.CMD, uuid.uuid4(), sub_body,
                                 self.get_shell_sub_header())
    receive_msg = ET.tostring(root)
    r = self.session.post(self.host,
                          auth=self.auth,
                          data=receive_msg,
                          headers=self.headers,
                          timeout=self.timeout)
    try:
        self.check_response(r)
    except WSManFault_NoShellOutput:
        # No output yet: loop back into the fetch-output state and retry.
        self.current_action = States.FETCH_OUTPUT['action']
        self.next_state()
def _get_attributes(self):
    """Build (once) and return the workflow's attribute definition map.

    The map is keyed by attribute name; each entry describes the attribute's
    type and flags as declared in the workflow XML.
    """
    if not self.__attribute_map:
        res = {}
        for element in self._xml_root:
            find = objectify.ObjectPath("Workflow.Attributes")
            if find.hasattr(element):
                for attr in find(element).iterchildren():
                    # Only namespaced <Attribute> elements are processed.
                    if attr.tag == "{http://www.gonicus.de/Workflows}Attribute":
                        if attr.Name.text not in res:
                            res[attr.Name.text] = {}
                        res[attr.Name.text] = {
                            'description':
                            str(self._load(attr, "Description", "")),
                            'type': attr.Type.text,
                            # NOTE(review): 'default' is coerced to bool here,
                            # while a sibling implementation uses str() —
                            # confirm which is intended.
                            'default':
                            bool(self._load(attr, "Default", None)),
                            'multivalue':
                            bool(self._load(attr, "MultiValue", False)),
                            'mandatory':
                            bool(self._load(attr, "Mandatory", False)),
                            'readonly':
                            bool(self._load(attr, "ReadOnly", False)),
                            'case_sensitive':
                            bool(self._load(attr, "CaseSensitive", False)),
                            'unique':
                            bool(self._load(attr, "Unique", False))
                        }
        # Cache for subsequent calls.
        self.__attribute_map = res
    return self.__attribute_map
def sequence_insert(node, child, successor_sections):
    """Insert ``child`` into ``node`` preserving section ordering.

    The child is placed immediately before the first section from
    ``successor_sections`` that already exists under ``node``; if none of
    them is present, the child is simply appended.
    """
    for section in successor_sections:
        section_path = objectify.ObjectPath(node.tag + '.' + section)
        if section_path.hasattr(node):
            node[section].addprevious(child)
            return
    node.append(child)
def handle_fault(self):
    """Inspect a SOAP Fault response and raise ``WSManFault`` with details.

    Fault code 2150858793 ("no output available yet") is treated as a retry
    signal instead of an error.
    """
    xml = objectify.fromstring(self.response.content)
    is_fault = objectify.ObjectPath("Envelope.Body.Fault")
    if is_fault.hasattr(xml):
        fault = is_fault.find(xml)
        fault_text = fault.Reason.Text.text
        has_wsman_details = objectify.ObjectPath(
            ".Detail.{%s}WSManFault" % (WSMAN_Constants.WSMAN_FAULT))
        if has_wsman_details.hasattr(fault):
            details = has_wsman_details.find(fault)
            # Idiom fix: membership test instead of attrib.__contains__().
            if "Code" in details.attrib and \
                    details.attrib["Code"] == "2150858793":
                # Shell has produced no output yet - retry the fetch.
                self.shell_fetch_output(0)
            else:
                detail_text = details.Message.text
                # Idiom fix: identity comparison against None.
                if detail_text is not None:
                    raise WSManFault(detail_text.encode('utf-8'))
def check_response(self, r):
    """Validate a WS-Man HTTP response and advance the state machine.

    Raises ``TypeError`` when the response is not WS-Man SOAP content;
    otherwise records the action/response and calls ``next_state()``.
    """
    if 'Content-Type' not in r.headers:
        r.raise_for_status()
        # Fix: a 2xx response without a Content-Type previously fell
        # through to a KeyError below; fail explicitly instead.
        raise TypeError(r.content)
    # Idiom fix: ``not x.startswith(...)`` instead of ``... is False``.
    if not r.headers['Content-Type'].startswith(
            WSMAN_Constants.CONTENT_TYPE):
        raise TypeError(r.content)
    answer = objectify.fromstring(r.content)
    action_path = objectify.ObjectPath(WSMAN_Constants.ACTION_PATH)
    action_el = action_path.find(answer)
    self.current_action = action_el.text
    self.response = r
    self.next_state()
def get_event_identifier_by_name(self, event_name):
    """
    Get the identifier of an event with a given name.
    @param event_name: Name of the
    @return: event identifier value
    """
    xpath_expr = "//*[local-name() = 'eventType' and text()='%s']/parent::*/descendant::*[local-name() = 'eventIdentifierValue']" % event_name
    matches = self.root.xpath(xpath_expr)
    # Exactly one match: take its text; otherwise fall back to the
    # premis.object lookup.
    if len(matches) == 1:
        return matches[0].text
    return objectify.ObjectPath("premis.object")(self.root)
def __add(self, wf_path):
    """
    Read workflow from the given path and add it so the registry.
    """
    schema = etree.XMLSchema(
        file=resource_filename("gosa.backend", "data/workflow.xsd"))
    parser = objectify.makeparser(schema=schema)
    root = objectify.parse(os.path.join(wf_path, "workflow.xml"),
                           parser).getroot()
    # Optional elements: leave None when absent.
    # Fix: replaced bare ``except:`` (which also swallows SystemExit and
    # KeyboardInterrupt) with ``except Exception``.
    description = None
    try:
        description = objectify.ObjectPath("Workflow.Description")(
            root)[0].text
    except Exception:
        pass
    icon = None
    try:
        icon = objectify.ObjectPath("Workflow.Icon")(root)[0].text
    except Exception:
        pass
    category = None
    try:
        category = objectify.ObjectPath("Workflow.Category")(root)[0].text
    except Exception:
        pass
    # Mandatory element; let a missing Id propagate as an error.
    # (Local renamed from ``id`` to avoid shadowing the builtin.)
    wf_id = objectify.ObjectPath("Workflow.Id")(root)[0].text
    entry = dict(id=wf_id,
                 file_path=wf_path,
                 display_name=objectify.ObjectPath("Workflow.DisplayName")(
                     root)[0].text,
                 description=description,
                 icon=icon,
                 category=category)
    self._workflows[wf_id] = entry
def _load_provider(metadata_elm, provider: Provider = None) -> Provider:
    """Resolve the data provider for an EPTS file.

    Reads the provider name from the metadata XML; when a ``provider`` is
    already given, it wins, but a mismatch with the file's provider emits a
    warning.
    """
    name_path = objectify.ObjectPath("Metadata.GlobalConfig.ProviderName")
    provider_from_file = _parse_provider(name_path.find(metadata_elm))
    if not provider:
        return provider_from_file
    if provider_from_file and provider_from_file != provider:
        warnings.warn(
            f"Given provider name is different to the name of the Provider read from the XML-file",
            Warning,
        )
    return provider
def get_templates(self):
    """Load every template file referenced by the workflow definition.

    Returns a dict keyed by template file name with its position index and
    file content.
    """
    templates = {}
    template_nodes = objectify.ObjectPath("Workflow.Templates")(
        self._xml_root[0]).getchildren()
    for idx, template in enumerate(template_nodes):
        tpl_path = os.path.join(self._path, self.uuid, "templates",
                                template.text)
        with open(tpl_path, "r") as handle:
            templates[template.text] = {
                "index": idx,
                "content": handle.read()
            }
    return templates
def importXML(self, xml_tree):
    """Load a beam description from an XML tree into this object's traits.

    Missing trait elements are created empty via ObjectPath.setattr so the
    subsequent attribute accesses cannot fail.
    """
    self.tree = etree.ElementTree(xml_tree)
    self.beam = self.tree.getroot()
    for trait, xml in TRAITS_TO_XML:
        try:
            # Probe that the XML path exists; NOTE(review): exec on a
            # trusted, hard-coded TRAITS_TO_XML table - do not feed it
            # external input.
            exec('self.' + xml)
        except AttributeError:
            # Create the missing element with an empty value.
            path = objectify.ObjectPath(xml.replace('beam.', '.'))
            path.setattr(self.beam, '')
    try:
        self.beam.MeasurementDetails.ModificationHistory
    except AttributeError:
        path = objectify.ObjectPath(
            '.MeasurementDetails.ModificationHistory.Record')
        path.setattr(self.beam, '')
    self.initialize_traits()
    # a = self.list_traits()
    #
    abscissa = []
    ordinate = []
    mod_history = []
    for i in self.beam.Data.Abscissa.iterchildren():
        abscissa.append(float(i.text))
    for i in self.beam.Data.Ordinate.iterchildren():
        ordinate.append(float(i.text))
    for i in self.beam.MeasurementDetails.ModificationHistory.iterchildren(
    ):
        mod_history.append(str(i.text))
    self.Data_Abscissa = numpy.array(abscissa)
    self.Data_Ordinate = numpy.array(ordinate)
    self.Data_Quantity = str(self.beam.Data.Quantity)
    self.MeasurementDetails_ModificationHistory = mod_history
def get_translations(self, locale):
    """Return per-template translation JSON for ``locale``.

    Maps each template file name to the translation file's content, or to
    ``None`` when no translation exists for that locale.
    """
    translations = {}
    template_nodes = objectify.ObjectPath("Workflow.Templates")(
        self._xml_root[0]).getchildren()
    for template in template_nodes:
        # Strip the template's file extension to get the i18n folder name.
        base_name = template.text[:-5]
        translation_path = os.path.join(self._path, self.uuid, "i18n",
                                        base_name, "%s.json" % locale)
        if not os.path.isfile(translation_path):
            translations[template.text] = None
            continue
        with open(translation_path, "r") as handle:
            translations[template.text] = handle.read()
    return translations
def get_object_by_identifier(self, identifier=None):
    """
    Find the object with the identifier of the package. If this object does
    not exist, take the first object of the premis file.
    @param identifier: Identifier of the package
    @return: premis object element, or None when the identifier does not
        match exactly one object
    """
    if not identifier:
        find = objectify.ObjectPath("premis.object")
        return find(self.root)[0]
    xpath_expr = "//*[local-name() = 'object']/descendant::*[local-name() = 'objectIdentifierValue' and text()='%s']/parent::*/parent::*" % identifier
    xpath_result_nodes = self.root.xpath(xpath_expr)
    # Fix: removed leftover debug print statements.
    object_elm = None if len(
        xpath_result_nodes) != 1 else xpath_result_nodes[0]
    return object_elm
def next_state(self):
    """Advance the WS-Man state machine based on the last response's action.

    Dispatches to the handler for the next protocol step; shell states and
    WQL enumeration states are handled in two separate chains below.
    """
    # Only look for faults if we already sent something, the initial state has no response
    if hasattr(self, "response"):
        # A Fault can happen on any action - look for Fault element not for action
        xml = objectify.fromstring(self.response.content)
        is_fault = objectify.ObjectPath("Envelope.Body.Fault")
        if is_fault.hasattr(xml):
            self.current_state = States.FAULT
            self.handle_fault()
    #remote shell states
    if self.current_action == States.CREATE_SHELL['action']:
        self.current_state = States.CREATE_SHELL
        self.create_shell()
    elif self.current_action == States.CREATE_RESPONSE['action']:
        self.current_state = States.CREATE_RESPONSE
        self.shell_execute_command()
    elif self.current_action == States.COMMAND_RESPONSE['action']:
        self.current_state = States.COMMAND_RESPONSE
        self.shell_fetch_output(0)
    # Guard against re-entering RECEIVE_RESPONSE while already parsing it.
    elif not self.current_state == States.RECEIVE_RESPONSE and self.current_action == States.RECEIVE_RESPONSE[
            'action']:
        self.current_state = States.RECEIVE_RESPONSE
        self.parse_shell_output()
    elif self.current_action == States.RECEIVE_DONE['action']:
        self.current_state = States.RECEIVE_DONE
        self.shell_terminate_signal()
    elif self.current_action == States.SIGNAL_RESPONSE['action']:
        self.current_state = States.SIGNAL_RESPONSE
        self.delete_shell()
    elif self.current_action == States.FETCH_OUTPUT['action']:
        self.current_state = States.FETCH_OUTPUT
        self.shell_fetch_output(0)
    #wql states
    elif self.current_action == States.ENUMERATE_RESPONSE['action']:
        self.current_state = States.ENUMERATE_RESPONSE
        self.wql_check_enum_context()
    elif self.current_action == States.PULL['action']:
        self.current_state = States.PULL
        self.wql_send_pull_request()
    elif self.current_action == States.PULL_RESPONSE['action']:
        self.current_state = States.PULL_RESPONSE
        self.wql_parse_pull_response()
def add_related_aips(self, related_aips, related_event_name,
                     identifier=None):
    """
    Add relationship information of AIPs which were used to create the DIP
    to the premis object of the package.
    @type related_aips: list[string]
    @param related_aips: List of related AIP identifiers
    @type related_event_name: string
    @param related_event_name: Name of the related event
    @type identifier: string
    @param identifier: Identifier of the object to which the elements are
        added (first object if none given)
    @rtype: bool
    @return: Success/failure of adding the related AIP identifiers
    """
    object_node = self.get_object_by_identifier(identifier)
    # Fix: removed leftover debug print of the object identifier value
    # (and the ObjectPath lookup that only served it).
    dip_acquire_aips_id = self.get_event_identifier_by_name(
        related_event_name)
    sequence_number = 1
    for related_aip in related_aips:
        sequence_insert(
            object_node,
            P.relationship(
                P.relationshipType('derivation'),
                P.relationshipSubType('AIP to DIP conversion'),
                P.relatedObjectIdentification(
                    P.relatedObjectIdentifierType("repository"),
                    P.relatedObjectIdentifierValue(related_aip),
                    P.relatedObjectSequence(str(sequence_number)),
                ),
                P.relatedEventIdentification(
                    P.relatedEventIdentifierType("local"),
                    P.relatedEventIdentifierValue(dip_acquire_aips_id),
                ),
            ), self.premis_successor_sections)
        sequence_number += 1
    return True
def _parse_team_players(
        f7_root, team_ref: str) -> Tuple[str, Dict[str, Dict[str, str]]]:
    """Extract the team name and its player names for ``team_ref``.

    Returns (team_name, {player_uID: {"first_name": ..., "last_name": ...}}).
    Raises when the referenced team is not present in the document.
    """
    doc_path = objectify.ObjectPath("SoccerFeed.SoccerDocument")
    for team_elm in doc_path.find(f7_root).iterchildren("Team"):
        if team_elm.attrib["uID"] != team_ref:
            continue
        team_name = str(team_elm.find("Name"))
        players = {}
        for player_elm in team_elm.iterchildren("Player"):
            person_elm = player_elm.find("PersonName")
            players[player_elm.attrib["uID"]] = dict(
                first_name=str(person_elm.find("First")),
                last_name=str(person_elm.find("Last")),
            )
        return team_name, players
    raise Exception(f"Could not parse players for {team_ref}")
def extract(self, fn, real_name):
    """Validate and install an uploaded workflow zip archive.

    Checks the archive, schema-validates the contained workflow.xml,
    extracts it into the configured workflow path, refreshes the registry
    and broadcasts a WorkflowUpdate event to clients.
    """
    try:
        with ZipFile(fn) as workflow_zip:
            if workflow_zip.testzip():
                self.log.error("bad workflow zip uploaded")
                return
            env = Environment.getInstance()
            schema = etree.XMLSchema(file=resource_filename(
                "gosa.backend", "data/workflow.xsd"))
            parser = objectify.makeparser(schema=schema)
            try:
                with workflow_zip.open('workflow.xml') as dsc:
                    root = objectify.fromstring(dsc.read(), parser)
                    id = objectify.ObjectPath("Workflow.Id")(root)[0].text
                    target = os.path.join(
                        env.config.get("core.workflow_path",
                                       "/var/lib/gosa/workflows"), id)
                    workflow_zip.extractall(target)
                    WorkflowRegistry.get_instance().refresh()
                    # send the event to the clients
                    e = EventMaker()
                    ev = e.Event(
                        e.WorkflowUpdate(e.Id(id), e.ChangeType("create")))
                    event_object = objectify.fromstring(
                        etree.tostring(ev,
                                       pretty_print=True).decode('utf-8'))
                    SseHandler.notify(event_object, channel="broadcast")
            except KeyError:
                self.log.error(
                    "bad workflow zip uploaded - no workflow.xml present")
    except Exception as e:
        # Fix: log through the class logger instead of print(), and re-raise
        # with bare ``raise`` to preserve the original traceback.
        self.log.error(str(e))
        raise
def _load_pitch_dimensions(
        meta_data_elm, sensors: List[Sensor]) -> Union[None, PitchDimensions]:
    """Build normalized pitch dimensions from the session's FieldSize.

    Returns ``None`` unless the first position sensor reports normalized
    coordinates and a FieldSize element is present.
    """
    # Only the first 'position' sensor decides whether data is normalized.
    normalized = False
    for sensor in sensors:
        if sensor.sensor_id != 'position':
            continue
        normalized = sensor.channels[0].unit == 'normalized'
        break
    session_elm = objectify.ObjectPath(
        "Metadata.Sessions.Session[0]").find(meta_data_elm)
    field_size_elm = session_elm.find('FieldSize')
    if field_size_elm is None or not normalized:
        return None
    return PitchDimensions(
        x_dim=Dimension(0, 1),
        y_dim=Dimension(0, 1),
        x_per_meter=1 / int(field_size_elm.find('Width')),
        y_per_meter=1 / int(field_size_elm.find('Height')))
def calculate_severity(self, vuln):
    """Derive a severity bucket for a Fortify FVDL vulnerability.

    Looks up the rule matching the vulnerability's ClassID and combines
    Impact / Probability / Accuracy with the instance Confidence.
    Returns 'critical'/'high'/'medium'/'low', or None when the required
    rule metadata is missing.
    """
    severity = None
    # ["critical", "high", "medium", "low", "informational", "unclassified"]
    rulepath = objectify.ObjectPath("FVDL.EngineData.RuleInfo.Rule")
    impact = None
    probability = None
    accuracy = None
    # XML path /FVDL/EngineData/RuleInfo/Rule (many)/MetaInfo/Group (many)
    # the attribute "name" are keys for vuln properties
    for rule in rulepath(self.fvdl):
        if rule.get('id') == vuln.ClassInfo.ClassID:
            for group in rule.MetaInfo.iterchildren():
                if group.get('name') == "Probability":
                    probability = group
                if group.get('name') == "Impact":
                    impact = group
                if group.get('name') == "Accuracy":
                    accuracy = group
    # Fix: compute likelihood only when all inputs were found; previously it
    # was computed unconditionally and crashed with a TypeError when the
    # rule (or one of its groups) was missing.
    if impact is not None and probability is not None \
            and accuracy is not None:
        likelihood = (accuracy * vuln.InstanceInfo.Confidence *
                      probability) / 25.0
        if impact >= 2.5 and likelihood >= 2.5:
            severity = 'critical'
        elif impact >= 2.5 > likelihood:
            severity = 'high'
        elif impact < 2.5 <= likelihood:
            severity = 'medium'
        else:
            severity = 'low'
    else:
        print("missing severity")
    # print("{}:{}:{}".format(vuln.InstanceInfo.InstanceID, vuln.InstanceInfo.InstanceSeverity, severity))
    return severity
def _load_pitch_dimensions(
        metadata_elm, sensors: List[Sensor]) -> Union[None, PitchDimensions]:
    """Build normalized pitch dimensions from the session's FieldSize.

    Returns ``None`` unless the first position sensor reports normalized
    coordinates and a FieldSize element is present.
    """
    # Only the first 'position' sensor decides whether data is normalized.
    normalized = False
    for sensor in sensors:
        if sensor.sensor_id != "position":
            continue
        normalized = sensor.channels[0].unit == "normalized"
        break
    session_elm = objectify.ObjectPath(
        "Metadata.Sessions.Session[0]").find(metadata_elm)
    field_size_elm = session_elm.find("FieldSize")
    if field_size_elm is None or not normalized:
        return None
    return PitchDimensions(
        x_dim=Dimension(0, 1),
        y_dim=Dimension(0, 1),
        length=int(field_size_elm.find("Width")),
        width=int(field_size_elm.find("Height")),
    )
def _get_attributes(self):
    """Build (once) and return the workflow's attribute definition map.

    Each entry describes one declared <Attribute>: type, flags, static or
    RPC-populated value lists, blocking conditions, validators and
    inheritance relations.  Inverse inheritance links are collected in
    ``references`` and attached afterwards as 'value_inheriting_to'.
    """
    if not self.__attribute_map:
        res = {}
        references = {}
        for element in self._xml_root:
            find = objectify.ObjectPath("Workflow.Attributes")
            if find.hasattr(element):
                for attr in find(element).iterchildren():
                    # Only namespaced <Attribute> elements are processed.
                    if attr.tag == "{http://www.gonicus.de/Workflows}Attribute":
                        if attr.Name.text not in res:
                            res[attr.Name.text] = {}
                        values_populate = None
                        value_inherited_from = None
                        re_populate_on_update = False
                        values = []
                        if 'Values' in attr.__dict__:
                            avalues = []
                            dvalues = {}
                            if 'populate' in attr.__dict__[
                                    'Values'].attrib:
                                # Values come from an RPC call instead of a
                                # static list.
                                values_populate = attr.__dict__[
                                    'Values'].attrib['populate']
                                if 'refresh-on-update' in attr.__dict__[
                                        'Values'].attrib:
                                    re_populate_on_update = attr.__dict__[
                                        'Values'].attrib[
                                            'refresh-on-update'].lower(
                                            ) == "true"
                            else:
                                # Static values: keyed entries go into a
                                # dict, plain entries into a list.
                                for d in attr.__dict__[
                                        'Values'].iterchildren():
                                    if 'key' in d.attrib:
                                        dvalues[d.attrib['key']] = d.text
                                    else:
                                        avalues.append(d.text)
                                if avalues:
                                    values = avalues
                                else:
                                    values = dvalues
                        if 'InheritFrom' in attr.__dict__:
                            value_inherited_from = {
                                "rpc":
                                str(self._load(attr, "InheritFrom", "")),
                                "reference_attribute":
                                attr.__dict__['InheritFrom'].
                                attrib['relation']
                            }
                            # Record the inverse link for the referenced
                            # attribute.
                            if value_inherited_from[
                                    "reference_attribute"] not in references:
                                references[value_inherited_from[
                                    "reference_attribute"]] = {}
                            if value_inherited_from[
                                    "rpc"] not in references[
                                        value_inherited_from[
                                            "reference_attribute"]]:
                                references[value_inherited_from[
                                    "reference_attribute"]][
                                        value_inherited_from["rpc"]] = []
                            references[value_inherited_from[
                                "reference_attribute"]][
                                    value_inherited_from["rpc"]].append(
                                        attr.Name.text)
                        if 'Validators' in attr.__dict__:
                            self.__attribute_config[attr.Name.text] = {
                                'validators':
                                self.__xml_parsing.build_filter(
                                    attr['Validators'])
                            }
                        blocked_by = []
                        if "BlockedBy" in attr.__dict__:
                            for d in attr.__dict__[
                                    'BlockedBy'].iterchildren():
                                blocked_by.append({
                                    'name': d.text,
                                    'value':
                                    None if d.attrib['value'] == 'null'
                                    else d.attrib['value']
                                })
                        res[attr.Name.text] = {
                            'description':
                            str(self._load(attr, "Description", "")),
                            'type': attr.Type.text,
                            'default': str(self._load(attr, "Default", "")),
                            'multivalue':
                            bool(self._load(attr, "MultiValue", False)),
                            'mandatory':
                            bool(self._load(attr, "Mandatory", False)),
                            'readonly':
                            bool(self._load(attr, "ReadOnly", False)),
                            'is_reference_dn':
                            bool(self._load(attr, "IsReferenceDn", False)),
                            'case_sensitive':
                            bool(self._load(attr, "CaseSensitive", False)),
                            'unique':
                            bool(self._load(attr, "Unique", False)),
                            'blocked_by': blocked_by,
                            'values_populate': values_populate,
                            're_populate_on_update': re_populate_on_update,
                            'value_inherited_from': value_inherited_from,
                            'values': values
                        }
        # Attach the collected inverse inheritance links.
        for attr, referenced_attrs in references.items():
            res[attr]['value_inheriting_to'] = referenced_attrs
        self.__attribute_map = res
    return self.__attribute_map
def deserialize(self, inputs: Dict[str, Readable],
                options: Dict = None) -> EventDataset:
    """
    Deserialize Opta event data into a `EventDataset`.

    Parameters
    ----------
    inputs : dict
        input `f24_data` should point to a `Readable` object containing
        the 'xml' formatted event data. input `f7_data` should point to
        a `Readable` object containing the 'xml' formatted f7 data.
    options : dict
        Options for deserialization of the Opta file. Possible options are
        `event_types` (list of event types) to specify the event types that
        should be returned. Valid types: "shot", "pass", "carry", "take_on"
        and "generic". Generic is everything other than the first 4. Those
        events are barely parsed. This type of event can be used to do the
        parsing yourself. Every event has a 'raw_event' attribute which
        contains the original dictionary.
    Returns
    -------
    dataset : EventDataset
    Raises
    ------

    See Also
    --------

    Examples
    --------
    >>> serializer = OptaSerializer()
    >>> with open("123_f24.xml", "rb") as f24_data, \
    >>>      open("123_f7.xml", "rb") as f7_data:
    >>>
    >>>     dataset = serializer.deserialize(
    >>>         inputs={
    >>>             'f24_data': f24_data,
    >>>             'f7_data': f7_data
    >>>         },
    >>>         options={
    >>>             'event_types': ["pass", "take_on", "carry", "shot"]
    >>>         }
    >>>     )
    """
    self.__validate_inputs(inputs)
    if not options:
        options = {}
    with performance_logging("load data", logger=logger):
        f7_root = objectify.fromstring(inputs["f7_data"].read())
        f24_root = objectify.fromstring(inputs["f24_data"].read())
    # Empty list means "keep every event type".
    wanted_event_types = [
        EventType[event_type.upper()]
        for event_type in options.get("event_types", [])
    ]
    with performance_logging("parse data", logger=logger):
        # Line-ups come from the f7 feed.
        matchdata_path = objectify.ObjectPath(
            "SoccerFeed.SoccerDocument.MatchData")
        team_elms = list(
            matchdata_path.find(f7_root).iterchildren("TeamData"))
        away_player_map = {}
        home_player_map = {}
        home_team_id = None
        away_team_id = None
        for team_elm in team_elms:
            # Map player ref (without 'p' prefix) to shirt number.
            player_map = {
                player_elm.attrib["PlayerRef"].lstrip("p"):
                player_elm.attrib["ShirtNumber"]
                for player_elm in team_elm.find(
                    "PlayerLineUp").iterchildren("MatchPlayer")
            }
            team_id = team_elm.attrib["TeamRef"].lstrip("t")
            if team_elm.attrib["Side"] == "Home":
                home_player_map = player_map
                home_team_id = team_id
            elif team_elm.attrib["Side"] == "Away":
                away_player_map = player_map
                away_team_id = team_id
            else:
                raise Exception(f"Unknown side: {team_elm.attrib['Side']}")
        if not away_player_map or not home_player_map:
            raise Exception("LineUp incomplete")
        # Events come from the f24 feed.
        game_elm = f24_root.find("Game")
        periods = [
            Period(
                id=1,
                start_timestamp=None,
                end_timestamp=None,
            ),
            Period(
                id=2,
                start_timestamp=None,
                end_timestamp=None,
            ),
        ]
        events = []
        for event_elm in game_elm.iterchildren("Event"):
            event_id = event_elm.attrib["id"]
            type_id = int(event_elm.attrib["type_id"])
            timestamp = _parse_f24_datetime(event_elm.attrib["timestamp"])
            period_id = int(event_elm.attrib["period_id"])
            for period in periods:
                if period.id == period_id:
                    break
            else:
                # for/else: no matching period was found.
                logger.debug(
                    f"Skipping event {event_id} because period doesn't match {period_id}"
                )
                continue
            if type_id == EVENT_TYPE_START_PERIOD:
                logger.debug(
                    f"Set start of period {period.id} to {timestamp}")
                period.start_timestamp = timestamp
            elif type_id == EVENT_TYPE_END_PERIOD:
                logger.debug(
                    f"Set end of period {period.id} to {timestamp}")
                period.end_timestamp = timestamp
            else:
                if not period.start_timestamp:
                    # not started yet
                    continue
                if event_elm.attrib["team_id"] == home_team_id:
                    team = Team.HOME
                    current_team_map = home_player_map
                elif event_elm.attrib["team_id"] == away_team_id:
                    team = Team.AWAY
                    current_team_map = away_player_map
                else:
                    raise Exception(
                        f"Unknown team_id {event_elm.attrib['team_id']}")
                x = float(event_elm.attrib["x"])
                y = float(event_elm.attrib["y"])
                outcome = int(event_elm.attrib["outcome"])
                qualifiers = {
                    int(qualifier_elm.attrib["qualifier_id"]):
                    qualifier_elm.attrib.get("value")
                    for qualifier_elm in event_elm.iterchildren("Q")
                }
                player_jersey_no = None
                if "player_id" in event_elm.attrib:
                    player_jersey_no = current_team_map[
                        event_elm.attrib["player_id"]]
                generic_event_kwargs = dict(
                    # from DataRecord
                    period=period,
                    timestamp=timestamp - period.start_timestamp,
                    ball_owning_team=None,
                    ball_state=BallState.ALIVE,
                    # from Event
                    event_id=event_id,
                    team=team,
                    player_jersey_no=player_jersey_no,
                    position=Point(x=x, y=y),
                    raw_event=event_elm,
                )
                if type_id == EVENT_TYPE_PASS:
                    pass_event_kwargs = _parse_pass(qualifiers, outcome)
                    event = PassEvent(
                        **pass_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_OFFSIDE_PASS:
                    pass_event_kwargs = _parse_offside_pass()
                    event = PassEvent(
                        **pass_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_TAKE_ON:
                    take_on_event_kwargs = _parse_take_on(outcome)
                    event = TakeOnEvent(
                        **take_on_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id in (
                        EVENT_TYPE_SHOT_MISS,
                        EVENT_TYPE_SHOT_POST,
                        EVENT_TYPE_SHOT_SAVED,
                        EVENT_TYPE_SHOT_GOAL,
                ):
                    shot_event_kwargs = _parse_shot(
                        qualifiers,
                        type_id,
                        position=generic_event_kwargs["position"],
                    )
                    kwargs = {}
                    kwargs.update(generic_event_kwargs)
                    kwargs.update(shot_event_kwargs)
                    event = ShotEvent(**kwargs)
                else:
                    event = GenericEvent(**generic_event_kwargs,
                                         result=None)
                if (not wanted_event_types
                        or event.event_type in wanted_event_types):
                    events.append(event)
    return EventDataset(
        flags=DatasetFlag.BALL_OWNING_TEAM,
        orientation=Orientation.ACTION_EXECUTING_TEAM,
        pitch_dimensions=PitchDimensions(x_dim=Dimension(0, 100),
                                         y_dim=Dimension(0, 100)),
        periods=periods,
        records=events,
    )
def get_id(self):
    """Return the workflow's Id text from the first XML root element."""
    return objectify.ObjectPath("Workflow.Id")(self._xml_root[0]).text
def load_metadata(metadata_file: Readable,
                  provider: Provider = None) -> EPTSMetadata:
    """Parse an EPTS metadata XML file into an ``EPTSMetadata`` object."""
    root = objectify.fromstring(metadata_file.read())
    metadata = root.find("Metadata")
    # Score carries both the result and the team-id attributes that map
    # team ids to home/away grounds.
    score_path = objectify.ObjectPath(
        "Metadata.Sessions.Session[0].MatchParameters.Score")
    score_elm = score_path.find(metadata)
    score = Score(home=score_elm.LocalTeamScore,
                  away=score_elm.VisitingTeamScore)
    _team_map = {
        Ground.HOME: score_elm.attrib["idLocalTeam"],
        Ground.AWAY: score_elm.attrib["idVisitingTeam"],
    }
    _team_name_map = {
        team_elm.attrib["id"]: str(team_elm.find("Name"))
        for team_elm in metadata.find("Teams").iterchildren(tag="Team")
    }
    teams_metadata = {}
    for ground, team_id in _team_map.items():
        team = Team(team_id=team_id,
                    name=_team_name_map[team_id],
                    ground=ground)
        team.players = _load_players(metadata.find("Players"), team)
        teams_metadata.update({ground: team})
    data_format_specifications = _load_data_format_specifications(
        root.find("DataFormatSpecifications"))
    # Sensors are read from the first declared device only.
    device_path = objectify.ObjectPath("Metadata.Devices.Device[0].Sensors")
    sensors = _load_sensors(device_path.find(metadata))
    # Lookup tables used to resolve PlayerChannel references below.
    _channel_map = {
        channel.channel_id: channel
        for sensor in sensors for channel in sensor.channels
    }
    _all_players = [
        player for key, value in teams_metadata.items()
        for player in value.players
    ]
    _player_map = {player.player_id: player for player in _all_players}
    player_channels = [
        PlayerChannel(
            player_channel_id=player_channel_elm.attrib["id"],
            player=_player_map[player_channel_elm.attrib["playerId"]],
            channel=_channel_map[player_channel_elm.attrib["channelId"]],
        ) for player_channel_elm in metadata.find(
            "PlayerChannels").iterchildren(tag="PlayerChannel")
    ]
    frame_rate = int(metadata.find("GlobalConfig").find("FrameRate"))
    pitch_dimensions = _load_pitch_dimensions(metadata, sensors)
    periods = _load_periods(metadata.find("GlobalConfig"), frame_rate)
    if periods:
        start_attacking_direction = periods[0].attacking_direction
    else:
        start_attacking_direction = None
    orientation = (
        (Orientation.FIXED_HOME_AWAY
         if start_attacking_direction == AttackingDirection.HOME_AWAY
         else Orientation.FIXED_AWAY_HOME)
        if start_attacking_direction != AttackingDirection.NOT_SET
        else None)
    # NOTE(review): the computed orientation is assigned onto the XML
    # element, while EPTSMetadata below receives orientation=None — looks
    # like the intent was to pass ``orientation`` instead; confirm.
    metadata.orientation = orientation
    return EPTSMetadata(
        teams=list(teams_metadata.values()),
        periods=periods,
        pitch_dimensions=pitch_dimensions,
        data_format_specifications=data_format_specifications,
        player_channels=player_channels,
        frame_rate=frame_rate,
        sensors=sensors,
        score=score,
        orientation=None,
        provider=provider,
        flags=~(DatasetFlag.BALL_STATE | DatasetFlag.BALL_OWNING_TEAM),
    )
def deserialize(self, inputs: OptaInputs) -> EventDataset:
    """Deserialize Opta f7 (line-ups) + f24 (events) feeds into an EventDataset.

    Builds teams/score from f7, then walks every f24 <Event>, tracking
    period boundaries and ball possession, and converts each event into
    the matching kloppy event type before coordinate transformation.
    """
    transformer = self.get_transformer(length=100, width=100)
    with performance_logging("load data", logger=logger):
        f7_root = objectify.fromstring(inputs.f7_data.read())
        f24_root = objectify.fromstring(inputs.f24_data.read())
    with performance_logging("parse data", logger=logger):
        matchdata_path = objectify.ObjectPath(
            "SoccerFeed.SoccerDocument.MatchData")
        team_elms = list(
            matchdata_path.find(f7_root).iterchildren("TeamData"))
        home_score = None
        away_score = None
        for team_elm in team_elms:
            if team_elm.attrib["Side"] == "Home":
                home_score = team_elm.attrib["Score"]
                home_team = _team_from_xml_elm(team_elm, f7_root)
            elif team_elm.attrib["Side"] == "Away":
                away_score = team_elm.attrib["Score"]
                away_team = _team_from_xml_elm(team_elm, f7_root)
            else:
                raise DeserializationError(
                    f"Unknown side: {team_elm.attrib['Side']}")
        score = Score(home=home_score, away=away_score)
        teams = [home_team, away_team]
        if len(home_team.players) == 0 or len(away_team.players) == 0:
            raise DeserializationError("LineUp incomplete")
        game_elm = f24_root.find("Game")
        periods = [
            Period(
                id=1,
                start_timestamp=None,
                end_timestamp=None,
            ),
            Period(
                id=2,
                start_timestamp=None,
                end_timestamp=None,
            ),
        ]
        possession_team = None
        events = []
        for event_elm in game_elm.iterchildren("Event"):
            event_id = event_elm.attrib["id"]
            type_id = int(event_elm.attrib["type_id"])
            timestamp = _parse_f24_datetime(event_elm.attrib["timestamp"])
            period_id = int(event_elm.attrib["period_id"])
            for period in periods:
                if period.id == period_id:
                    break
            else:
                # for/else: no matching period was found.
                logger.debug(
                    f"Skipping event {event_id} because period doesn't match {period_id}"
                )
                continue
            if type_id == EVENT_TYPE_START_PERIOD:
                logger.debug(
                    f"Set start of period {period.id} to {timestamp}")
                period.start_timestamp = timestamp
            elif type_id == EVENT_TYPE_END_PERIOD:
                logger.debug(
                    f"Set end of period {period.id} to {timestamp}")
                period.end_timestamp = timestamp
            else:
                if not period.start_timestamp:
                    # not started yet
                    continue
                if event_elm.attrib["team_id"] == home_team.team_id:
                    team = teams[0]
                elif event_elm.attrib["team_id"] == away_team.team_id:
                    team = teams[1]
                else:
                    raise DeserializationError(
                        f"Unknown team_id {event_elm.attrib['team_id']}")
                x = float(event_elm.attrib["x"])
                y = float(event_elm.attrib["y"])
                outcome = int(event_elm.attrib["outcome"])
                raw_qualifiers = {
                    int(qualifier_elm.attrib["qualifier_id"]):
                    qualifier_elm.attrib.get("value")
                    for qualifier_elm in event_elm.iterchildren("Q")
                }
                player = None
                if "player_id" in event_elm.attrib:
                    player = team.get_player_by_id(
                        event_elm.attrib["player_id"])
                # Possession switches on ball-owning event types.
                if type_id in BALL_OWNING_EVENTS:
                    possession_team = team
                generic_event_kwargs = dict(
                    # from DataRecord
                    period=period,
                    timestamp=timestamp - period.start_timestamp,
                    ball_owning_team=possession_team,
                    ball_state=BallState.ALIVE,
                    # from Event
                    event_id=event_id,
                    team=team,
                    player=player,
                    coordinates=Point(x=x, y=y),
                    raw_event=event_elm,
                )
                if type_id == EVENT_TYPE_PASS:
                    pass_event_kwargs = _parse_pass(
                        raw_qualifiers, outcome)
                    event = PassEvent.create(
                        **pass_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_OFFSIDE_PASS:
                    pass_event_kwargs = _parse_offside_pass(raw_qualifiers)
                    event = PassEvent.create(
                        **pass_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_TAKE_ON:
                    take_on_event_kwargs = _parse_take_on(outcome)
                    event = TakeOnEvent.create(
                        qualifiers=None,
                        **take_on_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id in (
                        EVENT_TYPE_SHOT_MISS,
                        EVENT_TYPE_SHOT_POST,
                        EVENT_TYPE_SHOT_SAVED,
                        EVENT_TYPE_SHOT_GOAL,
                ):
                    if type_id == EVENT_TYPE_SHOT_GOAL:
                        # Qualifier 374 carries the precise goal time.
                        if 374 in raw_qualifiers.keys():
                            generic_event_kwargs["timestamp"] = (
                                _parse_f24_datetime(
                                    raw_qualifiers.get(374).replace(
                                        " ", "T")) -
                                period.start_timestamp)
                    shot_event_kwargs = _parse_shot(
                        raw_qualifiers,
                        type_id,
                        coordinates=generic_event_kwargs["coordinates"],
                    )
                    kwargs = {}
                    kwargs.update(generic_event_kwargs)
                    kwargs.update(shot_event_kwargs)
                    event = ShotEvent.create(**kwargs)
                elif type_id == EVENT_TYPE_RECOVERY:
                    event = RecoveryEvent.create(
                        result=None,
                        qualifiers=None,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_FOUL_COMMITTED:
                    event = FoulCommittedEvent.create(
                        result=None,
                        qualifiers=None,
                        **generic_event_kwargs,
                    )
                elif type_id in BALL_OUT_EVENTS:
                    generic_event_kwargs["ball_state"] = BallState.DEAD
                    event = BallOutEvent.create(
                        result=None,
                        qualifiers=None,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_FORMATION_CHANGE:
                    formation_change_event_kwargs = (
                        _parse_formation_change(raw_qualifiers))
                    event = FormationChangeEvent.create(
                        result=None,
                        qualifiers=None,
                        **formation_change_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_CARD:
                    generic_event_kwargs["ball_state"] = BallState.DEAD
                    card_event_kwargs = _parse_card(raw_qualifiers)
                    event = CardEvent.create(
                        **card_event_kwargs,
                        **generic_event_kwargs,
                    )
                else:
                    event = GenericEvent.create(
                        **generic_event_kwargs,
                        result=None,
                        qualifiers=None,
                        event_name=_get_event_type_name(type_id),
                    )
                if self.should_include_event(event):
                    events.append(transformer.transform_event(event))
    metadata = Metadata(
        teams=teams,
        periods=periods,
        pitch_dimensions=transformer.get_to_coordinate_system().
        pitch_dimensions,
        score=score,
        frame_rate=None,
        orientation=Orientation.ACTION_EXECUTING_TEAM,
        flags=DatasetFlag.BALL_OWNING_TEAM,
        provider=Provider.OPTA,
        coordinate_system=transformer.get_to_coordinate_system(),
    )
    return EventDataset(
        metadata=metadata,
        records=events,
    )
def __fill_method_map(self):
    """Populate ``self.__method_map`` from the workflow XML definition.

    Walks every element of ``self._xml_root`` looking for a
    ``Workflow.Methods`` section and, for each ``Method`` tag in the
    GOsa workflow namespace, registers a callable wrapper (built via
    ``self.__create_class_method``) under the method's name.

    Side effects: mutates ``self.__method_map`` in place; queries the
    CommandRegistry plugin for per-command user/session requirements.
    """
    # Local import — presumably to avoid a circular import at module
    # load time (factory imports object classes).  TODO confirm.
    from gosa.backend.objects.factory import load

    # Thin subclass that routes Python attribute access through the
    # project's Object accessor hooks (_setattr_/_getattr_/_delattr_).
    # Instances of this class are what the generated methods operate on.
    class Klass(Object):
        #noinspection PyMethodParameters
        def __init__(me, *args, **kwargs):  #@NoSelf
            Object.__init__(me, *args, **kwargs)

        #noinspection PyMethodParameters
        def __setattr__(me, name, value):  #@NoSelf
            me._setattr_(name, value)

        #noinspection PyMethodParameters
        def __getattr__(me, name):  #@NoSelf
            return me._getattr_(name)

        #noinspection PyMethodParameters
        def __delattr__(me, name):  #@NoSelf
            me._delattr_(name)

    for element in self._xml_root:
        # Only elements that actually carry a Methods section are relevant.
        find = objectify.ObjectPath('Workflow.Methods')
        if (find.hasattr(element)):
            for method in find(element).iterchildren():
                # Fully-qualified tag check: only Method tags in the
                # GOsa workflow namespace are processed.
                if method.tag == "{http://www.gonicus.de/Workflows}Method":
                    # method = attr.Command.text
                    # self.__method_map[method.text] = getattr(self.__base, method)

                    # Extract method information out of the xml tag
                    method_name = method['Name'].text
                    command = method['Command'].text

                    # Get the list of method parameters.
                    # NOTE(review): `'MethodParameters' in method.__dict__`
                    # relies on objectify exposing child tags as instance
                    # attributes — effectively "has this child element".
                    m_params = []
                    if 'MethodParameters' in method.__dict__:
                        for param in method['MethodParameters'][
                                'MethodParameter']:
                            p_name = param['Name'].text
                            p_type = param['Type'].text
                            p_required = bool(
                                load(param, "Required", False))
                            # NOTE(review): str(load(...)) turns a missing
                            # Default into the literal string "None" —
                            # confirm downstream consumers expect that.
                            p_default = str(load(param, "Default"))
                            m_params.append(
                                (p_name, p_type, p_required, p_default))

                    # Get the list of command parameters
                    c_params = []
                    if 'CommandParameters' in method.__dict__:
                        for param in method['CommandParameters']['Value']:
                            c_params.append(param.text)

                    # Append the method to the list of registered methods
                    # for this object.  The registry is consulted so the
                    # wrapper knows whether the command needs the calling
                    # user and/or session injected.
                    cr = PluginRegistry.getInstance('CommandRegistry')
                    self.__method_map[method_name] = {
                        'ref': self.__create_class_method(
                            Klass, method_name, command, m_params, c_params,
                            cr.callNeedsUser(command),
                            cr.callNeedsSession(command))
                    }
def parse_shell_id(self):
    """Return the shell id selector text from the WS-Man response body."""
    document = objectify.fromstring(self.response.content)
    selector_path = objectify.ObjectPath(WSMAN_Constants.SELECTOR_PATH)
    return selector_path.find(document).text
def deserialize(self, inputs: Dict[str, Readable],
                options: Dict = None) -> EventDataset:
    """
    Deserialize Opta event data into a `EventDataset`.

    Parameters
    ----------
    inputs : dict
        input `f24_data` should point to a `Readable` object containing
        the 'xml' formatted event data. input `f7_data` should point to
        a `Readable` object containing the 'xml' formatted f7 data.
    options : dict
        Options for deserialization of the Opta file. Possible options are
        `event_types` (list of event types) to specify the event types
        that should be returned. Valid types: "shot", "pass", "carry",
        "take_on" and "generic". Generic is everything other than the
        first 4. Those events are barely parsed. This type of event can
        be used to do the parsing yourself. Every event has a 'raw_event'
        attribute which contains the original dictionary.

    Returns
    -------
    dataset : EventDataset

    Raises
    ------

    See Also
    --------

    Examples
    --------
    >>> serializer = OptaSerializer()
    >>> with open("123_f24.xml", "rb") as f24_data, \
    >>>         open("123_f7.xml", "rb") as f7_data:
    >>>
    >>>     dataset = serializer.deserialize(
    >>>         inputs={
    >>>             'f24_data': f24_data,
    >>>             'f7_data': f7_data
    >>>         },
    >>>         options={
    >>>             'event_types': ["pass", "take_on", "carry", "shot"]
    >>>         }
    >>>     )
    """
    self.__validate_inputs(inputs)
    if not options:
        options = {}

    with performance_logging("load data", logger=logger):
        # f7 holds line-ups/teams, f24 holds the event feed.
        f7_root = objectify.fromstring(inputs["f7_data"].read())
        f24_root = objectify.fromstring(inputs["f24_data"].read())

    # Empty list means "keep every event" (checked below).
    wanted_event_types = [
        EventType[event_type.upper()]
        for event_type in options.get("event_types", [])
    ]

    with performance_logging("parse data", logger=logger):
        matchdata_path = objectify.ObjectPath(
            "SoccerFeed.SoccerDocument.MatchData")
        team_elms = list(
            matchdata_path.find(f7_root).iterchildren("TeamData"))

        home_score = None
        away_score = None
        # NOTE(review): if a side is missing from the feed, home_team /
        # away_team are never bound and the later references raise
        # NameError rather than a descriptive error — confirm acceptable.
        # NOTE(review): lxml attrib values are strings, so the scores
        # passed to Score() below are str, not int — confirm Score's
        # expectations.
        for team_elm in team_elms:
            if team_elm.attrib["Side"] == "Home":
                home_score = team_elm.attrib["Score"]
                home_team = _team_from_xml_elm(team_elm, f7_root)
            elif team_elm.attrib["Side"] == "Away":
                away_score = team_elm.attrib["Score"]
                away_team = _team_from_xml_elm(team_elm, f7_root)
            else:
                raise Exception(f"Unknown side: {team_elm.attrib['Side']}")

        score = Score(home=home_score, away=away_score)
        teams = [home_team, away_team]

        if len(home_team.players) == 0 or len(away_team.players) == 0:
            raise Exception("LineUp incomplete")

        game_elm = f24_root.find("Game")
        # Only two regulation halves are modelled; their timestamps are
        # filled in from START/END period events while iterating below.
        periods = [
            Period(
                id=1,
                start_timestamp=None,
                end_timestamp=None,
            ),
            Period(
                id=2,
                start_timestamp=None,
                end_timestamp=None,
            ),
        ]
        # Carried across iterations: last team to produce a
        # ball-owning event keeps possession until the next one.
        possession_team = None
        events = []
        for event_elm in game_elm.iterchildren("Event"):
            event_id = event_elm.attrib["id"]
            type_id = int(event_elm.attrib["type_id"])
            timestamp = _parse_f24_datetime(event_elm.attrib["timestamp"])
            period_id = int(event_elm.attrib["period_id"])
            # for/else: the else runs only when no period matched.
            for period in periods:
                if period.id == period_id:
                    break
            else:
                logger.debug(
                    f"Skipping event {event_id} because period doesn't match {period_id}"
                )
                continue

            if type_id == EVENT_TYPE_START_PERIOD:
                logger.debug(
                    f"Set start of period {period.id} to {timestamp}")
                period.start_timestamp = timestamp
            elif type_id == EVENT_TYPE_END_PERIOD:
                logger.debug(
                    f"Set end of period {period.id} to {timestamp}")
                period.end_timestamp = timestamp
            else:
                if not period.start_timestamp:
                    # not started yet
                    continue

                if event_elm.attrib["team_id"] == home_team.team_id:
                    team = teams[0]
                elif event_elm.attrib["team_id"] == away_team.team_id:
                    team = teams[1]
                else:
                    raise Exception(
                        f"Unknown team_id {event_elm.attrib['team_id']}")

                x = float(event_elm.attrib["x"])
                y = float(event_elm.attrib["y"])
                outcome = int(event_elm.attrib["outcome"])
                # Qualifier id -> optional value; "Q" children carry the
                # event's type-specific attributes.
                raw_qualifiers = {
                    int(qualifier_elm.attrib["qualifier_id"]):
                    qualifier_elm.attrib.get("value")
                    for qualifier_elm in event_elm.iterchildren("Q")
                }
                player = None
                if "player_id" in event_elm.attrib:
                    player = team.get_player_by_id(
                        event_elm.attrib["player_id"])

                if type_id in BALL_OWNING_EVENTS:
                    possession_team = team

                # Shared kwargs for every event type; timestamp is made
                # relative to the period start.
                generic_event_kwargs = dict(
                    # from DataRecord
                    period=period,
                    timestamp=timestamp - period.start_timestamp,
                    ball_owning_team=possession_team,
                    ball_state=BallState.ALIVE,
                    # from Event
                    event_id=event_id,
                    team=team,
                    player=player,
                    coordinates=Point(x=x, y=y),
                    raw_event=event_elm,
                )

                # Dispatch on the Opta type id to the matching event class.
                if type_id == EVENT_TYPE_PASS:
                    pass_event_kwargs = _parse_pass(
                        raw_qualifiers, outcome)
                    event = PassEvent.create(
                        **pass_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_OFFSIDE_PASS:
                    pass_event_kwargs = _parse_offside_pass(raw_qualifiers)
                    event = PassEvent.create(
                        **pass_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_TAKE_ON:
                    take_on_event_kwargs = _parse_take_on(outcome)
                    event = TakeOnEvent.create(
                        qualifiers=None,
                        **take_on_event_kwargs,
                        **generic_event_kwargs,
                    )
                elif type_id in (
                        EVENT_TYPE_SHOT_MISS,
                        EVENT_TYPE_SHOT_POST,
                        EVENT_TYPE_SHOT_SAVED,
                        EVENT_TYPE_SHOT_GOAL,
                ):
                    shot_event_kwargs = _parse_shot(
                        raw_qualifiers,
                        type_id,
                        coordinates=generic_event_kwargs["coordinates"],
                    )
                    # Merge order matters: shot kwargs may override
                    # generic ones (e.g. coordinates).
                    kwargs = {}
                    kwargs.update(generic_event_kwargs)
                    kwargs.update(shot_event_kwargs)
                    event = ShotEvent.create(**kwargs)
                elif type_id == EVENT_TYPE_RECOVERY:
                    event = RecoveryEvent.create(
                        result=None,
                        qualifiers=None,
                        **generic_event_kwargs,
                    )
                elif type_id == EVENT_TYPE_FOUL_COMMITTED:
                    event = FoulCommittedEvent.create(
                        result=None,
                        qualifiers=None,
                        **generic_event_kwargs,
                    )
                elif type_id in BALL_OUT_EVENTS:
                    # Ball leaving play flips the shared ball state.
                    generic_event_kwargs["ball_state"] = BallState.DEAD
                    event = BallOutEvent.create(
                        result=None,
                        qualifiers=None,
                        **generic_event_kwargs,
                    )
                else:
                    # Anything unrecognised becomes a GenericEvent so no
                    # raw data is dropped.
                    event = GenericEvent.create(
                        **generic_event_kwargs,
                        result=None,
                        qualifiers=None,
                        event_name=_get_event_type_name(type_id),
                    )

                if (not wanted_event_types
                        or event.event_type in wanted_event_types):
                    events.append(event)

    # Opta pitch coordinates are percentages, hence the fixed 0-100 grid.
    metadata = Metadata(
        teams=teams,
        periods=periods,
        pitch_dimensions=PitchDimensions(x_dim=Dimension(0, 100),
                                         y_dim=Dimension(0, 100)),
        score=score,
        frame_rate=None,
        orientation=Orientation.ACTION_EXECUTING_TEAM,
        flags=DatasetFlag.BALL_OWNING_TEAM,
        provider=Provider.OPTA,
    )

    return EventDataset(
        metadata=metadata,
        records=events,
    )