def loads(self, serialized_activity):
    # handle the FeedEndMarker
    if serialized_activity == FEED_END:
        activity = FeedEndMarker()
    else:
        parts = serialized_activity.split(',')
        # convert these to ids
        actor_id, verb_id, object_id, target_id, entity_id = map(
            int, parts[:5])
        activity_datetime = epoch_to_datetime(float(parts[5]))
        pickle_string = parts[6]
        if not target_id:
            target_id = None
        verb = get_verb_by_id(verb_id)
        extra_context = {}
        if pickle_string:
            extra_context = pickle.loads(str(pickle_string))
        if entity_id:
            extra_context['entity_id'] = entity_id
        activity = Activity(actor_id, verb, object_id, target_id,
                            time=activity_datetime,
                            extra_context=extra_context)
    return activity
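# Sample input for the comma-delimited parser above (hypothetical
# values): actor 1, verb 2, object 3, no target (0), entity 4, a unix
# epoch timestamp, and an empty pickle payload in the last field:
#
#   '1,2,3,0,4,1356998400.0,'
#
# Zero ids fall back to None via the `if not target_id` branch, and a
# non-zero entity_id is injected into extra_context after unpickling.
# Note that a pickled payload containing ',' would be split apart by
# this parser.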
def loads(self, serialized_aggregated):
    try:
        serialized_aggregated = serialized_aggregated[2:]
        parts = serialized_aggregated.split(';;')
        # start with the group
        group = parts[0]
        aggregated = self.aggregated_class(group)
        # get the date and activities
        date_dict = dict(zip(self.date_fields, parts[1:5]))
        for k, v in date_dict.items():
            date_value = None
            if v != '-1':
                date_value = epoch_to_datetime(float(v))
            setattr(aggregated, k, date_value)
        # write the activities
        serializations = parts[5].split(';')
        activities = [LoveActivitySerializer.loads(self, s)
                      for s in serializations]
        aggregated.activities = activities
        # write the minimized activities
        minimized = int(parts[6])
        aggregated.minimized_activities = minimized
        return aggregated
    except Exception, e:
        msg = unicode(e)
        raise SerializationException(msg)
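# Wire layout implied by the parser above, after the two-character
# format/version prefix is stripped ([2:]). Fields are ';;'-separated,
# the four self.date_fields (likely created_at/updated_at/seen_at/
# read_at) are stored as unix epochs with -1 standing in for None, and
# the activity field holds the ';'-joined activity serializations:
#
#   <prefix><group>;;<date1>;;<date2>;;<date3>;;<date4>;;<act1>;<act2>;...;;<minimized>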
def loads(self, serialized_activity):
    parts = serialized_activity.split(',')
    # convert these to ids
    actor_id, verb_id, object_id, target_id = map(
        int, parts[:4])
    activity_datetime = epoch_to_datetime(float(parts[4]))
    pickle_string = parts[5]
    if not target_id:
        target_id = None
    verb = get_verb_by_id(verb_id)
    extra_context = {}
    if pickle_string:
        extra_context = pickle.loads(pickle_string)
    activity = Activity(actor_id, verb, object_id, target_id,
                        time=activity_datetime,
                        extra_context=extra_context)
    return activity
def loads(self, serialized_activity):
    parts = serialized_activity.split('|')
    # convert these to ids
    actor_id, verb_id, object_id, target_id = map(
        int, parts[:4])
    activity_datetime = epoch_to_datetime(float(parts[4]))
    pickle_string = str(parts[5])
    if not target_id:
        target_id = None
    verb = get_verb_by_id(verb_id)
    extra_context = {}
    if pickle_string:
        extra_context = pickle.loads(pickle_string)
    activity = Activity(actor_id, verb, object_id, target_id,
                        time=activity_datetime,
                        extra_context=extra_context)
    return activity
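# Sample input for the pipe-delimited variant above (hypothetical
# values); apart from the '|' separator and the explicit str() cast on
# the pickle field, it matches the comma-delimited variant before it:
#
#   '1|2|3|0|1356998400.0|'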
def loads(self, serialized_activity):
    # handle the FeedEndMarker
    if serialized_activity == FEED_END:
        activity = FeedEndMarker()
    else:
        parts = serialized_activity.split(',')
        # convert these to ids
        actor_id, verb_id, object_id, target_id, entity_id = map(
            int, parts[:5])
        activity_datetime = epoch_to_datetime(float(parts[5]))
        pickle_string = parts[6]
        if not target_id:
            target_id = None
        verb = get_verb_by_id(verb_id)
        extra_context = {}
        if pickle_string:
            extra_context = pickle.loads(pickle_string)
        if entity_id:
            extra_context['entity_id'] = entity_id
        activity = Activity(actor_id, verb, object_id, target_id,
                            time=activity_datetime,
                            extra_context=extra_context)
    return activity
def loads(self, serialized_aggregated):
    activity_serializer = self.activity_serializer_class()
    try:
        serialized_aggregated = serialized_aggregated[2:]
        parts = serialized_aggregated.split('\t\t')
        # start with the group
        group = parts[0]
        aggregated = self.aggregated_class(group)
        # get the date and activities
        date_dict = dict(zip(self.date_fields, parts[1:5]))
        for k, v in date_dict.items():
            date_value = None
            if v != '-1':
                date_value = epoch_to_datetime(float(v))
            setattr(aggregated, k, date_value)
        # write the activities
        serializations = parts[5].split('\t')
        if self.dehydrate:
            activity_ids = map(int, serializations)
            aggregated._activity_ids = activity_ids
            aggregated.dehydrated = True
        else:
            activities = [activity_serializer.loads(s)
                          for s in serializations]
            aggregated.activities = activities
            aggregated.dehydrated = False
        # write the minimized activities
        minimized = int(parts[6])
        aggregated.minimized_activities = minimized
        return aggregated
    except Exception, e:
        msg = unicode(e)
        raise SerializationException(msg)
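# Wire layout implied by the parser above: top-level fields are
# separated by double tabs, individual activities inside the activity
# field by single tabs:
#
#   <prefix><group>\t\t<date1>\t\t<date2>\t\t<date3>\t\t<date4>\t\t<a1>\t<a2>...\t\t<minimized>
#
# In dehydrated mode the activity field carries bare integer activity
# ids instead of full serializations, so the aggregate can be
# rehydrated later by fetching those activities by id.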
def unpack(strval):
    # the epoch arrives as a string; cast it before building the
    # datetime, matching the float() casts in the loaders above
    return epoch_to_datetime(float(strval))
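# Usage sketch, assuming epoch_to_datetime interprets unix seconds:
#
#   unpack('1356998400.0')  # -> datetime for 2013-01-01 00:00:00 (UTC)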