def test_working_hours_nonexistent(self):
    entry = TimelineEntry(
        teacher=self.teacher,
        start=self.tzdatetime(2032, 5, 3, 22, 0),  # does not fit
        end=self.tzdatetime(2032, 5, 3, 22, 30),
    )
    self.assertFalse(entry.is_fitting_working_hours())  # should not throw anything
def _generate_pdf_previews(entry: Entry, overwrite=False):
    original_path = Path(entry.extra_attributes['file']['path'])
    entry.extra_attributes['previews'] = entry.extra_attributes.get('previews', {})
    for preview_name, preview_params in settings.DOCUMENT_PREVIEW_SIZES.items():
        preview_path = _get_previews_dir(entry, mkdir=True) / f'{preview_name}.png'
        try:
            generate_pdf_preview(
                original_path,
                preview_path,
                (preview_params['width'], preview_params['height']),
                overwrite=overwrite,
            )
            entry.extra_attributes['previews'][preview_name] = str(preview_path)
        except FileExistsError:
            logger.debug(f'"{preview_name}" preview for #{entry.pk} already exists.')
            entry.extra_attributes['previews'][preview_name] = str(preview_path)
        except KeyboardInterrupt:
            raise
        except:
            logger.exception(
                f'Could not generate PDF preview for entry #{entry.pk} ({str(original_path)}).'
            )
            raise
def _generate_image_previews(entry: Entry, overwrite=False):
    original_path = Path(entry.extra_attributes['file']['path'])
    if 'width' not in entry.extra_attributes.get('media', {}):
        logger.warning(
            f"Image entry #{entry.id} ({entry.extra_attributes['file']['path']}) does not have a width"
        )
        return
    entry.extra_attributes['previews'] = {}
    for preview_name, preview_params in settings.IMAGE_PREVIEW_SIZES.items():
        preview_path = _get_previews_dir(entry, mkdir=True) / f'{preview_name}.jpg'
        try:
            generate_image_preview(
                original_path,
                preview_path,
                (preview_params['width'], preview_params['height']),
                overwrite=overwrite,
            )
            entry.extra_attributes['previews'][preview_name] = str(preview_path)
        except FileExistsError:
            logger.debug(f'"{preview_name}" preview for #{entry.pk} already exists.')
            entry.extra_attributes['previews'][preview_name] = str(preview_path)
        except KeyboardInterrupt:
            raise
        except:
            logger.exception(
                f'Could not generate image preview for entry #{entry.pk} ({str(original_path)}).'
            )
            raise
def extract_entries(self) -> Generator[Entry, None, None]:
    account_info = self.get_account_info()
    js_file_path = self.extracted_files_path / 'data/tweet.js'
    json_file_path = self.extracted_files_path / 'data/tweet.json'
    remove_twitter_js(js_file_path, json_file_path)

    with json_file_path.open('r', encoding='utf-8') as json_file:
        json_entries = [t['tweet'] for t in json.load(json_file)]

    logger.info(f"Adding tweets found in {str(json_file_path)}")
    for tweet in json_entries:
        entry = Entry(
            schema='social.twitter.tweet',
            title='',
            description=tweet['full_text'],
            date_on_timeline=twitter_date_to_datetime(tweet['created_at']),
            extra_attributes={
                "post_id": tweet['id'],
                "post_user": account_info['username'],
                "source": self.entry_source,
            },
            source=self.entry_source,
        )
        if tweet.get('in_reply_to_status_id'):
            entry.extra_attributes['post_parent_id'] = tweet['in_reply_to_status_id']
        yield entry
def test_cant_save_due_to_overlap(self):
    overlapping_entry = TimelineEntry(
        teacher=self.teacher,
        lesson=self.lesson,
        start=self.tzdatetime(2016, 1, 3, 4, 0),
        end=self.tzdatetime(2016, 1, 3, 4, 30),
    )
    with self.assertRaises(AutoScheduleExpcetion):  # should conflict with self.big_entry
        overlapping_entry.clean()
def test_no_validation_when_more_than_one_student_has_signed(self):
    """
    There is no need to validate a timeline entry when it has students.
    """
    overlapping_entry = TimelineEntry(
        teacher=self.teacher,
        lesson=self.lesson,
        start=self.tzdatetime(2016, 1, 3, 4, 0),
        end=self.tzdatetime(2016, 1, 3, 4, 30),
        taken_slots=1,
    )
    overlapping_entry.clean()  # should not throw anything
    self.assertTrue(True)
def test_cant_save_due_to_not_fitting_working_hours(self):
    """
    Create an entry that does not fit into the teacher's working hours.
    """
    entry = TimelineEntry(
        teacher=self.teacher,
        lesson=self.lesson,
        start=self.tzdatetime(2032, 5, 3, 13, 30),  # monday
        end=self.tzdatetime(2032, 5, 3, 14, 0),
        allow_besides_working_hours=False,
    )
    with self.assertRaises(DoesNotFitWorkingHours, msg="Entry does not fit the teacher's working hours"):
        entry.clean()
def test_working_hours(self):
    mixer.blend(WorkingHours, teacher=self.teacher, start='12:00', end='13:00', weekday=0)

    entry_besides_hours = TimelineEntry(
        teacher=self.teacher,
        start=self.tzdatetime(2032, 5, 3, 4, 0),
        end=self.tzdatetime(2032, 5, 3, 4, 30),
    )
    self.assertFalse(entry_besides_hours.is_fitting_working_hours())

    entry_within_hours = TimelineEntry(
        teacher=self.teacher,
        start=self.tzdatetime(2032, 5, 3, 12, 30),
        end=self.tzdatetime(2032, 5, 3, 13, 0),
    )
    self.assertTrue(entry_within_hours.is_fitting_working_hours())
def test_get_teachers_by_lesson(self):
    """
    Find teachers for a particular lesson.
    """
    first_master_class = mixer.blend(lessons.MasterClass, host=self.teacher)
    first_entry = TimelineEntry(
        teacher=self.teacher,
        lesson=first_master_class,
        start=self.tzdatetime(2032, 5, 3, 14, 10),
        end=self.tzdatetime(2032, 5, 3, 14, 40),
    )
    first_entry.save()

    free_teachers = list(Teacher.objects.find_free(
        date=self.tzdatetime(2032, 5, 3),
        lesson_id=first_master_class.pk,
    ))
    self.assertEqual(len(free_teachers), 1)
def test_free_slots_for_lesson_type_validates_with_auto_schedule(self):
    master_class = mixer.blend(lessons.MasterClass, host=self.teacher)
    entry = TimelineEntry(
        teacher=self.teacher,
        lesson=master_class,
        start=self.tzdatetime(2032, 5, 3, 14, 10),
        end=self.tzdatetime(2032, 5, 3, 14, 40),
    )
    entry.save()

    lesson_type = ContentType.objects.get_for_model(master_class)

    with patch('timeline.models.Entry.clean') as clean:
        clean.side_effect = AutoScheduleExpcetion(message='testing')
        slots = self.teacher.find_free_slots(date=self.tzdatetime(2032, 5, 3), lesson_type=lesson_type.pk)
        self.assertEqual(len(slots), 0)
def test_cant_save_due_to_teacher_has_events(self):
    entry = TimelineEntry(
        teacher=self.teacher,
        lesson=self.lesson,
        start=self.tzdatetime(2016, 5, 3, 13, 30),
        end=self.tzdatetime(2016, 5, 3, 14, 0),
    )
    mixer.blend(
        ExternalEvent,
        teacher=self.teacher,
        start=self.tzdatetime(2016, 5, 2, 0, 0),
        end=self.tzdatetime(2016, 5, 5, 23, 59),
    )
    with self.assertRaises(AutoScheduleExpcetion):
        entry.clean()
class TestCheckEntry(ClientTestCase):
    """
    :view:`timeline.check_entry` is a helper for the timeline creation form.
    It checks entry validity: working hours and overlapping.
    """
    def setUp(self):
        self.teacher = create_teacher()
        self.lesson = mixer.blend(lessons.MasterClass, host=self.teacher)
        self.entry = TimelineEntry(
            teacher=self.teacher,
            lesson=self.lesson,
            start=self.tzdatetime('Europe/Moscow', 2016, 1, 18, 14, 10),
            end=self.tzdatetime('Europe/Moscow', 2016, 1, 18, 14, 40),
        )
        self.entry.save()

        mixer.blend(WorkingHours, teacher=self.teacher, weekday=0, start='13:00', end='15:00')

        self.absence = Absence(
            type='vacation',
            teacher=self.teacher,
            start=self.tzdatetime(2032, 5, 3, 0, 0),
            end=self.tzdatetime(2032, 5, 3, 23, 59),
        )
        self.absence.save()

    def test_check_overlap_true(self):
        res = self.__check_entry(
            start='2016-01-18 14:30',
            end='2016-01-18 15:00',
        )
        self.assertEqual(res, 'TeacherHasOtherLessons')

    def test_teacher_is_present_false(self):
        res = self.__check_entry(
            start='2032-05-03 14:00',  # this day the teacher is on vacation
            end='2032-05-03 14:30',
        )
        self.assertEqual(res, 'TeacherIsAbsent')

    def __check_entry(self, start, end):
        response = self.c.get(
            '/timeline/%s/check_entry/%s/%s/' % (self.teacher.user.username, start, end)
        )
        self.assertEqual(response.status_code, 200)

        result = json.loads(response.content.decode('utf-8'))
        return result['result']
def test_cant_save_due_to_teacher_absence(self):
    entry = TimelineEntry(
        teacher=self.teacher,
        lesson=self.lesson,
        start=self.tzdatetime(2016, 5, 3, 13, 30),
        end=self.tzdatetime(2016, 5, 3, 14, 0),
    )
    vacation = Absence(
        type='vacation',
        teacher=self.teacher,
        start=self.tzdatetime(2016, 5, 2, 0, 0),
        end=self.tzdatetime(2016, 5, 5, 23, 59),
    )
    vacation.save()

    with self.assertRaises(AutoScheduleExpcetion):
        entry.clean()
def entry_from_call(self, account: dict, chat: dict, message: dict) -> Entry:
    if message['actor_id'] == self.account_id(account):
        # Outgoing call
        caller1 = {'name': message['actor'], 'id': message['actor_id']}
        caller2 = {'name': chat['name'], 'id': f"user{chat['id']}"}  # The chat ID is the other user's ID
    else:
        # Incoming call
        caller1 = {'name': self.account_name(account), 'id': self.account_id(account)}
        caller2 = {'name': message['actor'], 'id': message['actor_id']}

    return Entry(
        source=self.entry_source,
        schema='call.telegram',
        title='',
        description='',
        extra_attributes={
            'duration': message.get('duration_seconds', 0),  # Not set for failed calls
            'caller1_name': caller1['name'],
            'caller1_id': caller1['id'],
            'caller2_name': caller2['name'],
            'caller2_id': caller2['id'],
        },
        date_on_timeline=self.get_message_date(message),
    )
def extract_history_entries(self, json_files: Iterable[Path], schema: str, prefix: str) -> Generator[Entry, None, None]:
    for json_file in json_files:
        logger.info(f'Processing activity in "{str(json_file)}"')
        for entry in json.load(json_file.open('r')):
            if entry['title'].startswith(prefix):
                try:
                    time = pytz.utc.localize(datetime.strptime(entry['time'], '%Y-%m-%dT%H:%M:%S.%fZ'))
                except ValueError:
                    time = json_to_datetime(entry['time'])

                extra_attributes = {}
                if entry.get('titleUrl'):
                    extra_attributes['url'] = entry['titleUrl']

                try:
                    yield Entry(
                        title=entry['title'].replace(prefix, '', 1),
                        description='',
                        source=self.entry_source,
                        schema=schema,
                        date_on_timeline=time,
                        extra_attributes=extra_attributes,
                    )
                except KeyboardInterrupt:
                    raise
                except:
                    logger.exception(f"Could not parse entry: {entry}")
                    raise
def extract_entries(self) -> Generator[Entry, None, None]:
    for ics_file in self.get_archive_files():
        with open(ics_file, 'r') as file:
            calendar = Calendar.from_ical(file.read())

        for event in calendar.walk('VEVENT'):
            event_metadata = defaultdict(dict)
            event_metadata['event']['start_date'] = datetime_to_json(self.normalize_date(event['DTSTART'].dt))
            if event.get('DTEND'):
                event_metadata['event']['end_date'] = datetime_to_json(self.normalize_date(event['DTEND'].dt))
            if event.get('DTSTAMP'):
                event_metadata['event']['creation_date'] = datetime_to_json(self.normalize_date(event['DTSTAMP'].dt))
            if event.get('LOCATION'):
                event_metadata['location']['name'] = event['LOCATION']

            yield Entry(
                source=self.entry_source,
                schema='event',
                title=str(event.get('SUMMARY', '')),
                description=str(event.get('DESCRIPTION', '')),
                date_on_timeline=self.normalize_date(event['DTSTART'].dt),
                extra_attributes=dict(event_metadata),
            )
def test_working_hours_multidate(self):
    """
    Test checking working hours when a lesson starts on one day and ends on another.
    This will be a frequent situation, because our teachers are in different timezones.
    """
    mixer.blend(WorkingHours, teacher=self.teacher, start='23:00', end='23:59', weekday=0)
    mixer.blend(WorkingHours, teacher=self.teacher, start='00:00', end='02:00', weekday=1)

    entry_besides_hours = TimelineEntry(
        teacher=self.teacher,
        start=self.tzdatetime(2032, 5, 3, 22, 0),  # does not fit
        end=self.tzdatetime(2032, 5, 4, 0, 30),
    )
    self.assertFalse(entry_besides_hours.is_fitting_working_hours())

    entry_besides_hours.start = self.tzdatetime(2032, 5, 3, 22, 30)  # does fit
    entry_besides_hours.end = self.tzdatetime(2032, 5, 4, 2, 30)  # does not fit
    self.assertFalse(entry_besides_hours.is_fitting_working_hours())

    entry_within_hours = TimelineEntry(
        teacher=self.teacher,
        start=self.tzdatetime(2032, 5, 3, 23, 30),
        end=self.tzdatetime(2032, 5, 4, 0, 30),
    )
    self.assertTrue(entry_within_hours.is_fitting_working_hours())
def extract_entries(self) -> Generator[Entry, None, None]:
    for json_file in self.get_archive_files():
        json_entries = json.load(json_file.open('r', encoding='utf-8'))
        for json_entry in json_entries:
            json_entry['source'] = self.entry_source
            json_entry.pop('id', None)
            serializer = EntrySerializer(data=json_entry)
            assert serializer.is_valid()
            yield Entry(**serializer.validated_data)
def test_two_teachers_for_single_slot(self):
    """
    Check that find_free_slots returns only slots of the selected teacher.
    """
    other_teacher = create_teacher()
    master_class = mixer.blend(lessons.MasterClass, host=other_teacher)
    entry = TimelineEntry(
        teacher=other_teacher,
        lesson=master_class,
        start=self.tzdatetime(2032, 5, 3, 14, 10),
        end=self.tzdatetime(2032, 5, 3, 14, 40),
    )
    entry.save()

    lesson_type = ContentType.objects.get_for_model(master_class)
    slots = self.teacher.find_free_slots(date=self.tzdatetime(2032, 5, 3), lesson_type=lesson_type.pk)
    # should not return anything: we are checking slots for self.teacher, not other_teacher
    self.assertEqual(len(slots), 0)
def browsing_history_entry(date_on_timeline: datetime, archive: 'BaseArchive', url: str, title: str = '') -> Entry:
    return Entry(
        title=title or '',
        description='',
        schema='activity.browsing.website',
        source=archive.entry_source,
        extra_attributes={
            'url': url,
        },
        date_on_timeline=date_on_timeline,
    )
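# A minimal usage sketch for browsing_history_entry (not from the source). It assumes a
# hypothetical archive object with an `entry_source` attribute and an iterable of
# (visited_at, url, title) tuples. The helper only builds Entry instances; saving them
# is left to the caller.
def _example_browsing_entries(archive: 'BaseArchive', visits) -> Generator[Entry, None, None]:
    for visited_at, url, title in visits:
        yield browsing_history_entry(
            date_on_timeline=visited_at,
            archive=archive,
            url=url,
            title=title,
        )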
def test_free_slots_for_lesson_type(self):
    """
    Test for getting free time slots for a certain lesson type.
    """
    master_class = mixer.blend(lessons.MasterClass, host=self.teacher)
    entry = TimelineEntry(
        teacher=self.teacher,
        lesson=master_class,
        start=self.tzdatetime(2032, 5, 3, 14, 10),
        end=self.tzdatetime(2032, 5, 3, 14, 40),
    )
    entry.save()

    lesson_type = ContentType.objects.get_for_model(master_class)

    slots = self.teacher.find_free_slots(date=self.tzdatetime(2032, 5, 3), lesson_type=lesson_type.pk)
    self.assertEqual(len(slots), 1)

    slots = self.teacher.find_free_slots(date=self.tzdatetime(2032, 5, 5), lesson_type=lesson_type.pk)
    self.assertEqual(len(slots), 0)  # there are no master classes planned on 2032-05-05
def test_schedule_existing_entry(self):
    """
    Create a timeline entry that class.__get_entry should return instead of creating a new one.
    """
    lesson = products.OrdinaryLesson.get_default()
    c = self._buy_a_lesson(lesson)

    date = self.tzdatetime(2016, 8, 17, 10, 1)
    entry = TimelineEntry(
        teacher=self.host,
        start=date,
        lesson=lesson,
    )
    entry.save()

    c.schedule(
        teacher=self.host,
        date=self.tzdatetime(2016, 8, 17, 10, 1),
        allow_besides_working_hours=True,
    )
    c.save()

    self.assertEqual(c.timeline, entry)
def _generate_video_previews(entry: Entry, overwrite=False):
    original_path = Path(entry.extra_attributes['file']['path'])
    if 'duration' not in entry.extra_attributes.get('media', {}):
        logger.warning(
            f"Video entry #{entry.id} ({entry.extra_attributes['file']['path']}) does not have a duration"
        )
        return
    entry.extra_attributes['previews'] = {}
    for preview_name, preview_params in settings.VIDEO_PREVIEW_SIZES.items():
        preview_path = _get_previews_dir(entry, mkdir=True) / f'{preview_name}.mp4'
        try:
            generate_video_preview(
                original_path,
                preview_path,
                video_duration=entry.extra_attributes['media']['duration'],
                max_dimensions=(preview_params['width'], preview_params['height']),
                overwrite=overwrite,
            )
            entry.extra_attributes['previews'][preview_name] = str(preview_path)
        except FileExistsError:
            logger.debug(f'"{preview_name}" preview for #{entry.pk} already exists.')
            entry.extra_attributes['previews'][preview_name] = str(preview_path)
        except VideoDurationError as e:
            logger.debug(
                f'Could not generate video preview for entry #{entry.pk} ({str(original_path)}). {str(e)}'
            )
            break  # Same error will happen for other preview sizes
        except KeyboardInterrupt:
            raise
        except:
            logger.exception(
                f'Could not generate video preview for entry #{entry.pk} ({str(original_path)}).'
            )
            raise
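# A hedged sketch of how the three preview generators could be dispatched by mimetype.
# The function name and the mimetype lookup are assumptions for illustration; only
# _generate_pdf_previews, _generate_image_previews and _generate_video_previews come
# from the source.
def _example_generate_previews(entry: Entry, overwrite=False):
    mimetype = entry.extra_attributes.get('file', {}).get('mimetype') or ''
    if mimetype == 'application/pdf':
        _generate_pdf_previews(entry, overwrite=overwrite)
    elif mimetype.startswith('image/'):
        _generate_image_previews(entry, overwrite=overwrite)
    elif mimetype.startswith('video/'):
        _generate_video_previews(entry, overwrite=overwrite)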
def test_free_slots_for_lesson(self):
    """
    Test for getting free time slots for a particular teacher with a particular lesson.
    """
    other_teacher = create_teacher()
    master_class = mixer.blend(lessons.MasterClass, host=self.teacher)
    other_master_class = mixer.blend(lessons.MasterClass, host=other_teacher)

    entry = TimelineEntry(
        teacher=self.teacher,
        lesson=master_class,
        start=self.tzdatetime(2032, 5, 3, 14, 10),
        end=self.tzdatetime(2032, 5, 3, 14, 40),
    )
    entry.save()

    other_entry = TimelineEntry(
        teacher=other_teacher,
        lesson=other_master_class,
        start=self.tzdatetime(2032, 5, 3, 14, 10),
        end=self.tzdatetime(2032, 5, 3, 14, 40),
    )
    other_entry.save()

    slots = self.teacher.find_free_slots(self.tzdatetime(2032, 5, 3), lesson_id=master_class.pk)
    self.assertEqual(len(slots), 1)

    slots = self.teacher.find_free_slots(self.tzdatetime(2032, 5, 3), lesson_id=other_master_class.pk)
    self.assertEqual(len(slots), 0)
def entry_from_point(self, point) -> Entry:
    return Entry(
        schema='activity.location',
        source=self.entry_source,
        title=getattr(point, 'name') or '',
        description=getattr(point, 'description') or getattr(point, 'comment') or '',
        extra_attributes={
            'location': {
                'latitude': point.latitude,
                'longitude': point.longitude,
                'altitude': point.elevation,
            },
        },
        date_on_timeline=datetime_to_json(point.time),
    )
def geolocation_entry(date_on_timeline: datetime, latitude: float, longitude: float, archive: 'BaseArchive',
                      altitude: float = None, accuracy: int = None, title: str = '') -> Entry:
    entry = Entry(
        title=title or '',
        description='',
        schema='activity.location',
        source=archive.entry_source,
        extra_attributes={
            'location': {
                'latitude': latitude,
                'longitude': longitude,
            },
        },
        date_on_timeline=date_on_timeline,
    )
    if altitude is not None:
        entry.extra_attributes['location']['altitude'] = altitude
    if accuracy is not None:
        entry.extra_attributes['location']['accuracy'] = accuracy
    return entry
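# A minimal usage sketch for geolocation_entry (not from the source), assuming a
# hypothetical iterable of point dicts with 'time', 'lat' and 'lon' keys plus optional
# 'alt' and 'accuracy'. Absent optional values stay out of extra_attributes because the
# helper only adds them when they are not None.
def _example_location_entries(archive: 'BaseArchive', points) -> Generator[Entry, None, None]:
    for point in points:
        yield geolocation_entry(
            date_on_timeline=point['time'],
            latitude=point['lat'],
            longitude=point['lon'],
            archive=archive,
            altitude=point.get('alt'),
            accuracy=point.get('accuracy'),
        )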
def entry_from_file_path(file_path: Path, source: BaseSource) -> Entry:
    """
    Creates an Entry template from a file path, filling the fields with file metadata.
    """
    mimetype = get_mimetype(file_path)
    entry = Entry(
        title=file_path.name,
        source=source.entry_source,
        schema=get_schema_from_mimetype(mimetype),
        extra_attributes={
            'file': {
                'checksum': get_checksum(file_path),
                'path': str(file_path.resolve()),
                'mimetype': mimetype,
            },
        },
    )
    entry.date_on_timeline = get_file_entry_date(entry)

    if mimetype:
        if mimetype.startswith('image/'):
            entry.schema = 'file.image'
            entry.extra_attributes.update(get_image_extra_attributes(file_path))
        if mimetype.startswith('video/'):
            entry.schema = 'file.video'
            try:
                entry.extra_attributes.update(get_video_extra_attributes(file_path))
            except FileFormatError:
                logger.exception(f"Could not read metadata for video {str(file_path)}")
        if mimetype.startswith('audio/'):
            entry.schema = 'file.audio'
            entry.extra_attributes.update(get_audio_extra_attributes(file_path))
        if mimetype.startswith('text/'):
            entry.schema = 'file.text'
            with file_path.open('r') as text_file:
                entry.description = text_file.read(settings.MAX_PLAINTEXT_PREVIEW_SIZE)

    return entry
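# A minimal usage sketch for entry_from_file_path (not from the source), assuming a
# hypothetical source with a `root_path` Path attribute. It walks the directory tree
# and builds one Entry per file; persisting the entries is up to the caller.
def _example_file_entries(source: BaseSource) -> Generator[Entry, None, None]:
    for file_path in source.root_path.rglob('*'):
        if file_path.is_file():
            yield entry_from_file_path(file_path, source)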
def extract_entries(self) -> Generator[Entry, None, None]:
    default_currency = 'EUR'
    default_timezone = 'Europe/Berlin'  # TODO: If this thing gets a million users, that assumption could be wrong
    income_types = ('Income', 'Direct Debit Reversal')
    for csv_file in self.get_archive_files():
        for line in csv.DictReader(codecs.iterdecode(csv_file.open('rb'), 'utf-8'), delimiter=',', quotechar='"'):
            schema = 'finance.income' if line['Transaction type'] in income_types else 'finance.expense'
            you = {
                'currency': default_currency,
                'amount': Decimal(line['Amount (EUR)']).copy_abs(),
                'name': None,
            }
            other_party = {
                'currency': line['Type Foreign Currency'] or default_currency,
                'amount': Decimal(line['Amount (Foreign Currency)'] or line['Amount (EUR)']).copy_abs(),
                'name': line['Payee'],
            }
            sender = you if schema == 'finance.expense' else other_party
            recipient = other_party if schema == 'finance.expense' else you

            # The transactions don't have a time. Set it to noon, Berlin time
            entry_date = pytz.timezone(default_timezone)\
                .localize(datetime.strptime(line['Date'], '%Y-%m-%d'))\
                .replace(hour=12)\
                .astimezone(pytz.UTC)

            yield Entry(
                schema=schema,
                source=self.entry_source,
                title=line['Transaction type'],
                description=line['Payment reference'],
                extra_attributes={
                    'sender_amount': str(sender['amount']),
                    'sender_currency': sender['currency'],
                    'sender_name': sender['name'],
                    'recipient_amount': str(recipient['amount']),
                    'recipient_currency': recipient['currency'],
                    'recipient_name': recipient['name'],
                },
                date_on_timeline=entry_date,
            )
def process(self, force=False) -> Tuple[int, int]:
    filters = {}
    if self.author_name:
        filters['only_authors'] = [self.author_name]
    commits = Repository(self.repo_url, **filters).traverse_commits()

    self.get_entries().delete()

    entries_to_create = []
    for commit in commits:
        entries_to_create.append(Entry(
            title=commit.msg,
            description=commit.hash,
            date_on_timeline=commit.committer_date.astimezone(pytz.UTC),
            schema='commit',
            source=self.entry_source,
            extra_attributes={
                'hash': commit.hash,
                'url': self.get_commit_url(commit),
                'author': {
                    'email': commit.author.email,
                    'name': commit.author.name,
                },
                'changes': {
                    'files': commit.files,
                    'insertions': commit.insertions,
                    'deletions': commit.deletions,
                },
                'repo': {
                    'name': self.get_repo_name() or commit.project_name,
                    'url': self.get_repo_url(),
                },
            },
        ))

    Entry.objects.bulk_create(entries_to_create)
    return len(entries_to_create), 0