def repeat_task(task, reset_task=True):
    """Create the next occurrence of a repeating task.

    If *task* has a repeat pattern, clone it and compute new values for
    due_date, start_date, alarm and hide date according to that pattern.

    Args:
        task: base task to repeat
        reset_task: when True, clear the repeat pattern on the original
            *task* so it is not repeated a second time

    Returns:
        New task with updated dates, or None when *task* does not repeat.
    """
    # repeat_from: 1 = repeat from completed date, 0 = from start date
    # TODO: repeat_end (??) check whether this is actually used
    if not task.repeat_pattern or task.repeat_pattern == "Norepeat":
        return None
    _LOG.info("repeat_task %r", task)
    ntask = task.clone()
    ntask.uuid = OBJ.generate_uuid()
    ntask.update_modify_time()
    repeat_pattern = task.repeat_pattern
    repeat_from = task.repeat_from
    if repeat_pattern == "WITHPARENT":
        # inherit the pattern from the parent task
        if not ntask.parent_uuid:
            # orphaned WITHPARENT task - nothing to inherit from;
            # NOTE: Logger.warn is deprecated, use warning()
            _LOG.warning("repeat_task WITHPARENT parent_uuid == None: %r",
                         task)
            return ntask
        repeat_pattern = ntask.parent.repeat_pattern
        repeat_from = ntask.parent.repeat_from
    offset = None
    if task.due_date:
        # move due date by the pattern; shift start date by the same delta
        ntask.due_date = _move_date_repeat(
            _get_date(task.due_date, task.completed, repeat_from),
            repeat_pattern)
        offset = ntask.due_date - task.due_date
        if ntask.start_date:
            ntask.start_date += offset
    elif ntask.start_date:
        # no due date - move the start date directly by the pattern
        ntask.start_date = _move_date_repeat(
            _get_date(task.start_date, task.completed, repeat_from),
            repeat_pattern)
    if task.alarm:
        if task.alarm_pattern:
            # alarm derived from its own pattern
            update_task_alarm(ntask)
        elif offset:
            # keep the alarm at the same distance from the due date
            ntask.alarm += offset
        else:
            ntask.alarm = _move_date_repeat(task.alarm, repeat_pattern)
    update_task_hide(ntask)
    ntask.completed = None
    # reset repeat pattern on previous task
    if reset_task:
        task.repeat_pattern = "Norepeat"
    return ntask
def connect(filename, debug=False, *args, **kwargs):
    """ Create connection to database & initiate it.

    Creates the sqlite engine, applies the schema DDL, configures the
    session factory, optionally installs query-timing listeners, then
    bootstraps the database (deviceId) and purges old deleted rows.

    Args:
        filename: path to sqlite database file
        debug: (bool) turn on debugging
        args, kwargs: other arguments for sqlalchemy engine

    Return:
        Sqlalchemy Session class
    """
    _LOG.info('connect %r', (filename, args, kwargs))
    # detect_types lets sqlite3 convert declared date/time columns back
    # to Python objects; native_datetime avoids double conversion
    engine = sqlalchemy.create_engine(
        "sqlite:///" + filename,
        echo=debug,
        connect_args={'detect_types': sqlite3.PARSE_DECLTYPES
                      | sqlite3.PARSE_COLNAMES},
        native_datetime=True)
    # apply raw schema statements (tables, indexes, triggers)
    for schema in sqls.SCHEMA_DEF:
        for sql in schema:
            engine.execute(sql)
    sqls.fix_synclog(engine)
    objects.Session.configure(bind=engine)  # pylint: disable=E1120
    if debug:
        # log wall-clock time of every statement when debugging
        @sqlalchemy.event.listens_for(Engine, "before_cursor_execute")
        def before_cursor_execute(_conn, _cursor,  # pylint: disable=W0612
                                  _stmt, _params, context, _executemany):
            context.app_query_start = time.time()

        @sqlalchemy.event.listens_for(Engine, "after_cursor_execute")
        def after_cursor_execute(_conn, _cursor,  # pylint: disable=W0612
                                 _stmt, _params, context, _executemany):
            _LOG.debug("Query time: %.02fms",
                       (time.time() - context.app_query_start) * 1000)
    _LOG.info('Database create_all START')
    objects.Base.metadata.create_all(engine)
    _LOG.info('Database create_all COMPLETED')
    # bootstrap
    _LOG.info('Database bootstrap START')
    session = objects.Session()
    # 1. deviceId - create a unique identifier for this database on
    # first run; reused afterwards
    conf = session.query(  # pylint: disable=E1101
        objects.Conf).filter_by(key='deviceId').first()
    if conf is None:
        conf = objects.Conf(key='deviceId')
        conf.val = objects.generate_uuid()
        session.add(conf)  # pylint: disable=E1101
        _LOG.info('DB bootstrap: create deviceId=%r', conf.val)
        session.commit()  # pylint: disable=E1101
    _LOG.info('Database bootstrap cleanup')
    # 2. cleanup - drop orphaned links and purge rows soft-deleted more
    # than 90 days ago
    engine.execute("delete from task_tags "
                   "where task_uuid not in (select uuid from tasks)"
                   "or tag_uuid not in (select uuid from tags)")
    date_threshold = datetime.datetime.now() - datetime.timedelta(days=90)
    _LOG.debug('Cleanup deleted tasks older than %r', date_threshold)
    engine.execute("delete from tasks "
                   "where deleted < ? and prevent_auto_purge = 0",
                   (date_threshold, ))
    _LOG.debug('Cleanup deleted folders older than %r', date_threshold)
    engine.execute("delete from folders where deleted < ?",
                   (date_threshold, ))
    _LOG.debug('Cleanup deleted goals older than %r', date_threshold)
    engine.execute("delete from goals where deleted < ?",
                   (date_threshold, ))
    _LOG.debug('Cleanup deleted tags older than %r', date_threshold)
    engine.execute("delete from tags where deleted < ?",
                   (date_threshold, ))
    _LOG.debug('Cleanup deleted pages older than %r', date_threshold)
    engine.execute("delete from notebook_pages where deleted < ?",
                   (date_threshold, ))
    # incomplete sync log entries are useless after a restart
    _LOG.debug("Cleanup synclog")
    engine.execute("delete from synclog where sync_time is null")
    _LOG.info('Database bootstrap COMPLETED')
    return objects.Session
def _dump_tasks(session, notify_cb, folders_cache, contexts_cache,
                goals_cache):
    """Serialize all non-deleted tasks to JSON-ready dictionaries.

    Produces row dicts for tasks themselves plus their alarms and the
    folder/context/goal link tables, reporting progress via *notify_cb*.

    Args:
        session: sqlalchemy session used to query tasks
        notify_cb: progress callback called as notify_cb(percent, message)
        folders_cache: mapping of folder uuid -> numeric id
        contexts_cache: mapping of context uuid -> numeric id
        goals_cache: mapping of goal uuid -> numeric id

    Returns:
        (res, tasks_cache) where res maps table name to its list of row
        dicts and tasks_cache maps task uuid -> numeric id.
    """
    notify_cb(16, _("Saving task, alarms..."))
    _LOG.info("dump_database_to_json: tasks")
    tasks_cache = _build_uuid_map(session, objects.Task)
    tasks = []
    alarms = []
    task_folders = []
    task_contexts = []
    task_goals = []
    active_tasks = session.query(objects.Task).filter(  # pylint: disable=E1101
        objects.Task.deleted.is_(None))
    for task in active_tasks:
        # values reused by every row derived from this task
        task_id = tasks_cache[task.uuid]
        created = fmt_date(task.created)
        modified = fmt_date(task.modified or task.created)
        tasks.append({
            '_id': task_id,
            'parent_id': (tasks_cache[task.parent_uuid]
                          if task.parent_uuid else 0),
            'uuid': task.uuid,
            'created': created,
            'modified': modified,
            'completed': fmt_date(task.completed),
            'deleted': fmt_date(task.deleted),
            'ordinal': task.ordinal or 0,
            'title': task.title or '',
            'note': task.note or "",
            'type': task.type or 0,
            'starred': 1 if task.starred else 0,
            'status': task.status or 0,
            'priority': task.priority or 0,
            'importance': task.importance or 0,
            'start_date': fmt_date(task.start_date),
            'start_time_set': task.start_time_set or 0,
            'due_date': fmt_date(task.due_date),
            'due_date_project': fmt_date(task.due_date_project),
            'due_time_set': task.due_time_set or 0,
            'due_date_mod': task.due_date_mod or 0,
            'floating_event': task.floating_event,
            'duration': task.duration or 0,
            'energy_required': task.energy_required,
            'repeat_from': task.repeat_from or 0,
            'repeat_pattern': task.repeat_pattern or "",
            'repeat_end': task.repeat_end or 0,
            'hide_pattern': task.hide_pattern or "",
            'hide_until': fmt_date(task.hide_until),
            'prevent_auto_purge': task.prevent_auto_purge or 0,
            'trash_bin': task.trash_bin or 0,
            'metainf': task.metainf or '',
        })
        if task.alarm:
            # alarms are numbered sequentially as they are emitted
            alarms.append({
                '_id': len(alarms),
                'task_id': task_id,
                'uuid': objects.generate_uuid(),
                'created': created,
                'modified': modified,
                'alarm': fmt_date(task.alarm),
                'reminder': 0,
                'active': 1,
                'note': "",
            })
        if task.folder_uuid:
            task_folders.append({
                'task_id': task_id,
                'folder_id': folders_cache[task.folder_uuid],
                'created': created,
                'modified': modified,
            })
        if task.context_uuid:
            task_contexts.append({
                'task_id': task_id,
                'context_id': contexts_cache[task.context_uuid],
                'created': created,
                'modified': modified,
            })
        if task.goal_uuid:
            task_goals.append({
                'task_id': task_id,
                'goal_id': goals_cache[task.goal_uuid],
                'created': created,
                'modified': modified,
            })
    res = {'task': tasks,
           'alarm': alarms,
           'task_folder': task_folders,
           'task_context': task_contexts,
           'task_goal': task_goals}
    notify_cb(49, _("Saved %d tasks") % len(tasks))
    notify_cb(51, _("Saved %d alarms") % len(alarms))
    notify_cb(52, _("Saved %d task folders") % len(task_folders))
    notify_cb(53, _("Saved %d task contexts") % len(task_contexts))
    notify_cb(54, _("Saved %d task goals") % len(task_goals))
    return res, tasks_cache