示例#1
0
    def write_all(self, tasks_ftype=None, tasks_fname=None,
                  log_ftype=None, log_fname=None):
        """Writes out projects, tasks, task groupings, and working slots to
        files as dictated by configuration settings.

        """
        self.write_projects()
        # Fill in any argument not supplied from the configuration.
        tasks_ftype = (self.config['TASKS_FTYPE_OUT'] if tasks_ftype is None
                       else tasks_ftype)
        log_ftype = (self.config['LOG_FTYPE_OUT'] if log_ftype is None
                     else log_ftype)
        tasks_fname = (self.config['TASKS_FNAME_OUT'] if tasks_fname is None
                       else tasks_fname)
        log_fname = (self.config['LOG_FNAME_OUT'] if log_fname is None
                     else log_fname)
        # The only special case so far: tasks and log share one XML file.
        shared_xml = (tasks_ftype == FTYPE_XML
                      and log_ftype == FTYPE_XML
                      and tasks_fname == log_fname)
        if shared_xml:
            from backend.xml import XmlBackend
            # TODO: Use the context manager at other places too.
            with open_backed_up(tasks_fname,
                                'wb',
                                suffix=self.config['BACKUP_SUFFIX']) \
                    as outfile:
                XmlBackend.write_all(self.tasks, self.groups, self.wslots,
                                     outfile)
        else:
            # FIXME: The type of file is not looked at, unless the file name
            # is supplied too. Provide some default filename for the
            # supported file types.
            self.write_log(outftype=log_ftype, outfname=log_fname)
            self.write_tasks(outftype=tasks_ftype, outfname=tasks_fname)
示例#2
0
 def write_log(self, outfname=None, outftype=None):
     """TODO: Update docstring."""
     if outfname is None:
         # Fall back to the configured output file name and type.
         outfname = self.config['LOG_FNAME_OUT']
         outftype = self.config['LOG_FTYPE_OUT']
     if outftype == FTYPE_PICKLE:
         import pickle
         with open(outfname, 'wb') as outfile:
             for slot in self.wslots:
                 pickle.dump(slot, outfile)
     elif outftype == FTYPE_XML:
         from backend.xml import XmlBackend
         # XXX This assumes that `write_log' was called soon after
         # `write_tasks'.
         appending = self._xml_header_written
         with open_backed_up(outfname,
                             'r+b' if appending else 'wb',
                             suffix=self.config['BACKUP_SUFFIX']) \
                 as outfile:
             if appending:
                 # Skip before the last line (assumed to read
                 # "</wyrdinData>").
                 outfile.seek(-len(b'</wyrdinData>\n'), 2)
             XmlBackend.write_workslots(self.wslots, outfile, not appending)
             if appending:
                 # Put back the closing root tag we seeked past.
                 outfile.write(b'</wyrdinData>\n')
             self._xml_header_written = True
     else:
         raise NotImplementedError("Session.write_log() is not "
                                   "implemented for this type of files.")
示例#3
0
    def write_tasks(self, outfname=None, outftype=None):
        """
        Writes out the current list of tasks and task groupings from memory to
        a file.

        Arguments:
            outfname -- output file name; defaults to
                config['TASKS_FNAME_OUT'] (in which case the configured
                file type is used too)
            outftype -- output file type (FTYPE_CSV, FTYPE_XML or
                FTYPE_PICKLE)

        Raises NotImplementedError for unsupported file types.

        TODO: Update docstring.

        """
        if DEBUG:
            print("Tasks:")
            print("------")
            for task in self.tasks:
                pprint(task)
            print("")
        if outfname is None:
            outfname = self.config['TASKS_FNAME_OUT']
            outftype = self.config['TASKS_FTYPE_OUT']
        if outftype == FTYPE_CSV:
            # FIXME: May have been broken when groups were added.
            import csv
            # Fixed: the file used to be opened in the default 'r' mode,
            # which makes csv.writer's writerow() fail.  Open for writing,
            # with newline='' as the csv module docs require.
            with open(outfname, 'w', newline='') as outfile:
                taskwriter = csv.writer(outfile)
                for task in self.tasks:
                    taskwriter.writerow(task)
                for group in self.groups:
                    taskwriter.writerow(group)
        elif outftype == FTYPE_XML:
            from backend.xml import XmlBackend
            # Append into the existing document if its header was written
            # already; otherwise start a fresh file.
            mode = 'r+b' if self._xml_header_written else 'wb'
            with open_backed_up(outfname, mode,
                                suffix=self.config['BACKUP_SUFFIX']) \
                    as outfile:
                if self._xml_header_written:
                    # Skip before the last line (assumed to read
                    # "</wyrdinData>").
                    outfile.seek(-len(b'</wyrdinData>\n'), 2)
                else:
                    outfile.seek(0, 2)
                XmlBackend.write_tasks(self.tasks,
                                       self.groups,
                                       outfile=outfile,
                                       standalone=not self._xml_header_written)
                if self._xml_header_written:
                    # Restore the closing root tag we seeked past.
                    outfile.write(b'</wyrdinData>\n')
                self._xml_header_written = True
        elif outftype == FTYPE_PICKLE:
            import pickle
            with open_backed_up(outfname, 'wb',
                                suffix=self.config['BACKUP_SUFFIX']) \
                    as outfile:
                for task in self.tasks:
                    pickle.dump(task, outfile)
                for group in self.groups:
                    pickle.dump(group, outfile)
        else:
            raise NotImplementedError("Session.write_tasks() is not "
                                      "implemented for this type of files.")
示例#4
0
    def write_tasks(self, outfname=None, outftype=None):
        """
        Writes out the current list of tasks and task groupings from memory to
        a file.

        Arguments:
            outfname -- output file name; defaults to
                config['TASKS_FNAME_OUT'] (in which case the configured
                file type is used too)
            outftype -- output file type (FTYPE_CSV, FTYPE_XML or
                FTYPE_PICKLE)

        Raises NotImplementedError for unsupported file types.

        TODO: Update docstring.

        """
        if DEBUG:
            print("Tasks:")
            print("------")
            for task in self.tasks:
                pprint(task)
            print("")
        if outfname is None:
            outfname = self.config['TASKS_FNAME_OUT']
            outftype = self.config['TASKS_FTYPE_OUT']
        if outftype == FTYPE_CSV:
            # FIXME: May have been broken when groups were added.
            import csv
            # Fixed: the file used to be opened in the default 'r' mode,
            # which makes csv.writer's writerow() fail.  Open for writing,
            # with newline='' as the csv module docs require.
            with open(outfname, 'w', newline='') as outfile:
                taskwriter = csv.writer(outfile)
                for task in self.tasks:
                    taskwriter.writerow(task)
                for group in self.groups:
                    taskwriter.writerow(group)
        elif outftype == FTYPE_XML:
            from backend.xml import XmlBackend
            # Append into the existing document if its header was written
            # already; otherwise start a fresh file.
            mode = 'r+b' if self._xml_header_written else 'wb'
            with open_backed_up(outfname, mode,
                                suffix=self.config['BACKUP_SUFFIX']) \
                    as outfile:
                if self._xml_header_written:
                    # Skip before the last line (assumed to read
                    # "</wyrdinData>").
                    outfile.seek(-len(b'</wyrdinData>\n'), 2)
                else:
                    outfile.seek(0, 2)
                XmlBackend.write_tasks(self.tasks, self.groups,
                                       outfile=outfile,
                                       standalone=not self._xml_header_written)
                if self._xml_header_written:
                    # Restore the closing root tag we seeked past.
                    outfile.write(b'</wyrdinData>\n')
                self._xml_header_written = True
        elif outftype == FTYPE_PICKLE:
            import pickle
            with open_backed_up(outfname, 'wb',
                                suffix=self.config['BACKUP_SUFFIX']) \
                    as outfile:
                for task in self.tasks:
                    pickle.dump(task, outfile)
                for group in self.groups:
                    pickle.dump(group, outfile)
        else:
            raise NotImplementedError("Session.write_tasks() is not "
                                      "implemented for this type of files.")
示例#5
0
 def read_log(self, infname=None, inftype=None):
     """Reads the log of how time was spent.

     Arguments:
         infname -- input file name; defaults to config['LOG_FNAME_IN']
             (in which case the configured file type is used too)
         inftype -- input file type (FTYPE_PICKLE or FTYPE_XML)

     Raises NotImplementedError for unsupported file types.

     """
     # TODO: Think of when this really has to be done, and when only
     # a subset of the log needs to be read. In the latter case, allow for
     # doing so.
     if infname is None:
         infname = self.config['LOG_FNAME_IN']
         inftype = self.config['LOG_FTYPE_IN']
     # If no work slots have been written to the file yet, do not load any.
     if not os.path.exists(infname):
         return
     if inftype == FTYPE_PICKLE:
         import pickle
         # (Removed a dead re-check of os.path.exists(infname) that was
         # unreachable after the early return above.)
         with open(infname, 'rb') as infile:
             self.wslots = []
             # Unpickle work slots one by one until the file is exhausted.
             while True:
                 try:
                     worktime = pickle.load(infile)
                     self.wslots.append(worktime)
                 except EOFError:
                     break
     elif inftype == FTYPE_XML:
         from backend.xml import XmlBackend
         with open(infname, 'rb') as infile:
             self.wslots = XmlBackend.read_workslots(infile)
     else:
         raise NotImplementedError("Session.read_log() is not "
                                   "implemented for this type of files.")
示例#6
0
 def read_groups(self, infname=None, inftype=None):
     # TODO: docstring
     if infname is None:
         # Fall back to the configured input file name and type.
         infname = self.config['TASKS_FNAME_IN']
         inftype = self.config['TASKS_FTYPE_IN']
     # If no tasks have been written yet, don't load any.
     if not os.path.exists(infname):
         return
     if inftype != FTYPE_XML:
         raise NotImplementedError("Session.read_groups() is not "
                                   "implemented for this type of files.")
     from backend.xml import XmlBackend
     # TODO The tasks_dict used just here is a provisionary solution.
     tasks_dict = {task.id: task for task in self.tasks}
     with open(infname, 'rb') as infile:
         self.groups = XmlBackend.read_groups(infile, tasks_dict)
示例#7
0
 def read_groups(self, infname=None, inftype=None):
     # TODO: docstring
     if infname is None:
         # Fall back to the configured input file name and type.
         infname = self.config['TASKS_FNAME_IN']
         inftype = self.config['TASKS_FTYPE_IN']
     # If no tasks have been written yet, don't load any.
     if not os.path.exists(infname):
         return
     if inftype != FTYPE_XML:
         raise NotImplementedError("Session.read_groups() is not "
                                   "implemented for this type of files.")
     from backend.xml import XmlBackend
     # TODO The tasks_dict used just here is a provisionary solution.
     tasks_dict = {task.id: task for task in self.tasks}
     with open(infname, 'rb') as infile:
         self.groups = XmlBackend.read_groups(infile, tasks_dict)
示例#8
0
    def read_tasks(self, infname=None, inftype=None):
        """
        Reads in tasks from files listing the user's tasks. Which files these
        are, can be found in `self.config'.

        Arguments:
            infname -- input file name; defaults to
                config['TASKS_FNAME_IN'] (in which case the configured
                file type is used too)
            inftype -- input file type (FTYPE_CSV, FTYPE_XML or
                FTYPE_PICKLE)

        Raises NotImplementedError for unsupported file types.

        TODO: Update docstring.

        """
        if infname is None:
            infname = self.config['TASKS_FNAME_IN']
            inftype = self.config['TASKS_FTYPE_IN']
        # If no tasks have been written yet, don't load any.
        if not os.path.exists(infname):
            return
        # This is a primitive implementation for the backend as a CSV.
        if inftype == FTYPE_CSV:
            import csv
            # Read the tasks from the file to the memory.
            with open(infname, newline='') as infile:
                taskreader = csv.reader(infile)
                self.tasks = [task for task in taskreader]
        elif inftype == FTYPE_XML:
            from backend.xml import XmlBackend
            with open(infname, 'rb') as infile:
                self.tasks = XmlBackend.read_tasks(infile)
        elif inftype == FTYPE_PICKLE:
            import pickle
            # (Removed a dead re-check of os.path.exists(infname) that was
            # unreachable after the early return above.)
            with open(infname, 'rb') as infile:
                self.tasks = []
                # Unpickle tasks one by one until the file is exhausted.
                while True:
                    try:
                        task = pickle.load(infile)
                        self.tasks.append(task)
                    except EOFError:
                        break
        else:
            raise NotImplementedError("Session.read_tasks() is not "
                                      "implemented for this type of files.")