Example #1
    def setup(self):
        if not TaskWarriorShellout.can_use():
            # Sometimes the 'task' command line tool is not installed.
            raise nose.SkipTest("taskwarrior not installed")

        # Create some temporary config stuff
        fd, fname = tempfile.mkstemp(prefix='taskw-testsrc')
        dname = tempfile.mkdtemp(prefix='taskw-tests-data')

        with open(fname, 'w') as f:
            f.writelines([
                'data.location=%s\n' % dname,
                'uda.somestring.label=Testing String\n',
                'uda.somestring.type=string\n',
                'uda.somedate.label=Testing Date\n',
                'uda.somedate.type=date\n',
                'uda.somenumber.label=Testing Number\n',
                'uda.somenumber.type=numeric\n',
            ])

        # Create empty .data files
        for piece in ['completed', 'pending', 'undo']:
            with open(os.path.sep.join([dname, piece + '.data']), 'w'):
                pass

        # Save names for .tearDown()
        self.fname, self.dname = fname, dname

        # Create the taskwarrior db object that each test will use.
        self.tw = TaskWarriorShellout(config_filename=fname, marshal=True)
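With setup() in place, each test method can use self.tw directly. A minimal sketch of such a test (the test name and assertion are illustrative, not taken from the original suite):

    def test_add_and_load(self):
        # Illustrative only: add a task through the shellout backend and
        # check that it comes back as pending when the database is reloaded.
        added = self.tw.task_add('write documentation')
        tasks = self.tw.load_tasks()
        assert any(t['uuid'] == added['uuid'] for t in tasks['pending'])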
Example #2
class TestRecursibe(object):
    def setup(self):
        if not TaskWarriorShellout.can_use():
            # Sometimes the 'task' command line tool is not installed.
            raise nose.SkipTest("taskwarrior not installed")

        # Create some temporary config stuff
        fd, fname = tempfile.mkstemp(prefix='taskw-testsrc')
        dname = tempfile.mkdtemp(prefix='taskw-tests-data')

        with open(fname, 'w') as f:
            f.writelines([
                'data.location=%s\n' % dname,
                'uda.somestring.label=Testing String\n',
                'uda.somestring.type=string\n',
                'uda.somedate.label=Testing Date\n',
                'uda.somedate.type=date\n',
                'uda.somenumber.label=Testing Number\n',
                'uda.somenumber.type=numeric\n',
            ])

        # Create empty .data files
        for piece in ['completed', 'pending', 'undo']:
            with open(os.path.sep.join([dname, piece + '.data']), 'w'):
                pass

        # Save names for .tearDown()
        self.fname, self.dname = fname, dname

        # Create the taskwarrior db object that each test will use.
        self.tw = TaskWarriorShellout(config_filename=fname, marshal=True)

    def tearDown(self):
        os.remove(self.fname)
        shutil.rmtree(self.dname)

    def test_set_dep_on_one_uuid(self):
        task1 = self.tw.task_add('task1')
        task2 = self.tw.task_add('task2', depends=[task1['uuid']])
        eq_(task2['depends'][0], task1['uuid'])

    def test_set_dep_on_two_uuid(self):
        task1 = self.tw.task_add('task1')
        task2 = self.tw.task_add('task2')
        depends = [task1['uuid'], task2['uuid']]
        task3 = self.tw.task_add('task3', depends=depends)
        eq_(task3['depends'], [task1['uuid'], task2['uuid']])
Example #3
class TwCurrent(object):

    def __init__(self, file=None):
        self.tw = TaskWarriorShellout()
        self.tw.config_filename = file

    def get_current(self):
        tw = TaskWarriorShellout()
        tw.config_filename = self.tw.config_filename
        tasks = tw.filter_tasks({'tags.contains': 'current'})
        current = tasks[0]
        return current

    def set_current(self, id):
        tasks = self.tw.filter_tasks({'tags.contains': 'current'})
        for task in tasks:
            task['tags'].remove('current')
            self.tw.task_update(task)
        # Fetch the task being promoted and add the 'current' tag to it.
        id, task = self.tw.get_task(id=id)
        try:
            task['tags'].append('current')
        except KeyError:
            task['tags'] = ['current']
        self.tw.task_update(task)

    def get_pending(self):
        tasks = self.tw.filter_tasks({'status': 'pending'})
        return tasks
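A minimal usage sketch for TwCurrent (the taskrc path and the chosen task are hypothetical):

tw_current = TwCurrent(file='/home/user/.taskrc')   # hypothetical config path
pending = tw_current.get_pending()                   # every task with status:pending
tw_current.set_current(pending[0]['id'])             # move the 'current' tag to one of them
print(tw_current.get_current()['description'])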
Example #4
 def get_converted_hours(self, estimated_hours):
     tw = TaskWarriorShellout()
     calc = tw._execute('calc', estimated_hours)
     return (calc[0].rstrip())
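get_converted_hours() delegates the arithmetic to `task calc`, so the exact result string comes from whatever taskwarrior version is installed. A hedged usage sketch:

 # Illustrative only: the duration expression is evaluated by the `task` binary,
 # and the formatting of the returned value depends on its version.
 converted = self.get_converted_hours('2h + 30min')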
Example #5
def synchronize(issues, conf):

    def _bool_option(section, option, default):
        try:
            return section in conf.sections() and \
                asbool(conf.get(section, option, default))
        except NoOptionError:
            return default

    notify = _bool_option('notifications', 'notifications', 'False')
    experimental = _bool_option('general', 'experimental', 'False')

    if experimental is True:
        tw = TaskWarriorShellout()
    else:
        tw = TaskWarriorDirect()

    # Load info about the task database
    tasks = tw.load_tasks()

    is_bugwarrior_task = lambda task: \
        task.get('description', '').startswith(MARKUP)

    # Prune down to only tasks managed by bugwarrior
    for key in tasks.keys():
        tasks[key] = filter(is_bugwarrior_task, tasks[key])

    # Build a list of only the descriptions of those local bugwarrior tasks
    local_descs = [t['description'] for t in sum(tasks.values(), [])
                   if t['status'] not in ('deleted',)]

    # Now for the remote data.
    # Build a list of only the descriptions of those remote issues
    remote_descs = [i['description'] for i in issues]

    # Build the list of tasks that need to be added
    is_new = lambda issue: issue['description'] not in local_descs
    new_issues = filter(is_new, issues)
    old_issues = filter(lambda i: not is_new(i), issues)

    # Build the list of local tasks that need to be completed
    is_done = lambda task: task['description'] not in remote_descs
    done_tasks = filter(is_done, tasks['pending'])

    log.name('db').struct(new=len(new_issues), completed=len(done_tasks))

    # Add new issues
    for issue in new_issues:
        log.name('db').info(
            "Adding task {0}",
            issue['description'].encode("utf-8")
        )
        if notify:
            send_notification(issue, 'Created', conf)
        tw.task_add(**issue)

    # Update any issues that may have had new properties added.  These are
    # usually annotations that come from comments in the issue thread.
    pending_descriptions = [t['description'] for t in tasks['pending']]
    for upstream_issue in old_issues:
        if upstream_issue['description'] not in pending_descriptions:
            continue

        id, task = tw.get_task(description=upstream_issue['description'])
        for key in upstream_issue:
            if key not in task:
                if experimental is True and "annotation_" in key:
                    # TaskWarrior doesn't currently (2.2.0) allow for setting
                    # the annotation entry key. This means that each annotation
                    # key will always be updated to the current date and time,
                    # which in turn means BW will always think a task has been
                    # updated. Until this is resolved in 2.3.0, ignore
                    # annotation updates in experimental mode.
                    continue
                log.name('db').info(
                    "Updating {0} on {1}",
                    key,
                    upstream_issue['description'].encode("utf-8"),
                )
                if notify:
                    send_notification(upstream_issue, 'Updated', conf)
                task[key] = upstream_issue[key]
                id, task = tw.task_update(task)

    # Delete old issues
    for task in done_tasks:
        log.name('db').info(
            "Completing task {0}",
            task['description'].encode("utf-8"),
        )
        if notify:
            send_notification(task, 'Completed', conf)

        tw.task_done(uuid=task['uuid'])
        if experimental is True:
            # `task merge` only updates/adds tasks, it won't delete them, so
            # call task_done() on the primary TW task database.
            tw_done = TaskWarriorExperimental()
            tw_done.task_done(uuid=task['uuid'])

    # Merge tasks with users local DB
    if experimental is True:
        # Call task merge from users local database
        config = tw.load_config(config_filename='~/.bugwarrior_taskrc')
        bwtask_data = "%s/" % config['data']['location']
        subprocess.call([
            'task', 'rc.verbose=nothing', 'rc.merge.autopush=no',
            'merge', bwtask_data])
        # Delete completed tasks from Bugwarrior tasks DB. This allows for
        # assigning/unassigning tasks in a remote service, and tracking status
        # changes in Bugwarrior.
        subprocess.call([
            'task', 'rc:~/.bugwarrior_taskrc', 'rc.verbose=nothing',
            'rc.confirmation=no', 'rc.bulk=100', 'status:completed',
            'delete'])

    # Send notifications
    if notify:
        send_notification(
            dict(description="New: %d, Completed: %d" % (
                len(new_issues), len(done_tasks)
            )),
            'bw_finished',
            conf,
        )
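This older synchronize() variant relies on Python 2 semantics: filter() must return lists here, since len() is later called on new_issues and done_tasks. A rough Python 3 equivalent of the pruning and matching steps, kept only as an illustrative sketch reusing the helpers defined above:

# Sketch only: materialize the filtered collections as lists so that len()
# and repeated iteration behave the same way they did under Python 2.
for key in list(tasks.keys()):
    tasks[key] = [t for t in tasks[key] if is_bugwarrior_task(t)]
new_issues = [i for i in issues if is_new(i)]
old_issues = [i for i in issues if not is_new(i)]
done_tasks = [t for t in tasks['pending'] if is_done(t)]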
Example #6
File: db.py Project: FvD/bugwarrior
def synchronize(issue_generator, conf):
    def _bool_option(section, option, default):
        try:
            return section in conf.sections() and \
                asbool(conf.get(section, option, default))
        except NoOptionError:
            return default

    targets = [t.strip() for t in conf.get('general', 'targets').split(',')]
    services = set([conf.get(target, 'service') for target in targets])
    key_list = build_key_list(services)
    uda_list = build_uda_config_overrides(services)

    if uda_list:
        log.name('bugwarrior').info(
            'Service-defined UDAs (you can optionally add these to your '
            '~/.taskrc for use in reports):'
        )
        for uda in convert_override_args_to_taskrc_settings(uda_list):
            log.name('bugwarrior').info(uda)

    static_fields = static_fields_default = ['priority']
    if conf.has_option('general', 'static_fields'):
        static_fields = conf.get('general', 'static_fields').split(',')

    # Before running CRUD operations, call the pre_import hook(s).
    run_hooks(conf, 'pre_import')

    notify = _bool_option('notifications', 'notifications', 'False')

    tw = TaskWarriorShellout(
        config_filename=get_taskrc_path(conf),
        config_overrides=uda_list,
        marshal=True,
    )

    legacy_matching = _bool_option('general', 'legacy_matching', 'True')

    issue_updates = {
        'new': [],
        'existing': [],
        'changed': [],
        'closed': get_managed_task_uuids(tw, key_list, legacy_matching),
    }

    for issue in issue_generator:
        if isinstance(issue, tuple) and issue[0] == ABORT_PROCESSING:
            raise RuntimeError(issue[1])
        try:
            existing_uuid = find_local_uuid(
                tw, key_list, issue, legacy_matching=legacy_matching
            )
            issue_dict = dict(issue)
            _, task = tw.get_task(uuid=existing_uuid)

            # Drop static fields from the upstream issue.  We don't want to
            # overwrite local changes to fields we declare static.
            for field in static_fields:
                del issue_dict[field]

            # Merge annotations & tags from online into our task object
            merge_left('annotations', task, issue_dict, hamming=True)
            merge_left('tags', task, issue_dict)

            issue_dict.pop('annotations', None)
            issue_dict.pop('tags', None)

            task.update(issue_dict)

            if task.get_changes(keep=True):
                issue_updates['changed'].append(task)
            else:
                issue_updates['existing'].append(task)

            if existing_uuid in issue_updates['closed']:
                issue_updates['closed'].remove(existing_uuid)

        except MultipleMatches as e:
            log.name('db').error("Multiple matches: {0}", six.text_type(e))
            log.name('db').trace(e)
        except NotFound:
            issue_updates['new'].append(dict(issue))

    # Add new issues
    log.name('db').info("Adding {0} tasks", len(issue_updates['new']))
    for issue in issue_updates['new']:
        log.name('db').info(
            "Adding task {0}",
            issue['description'].encode("utf-8")
        )
        if notify:
            send_notification(issue, 'Created', conf)

        try:
            tw.task_add(**issue)
        except TaskwarriorError as e:
            log.name('db').trace(e)

    log.name('db').info("Updating {0} tasks", len(issue_updates['changed']))
    for issue in issue_updates['changed']:
        changes = '; '.join([
            '{field}: {f} -> {t}'.format(
                field=field,
                f=repr(ch[0]),
                t=repr(ch[1])
            )
            for field, ch in six.iteritems(issue.get_changes(keep=True))
        ])
        log.name('db').info(
            "Updating task {0}; {1}",
            issue['description'].encode("utf-8"),
            changes,
        )
        try:
            tw.task_update(issue)
        except TaskwarriorError as e:
            log.name('db').trace(e)

    log.name('db').info("Closing {0} tasks", len(issue_updates['closed']))
    for issue in issue_updates['closed']:
        _, task_info = tw.get_task(uuid=issue)
        log.name('db').info(
            "Completing task {0} {1}",
            task_info['uuid'],
            task_info['description'],
        )
        if notify:
            send_notification(task_info, 'Completed', conf)

        try:
            tw.task_done(uuid=issue)
        except TaskwarriorError as e:
            log.name('db').trace(e)

    # Send notifications
    if notify:
        send_notification(
            dict(
                description="New: %d, Changed: %d, Completed: %d" % (
                    len(issue_updates['new']),
                    len(issue_updates['changed']),
                    len(issue_updates['closed'])
                )
            ),
            'bw_finished',
            conf,
        )
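The changes string built in the update loop above assumes get_changes() returns a mapping of field name to an (old, new) pair. A small illustrative sketch of the resulting summary, using made-up values:

# Hypothetical values, shown only to illustrate the summary format.
example_changes = {'priority': ('L', 'H'), 'project': (None, 'work')}
summary = '; '.join(
    '{field}: {f} -> {t}'.format(field=field, f=repr(ch[0]), t=repr(ch[1]))
    for field, ch in example_changes.items()
)
# summary == "priority: 'L' -> 'H'; project: None -> 'work'"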
Example #7
import editor
import sys
import argparse
import os
import logging
import json

from .utils import expand_tree, create_notes_dir, dump_yaml, init_logging
from .utils import worker_pool

logger = logging.getLogger('taskn')

try:
    from taskw import TaskWarriorShellout
    warrior = TaskWarriorShellout()
except ImportError:
    logger.critical('ERROR - taskw module required.')
    exit(1)

# ############ setup function #############


def get_or_make_task(task=None):
    if task is None or not task:
        logger.critical('cannot add a note to an unspecified task')
        exit(1)
    else:
        if len(task) == 1:
            try:
                return warrior.get_task(id=task[0])
Example #8
 def should_skip(self):
     """ If 'task' is not installed, we can't run these tests. """
     return not TaskWarriorShellout.can_use()
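A hedged sketch of how a test class might consume this helper in its setup, mirroring the guard from Example #1 (the setup body is illustrative):

 def setup(self):
     # Illustrative only: skip these tests on machines where the
     # 'task' command line tool is not installed.
     if self.should_skip():
         raise nose.SkipTest("taskwarrior not installed")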
Example #9
File: db.py Project: irl/bugwarrior
def synchronize(issue_generator, conf, main_section, dry_run=False):
    def _bool_option(section, option, default):
        try:
            return asbool(conf.get(section, option))
        except (NoSectionError, NoOptionError):
            return default

    targets = [t.strip() for t in conf.get(main_section, 'targets').split(',')]
    services = set([conf.get(target, 'service') for target in targets])
    key_list = build_key_list(services)
    uda_list = build_uda_config_overrides(services)

    if uda_list:
        log.name('bugwarrior').info(
            'Service-defined UDAs exist: you can optionally use the '
            '`bugwarrior-uda` command to export a list of UDAs you can '
            'add to your ~/.taskrc file.'
        )

    static_fields = static_fields_default = ['priority']
    if conf.has_option(main_section, 'static_fields'):
        static_fields = conf.get(main_section, 'static_fields').split(',')

    # Before running CRUD operations, call the pre_import hook(s).
    run_hooks(conf, 'pre_import')

    notify = _bool_option('notifications', 'notifications', False) and not dry_run

    tw = TaskWarriorShellout(
        config_filename=get_taskrc_path(conf, main_section),
        config_overrides=uda_list,
        marshal=True,
    )

    legacy_matching = _bool_option(main_section, 'legacy_matching', False)
    merge_annotations = _bool_option(main_section, 'merge_annotations', True)
    merge_tags = _bool_option(main_section, 'merge_tags', True)

    issue_updates = {
        'new': [],
        'existing': [],
        'changed': [],
        'closed': get_managed_task_uuids(tw, key_list, legacy_matching),
    }

    for issue in issue_generator:
        try:
            existing_uuid = find_local_uuid(
                tw, key_list, issue, legacy_matching=legacy_matching
            )
            issue_dict = dict(issue)
            _, task = tw.get_task(uuid=existing_uuid)

            # Drop static fields from the upstream issue.  We don't want to
            # overwrite local changes to fields we declare static.
            for field in static_fields:
                del issue_dict[field]

            # Merge annotations & tags from online into our task object
            if merge_annotations:
                merge_left('annotations', task, issue_dict, hamming=True)

            if merge_tags:
                merge_left('tags', task, issue_dict)

            issue_dict.pop('annotations', None)
            issue_dict.pop('tags', None)

            task.update(issue_dict)

            if task.get_changes(keep=True):
                issue_updates['changed'].append(task)
            else:
                issue_updates['existing'].append(task)

            if existing_uuid in issue_updates['closed']:
                issue_updates['closed'].remove(existing_uuid)

        except MultipleMatches as e:
            log.name('db').error("Multiple matches: {0}", six.text_type(e))
            log.name('db').trace(e)
        except NotFound:
            issue_updates['new'].append(dict(issue))

    notreally = ' (not really)' if dry_run else ''
    # Add new issues
    log.name('db').info("Adding {0} tasks", len(issue_updates['new']))
    for issue in issue_updates['new']:
        log.name('db').info(
            "Adding task {0}{1}",
            issue['description'].encode("utf-8"),
            notreally
        )
        if dry_run:
            continue
        if notify:
            send_notification(issue, 'Created', conf)

        try:
            tw.task_add(**issue)
        except TaskwarriorError as e:
            log.name('db').error("Unable to add task: %s" % e.stderr)
            log.name('db').trace(e)

    log.name('db').info("Updating {0} tasks", len(issue_updates['changed']))
    for issue in issue_updates['changed']:
        changes = '; '.join([
            '{field}: {f} -> {t}'.format(
                field=field,
                f=repr(ch[0]),
                t=repr(ch[1])
            )
            for field, ch in six.iteritems(issue.get_changes(keep=True))
        ])
        log.name('db').info(
            "Updating task {0}, {1}; {2}{3}",
            six.text_type(issue['uuid']).encode("utf-8"),
            issue['description'].encode("utf-8"),
            changes,
            notreally
        )
        if dry_run:
            continue

        try:
            tw.task_update(issue)
        except TaskwarriorError as e:
            log.name('db').error("Unable to modify task: %s" % e.stderr)
            log.name('db').trace(e)

    log.name('db').info("Closing {0} tasks", len(issue_updates['closed']))
    for issue in issue_updates['closed']:
        _, task_info = tw.get_task(uuid=issue)
        log.name('db').info(
            "Completing task {0} {1}{2}",
            issue,
            task_info.get('description', '').encode('utf-8'),
            notreally
        )
        if dry_run:
            continue

        if notify:
            send_notification(task_info, 'Completed', conf)

        try:
            tw.task_done(uuid=issue)
        except TaskwarriorError as e:
            log.name('db').error("Unable to close task: %s" % e.stderr)
            log.name('db').trace(e)

    # Send notifications
    if notify:
        only_on_new_tasks = _bool_option('notifications', 'only_on_new_tasks', False)
        if not only_on_new_tasks or len(issue_updates['new']) + len(issue_updates['changed']) + len(issue_updates['closed']) > 0:
            send_notification(
                dict(
                    description="New: %d, Changed: %d, Completed: %d" % (
                        len(issue_updates['new']),
                        len(issue_updates['changed']),
                        len(issue_updates['closed'])
                    )
                ),
                'bw_finished',
                conf,
            )
Example #10
def synchronize(issue_generator, conf, main_section, dry_run=False):
    def _bool_option(section, option, default):
        try:
            return asbool(conf.get(section, option))
        except (NoSectionError, NoOptionError):
            return default

    targets = aslist(conf.get(main_section, 'targets'))
    services = set([conf.get(target, 'service') for target in targets])
    key_list = build_key_list(services)
    uda_list = build_uda_config_overrides(services)

    if uda_list:
        log.info('Service-defined UDAs exist: you can optionally use the '
                 '`bugwarrior-uda` command to export a list of UDAs you can '
                 'add to your taskrc file.')

    static_fields = ['priority']
    if conf.has_option(main_section, 'static_fields'):
        static_fields = aslist(conf.get(main_section, 'static_fields'))

    # Before running CRUD operations, call the pre_import hook(s).
    run_hooks(conf, 'pre_import')

    notify = _bool_option('notifications', 'notifications',
                          False) and not dry_run

    tw = TaskWarriorShellout(
        config_filename=get_taskrc_path(conf, main_section),
        config_overrides=uda_list,
        marshal=True,
    )

    legacy_matching = _bool_option(main_section, 'legacy_matching', False)
    merge_annotations = _bool_option(main_section, 'merge_annotations', True)
    merge_tags = _bool_option(main_section, 'merge_tags', True)

    issue_updates = {
        'new': [],
        'existing': [],
        'changed': [],
        'closed': get_managed_task_uuids(tw, key_list, legacy_matching),
    }

    for issue in issue_generator:

        try:
            issue_dict = dict(issue)
            # We received this issue from The Internet, but we're not sure what
            # kind of encoding the service providers may have handed us. Let's try
            # and decode all byte strings from UTF8 off the bat.  If we encounter
            # other encodings in the wild in the future, we can revise the handling
            # here. https://github.com/ralphbean/bugwarrior/issues/350
            for key in issue_dict.keys():
                if isinstance(issue_dict[key], six.binary_type):
                    try:
                        issue_dict[key] = issue_dict[key].decode('utf-8')
                    except UnicodeDecodeError:
                        log.warn("Failed to interpret %r as utf-8" % key)

            existing_uuid = find_local_uuid(tw,
                                            key_list,
                                            issue,
                                            legacy_matching=legacy_matching)
            _, task = tw.get_task(uuid=existing_uuid)

            # Drop static fields from the upstream issue.  We don't want to
            # overwrite local changes to fields we declare static.
            for field in static_fields:
                if field in issue_dict:
                    del issue_dict[field]

            # Merge annotations & tags from online into our task object
            if merge_annotations:
                merge_left('annotations', task, issue_dict, hamming=True)

            if merge_tags:
                merge_left('tags', task, issue_dict)

            issue_dict.pop('annotations', None)
            issue_dict.pop('tags', None)

            task.update(issue_dict)

            if task.get_changes(keep=True):
                issue_updates['changed'].append(task)
            else:
                issue_updates['existing'].append(task)

            if existing_uuid in issue_updates['closed']:
                issue_updates['closed'].remove(existing_uuid)

        except MultipleMatches as e:
            log.exception("Multiple matches: %s", six.text_type(e))
        except NotFound:
            issue_updates['new'].append(issue_dict)

    notreally = ' (not really)' if dry_run else ''
    # Add new issues
    log.info("Adding %i tasks", len(issue_updates['new']))
    for issue in issue_updates['new']:
        log.info(u"Adding task %s%s", issue['description'], notreally)
        if dry_run:
            continue
        if notify:
            send_notification(issue, 'Created', conf)

        try:
            tw.task_add(**issue)
        except TaskwarriorError as e:
            log.exception("Unable to add task: %s" % e.stderr)

    log.info("Updating %i tasks", len(issue_updates['changed']))
    for issue in issue_updates['changed']:
        changes = '; '.join([
            '{field}: {f} -> {t}'.format(field=field,
                                         f=repr(ch[0]),
                                         t=repr(ch[1]))
            for field, ch in six.iteritems(issue.get_changes(keep=True))
        ])
        log.info("Updating task %s, %s; %s%s", six.text_type(issue['uuid']),
                 issue['description'], changes, notreally)
        if dry_run:
            continue

        try:
            tw.task_update(issue)
        except TaskwarriorError as e:
            log.exception("Unable to modify task: %s" % e.stderr)

    log.info("Closing %i tasks", len(issue_updates['closed']))
    for issue in issue_updates['closed']:
        _, task_info = tw.get_task(uuid=issue)
        log.info("Completing task %s %s%s", issue,
                 task_info.get('description', ''), notreally)
        if dry_run:
            continue

        if notify:
            send_notification(task_info, 'Completed', conf)

        try:
            tw.task_done(uuid=issue)
        except TaskwarriorError as e:
            log.exception("Unable to close task: %s" % e.stderr)

    # Send notifications
    if notify:
        only_on_new_tasks = _bool_option('notifications', 'only_on_new_tasks',
                                         False)
        if not only_on_new_tasks or len(issue_updates['new']) + len(
                issue_updates['changed']) + len(issue_updates['closed']) > 0:
            send_notification(
                dict(description="New: %d, Changed: %d, Completed: %d" %
                     (len(issue_updates['new']), len(issue_updates['changed']),
                      len(issue_updates['closed']))),
                'bw_finished',
                conf,
            )
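The decoding pass at the top of the loop in this version normalizes byte strings before any comparisons happen; a tiny illustrative sketch of the same idea in isolation (the sample payload is made up):

# Hypothetical issue payload in which one field arrived as UTF-8 bytes.
issue_dict = {'description': b'fix caf\xc3\xa9 menu', 'priority': 'H'}
for key in issue_dict.keys():
    if isinstance(issue_dict[key], bytes):
        try:
            issue_dict[key] = issue_dict[key].decode('utf-8')
        except UnicodeDecodeError:
            pass  # leave undecodable values untouched (the real code logs a warning)
# issue_dict['description'] == 'fix café menu'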
Example #11
def synchronize(issue_generator, conf, main_section, dry_run=False):
    def _bool_option(section, option, default):
        try:
            return section in conf.sections() and \
                asbool(conf.get(section, option, default))
        except NoOptionError:
            return default

    targets = [t.strip() for t in conf.get(main_section, 'targets').split(',')]
    services = set([conf.get(target, 'service') for target in targets])
    key_list = build_key_list(services)
    uda_list = build_uda_config_overrides(services)

    if uda_list:
        log.name('bugwarrior').info(
            'Service-defined UDAs exist: you can optionally use the '
            '`bugwarrior-uda` command to export a list of UDAs you can '
            'add to your ~/.taskrc file.')

    static_fields = static_fields_default = ['priority']
    if conf.has_option(main_section, 'static_fields'):
        static_fields = conf.get(main_section, 'static_fields').split(',')

    # Before running CRUD operations, call the pre_import hook(s).
    run_hooks(conf, 'pre_import')

    notify = _bool_option('notifications', 'notifications',
                          'False') and not dry_run

    tw = TaskWarriorShellout(
        config_filename=get_taskrc_path(conf, main_section),
        config_overrides=uda_list,
        marshal=True,
    )

    legacy_matching = _bool_option(main_section, 'legacy_matching', 'True')
    merge_annotations = _bool_option(main_section, 'merge_annotations', 'True')
    merge_tags = _bool_option(main_section, 'merge_tags', 'True')

    issue_updates = {
        'new': [],
        'existing': [],
        'changed': [],
        'closed': get_managed_task_uuids(tw, key_list, legacy_matching),
    }

    for issue in issue_generator:
        if isinstance(issue, tuple) and issue[0] == ABORT_PROCESSING:
            raise RuntimeError(issue[1])
        try:
            existing_uuid = find_local_uuid(tw,
                                            key_list,
                                            issue,
                                            legacy_matching=legacy_matching)
            issue_dict = dict(issue)
            _, task = tw.get_task(uuid=existing_uuid)

            # Drop static fields from the upstream issue.  We don't want to
            # overwrite local changes to fields we declare static.
            for field in static_fields:
                del issue_dict[field]

            # Merge annotations & tags from online into our task object
            if merge_annotations:
                merge_left('annotations', task, issue_dict, hamming=True)

            if merge_tags:
                merge_left('tags', task, issue_dict)

            issue_dict.pop('annotations', None)
            issue_dict.pop('tags', None)

            task.update(issue_dict)

            if task.get_changes(keep=True):
                issue_updates['changed'].append(task)
            else:
                issue_updates['existing'].append(task)

            if existing_uuid in issue_updates['closed']:
                issue_updates['closed'].remove(existing_uuid)

        except MultipleMatches as e:
            log.name('db').error("Multiple matches: {0}", six.text_type(e))
            log.name('db').trace(e)
        except NotFound:
            issue_updates['new'].append(dict(issue))

    notreally = ' (not really)' if dry_run else ''
    # Add new issues
    log.name('db').info("Adding {0} tasks", len(issue_updates['new']))
    for issue in issue_updates['new']:
        log.name('db').info("Adding task {0}{1}",
                            issue['description'].encode("utf-8"), notreally)
        if dry_run:
            continue
        if notify:
            send_notification(issue, 'Created', conf)

        try:
            tw.task_add(**issue)
        except TaskwarriorError as e:
            log.name('db').error("Unable to add task: %s" % e.stderr)
            log.name('db').trace(e)

    log.name('db').info("Updating {0} tasks", len(issue_updates['changed']))
    for issue in issue_updates['changed']:
        changes = '; '.join([
            '{field}: {f} -> {t}'.format(field=field,
                                         f=repr(ch[0]),
                                         t=repr(ch[1]))
            for field, ch in six.iteritems(issue.get_changes(keep=True))
        ])
        log.name('db').info("Updating task {0}, {1}; {2}{3}",
                            six.text_type(issue['uuid']).encode("utf-8"),
                            issue['description'].encode("utf-8"), changes,
                            notreally)
        if dry_run:
            continue

        try:
            tw.task_update(issue)
        except TaskwarriorError as e:
            log.name('db').error("Unable to modify task: %s" % e.stderr)
            log.name('db').trace(e)

    log.name('db').info("Closing {0} tasks", len(issue_updates['closed']))
    for issue in issue_updates['closed']:
        _, task_info = tw.get_task(uuid=issue)
        log.name('db').info("Completing task {0} {1}{2}", issue,
                            task_info.get('description', '').encode('utf-8'),
                            notreally)
        if dry_run:
            continue

        if notify:
            send_notification(task_info, 'Completed', conf)

        try:
            tw.task_done(uuid=issue)
        except TaskwarriorError as e:
            log.name('db').error("Unable to close task: %s" % e.stderr)
            log.name('db').trace(e)

    # Send notifications
    if notify:
        send_notification(
            dict(description="New: %d, Changed: %d, Completed: %d" %
                 (len(issue_updates['new']), len(issue_updates['changed']),
                  len(issue_updates['closed']))),
            'bw_finished',
            conf,
        )
Example #12
def synchronize(issue_generator, conf, main_section, dry_run=False):
    def _bool_option(section, option, default):
        try:
            return asbool(conf.get(section, option))
        except (NoSectionError, NoOptionError):
            return default

    targets = aslist(conf.get(main_section, 'targets'))
    services = set([conf.get(target, 'service') for target in targets])
    key_list = build_key_list(services)
    uda_list = build_uda_config_overrides(services)

    if uda_list:
        log.info(
            'Service-defined UDAs exist: you can optionally use the '
            '`bugwarrior-uda` command to export a list of UDAs you can '
            'add to your taskrc file.'
        )

    static_fields = ['priority']
    if conf.has_option(main_section, 'static_fields'):
        static_fields = aslist(conf.get(main_section, 'static_fields'))

    # Before running CRUD operations, call the pre_import hook(s).
    run_hooks(conf, 'pre_import')

    notify = _bool_option('notifications', 'notifications', False) and not dry_run

    tw = TaskWarriorShellout(
        config_filename=get_taskrc_path(conf, main_section),
        config_overrides=uda_list,
        marshal=True,
    )

    legacy_matching = _bool_option(main_section, 'legacy_matching', False)
    merge_annotations = _bool_option(main_section, 'merge_annotations', True)
    merge_tags = _bool_option(main_section, 'merge_tags', True)

    issue_updates = {
        'new': [],
        'existing': [],
        'changed': [],
        'closed': get_managed_task_uuids(tw, key_list, legacy_matching),
    }

    for issue in issue_generator:

        try:
            issue_dict = dict(issue)
            # We received this issue from The Internet, but we're not sure what
            # kind of encoding the service providers may have handed us. Let's try
            # and decode all byte strings from UTF8 off the bat.  If we encounter
            # other encodings in the wild in the future, we can revise the handling
            # here. https://github.com/ralphbean/bugwarrior/issues/350
            for key in issue_dict.keys():
                if isinstance(issue_dict[key], six.binary_type):
                    try:
                        issue_dict[key] = issue_dict[key].decode('utf-8')
                    except UnicodeDecodeError:
                        log.warn("Failed to interpret %r as utf-8" % key)

            existing_uuid = find_local_uuid(
                tw, key_list, issue, legacy_matching=legacy_matching
            )
            _, task = tw.get_task(uuid=existing_uuid)

            # Drop static fields from the upstream issue.  We don't want to
            # overwrite local changes to fields we declare static.
            for field in static_fields:
                if field in issue_dict:
                    del issue_dict[field]

            # Merge annotations & tags from online into our task object
            if merge_annotations:
                merge_left('annotations', task, issue_dict, hamming=True)

            if merge_tags:
                merge_left('tags', task, issue_dict)

            issue_dict.pop('annotations', None)
            issue_dict.pop('tags', None)

            task.update(issue_dict)

            if task.get_changes(keep=True):
                issue_updates['changed'].append(task)
            else:
                issue_updates['existing'].append(task)

            if existing_uuid in issue_updates['closed']:
                issue_updates['closed'].remove(existing_uuid)

        except MultipleMatches as e:
            log.exception("Multiple matches: %s", six.text_type(e))
        except NotFound:
            issue_updates['new'].append(issue_dict)

    notreally = ' (not really)' if dry_run else ''
    # Add new issues
    log.info("Adding %i tasks", len(issue_updates['new']))
    for issue in issue_updates['new']:
        log.info(u"Adding task %s%s", issue['description'], notreally)
        if dry_run:
            continue
        if notify:
            send_notification(issue, 'Created', conf)

        try:
            tw.task_add(**issue)
        except TaskwarriorError as e:
            log.exception("Unable to add task: %s" % e.stderr)

    log.info("Updating %i tasks", len(issue_updates['changed']))
    for issue in issue_updates['changed']:
        changes = '; '.join([
            '{field}: {f} -> {t}'.format(
                field=field,
                f=repr(ch[0]),
                t=repr(ch[1])
            )
            for field, ch in six.iteritems(issue.get_changes(keep=True))
        ])
        log.info(
            "Updating task %s, %s; %s%s",
            six.text_type(issue['uuid']),
            issue['description'],
            changes,
            notreally
        )
        if dry_run:
            continue

        try:
            tw.task_update(issue)
        except TaskwarriorError as e:
            log.exception("Unable to modify task: %s" % e.stderr)

    log.info("Closing %i tasks", len(issue_updates['closed']))
    for issue in issue_updates['closed']:
        _, task_info = tw.get_task(uuid=issue)
        log.info(
            "Completing task %s %s%s",
            issue,
            task_info.get('description', ''),
            notreally
        )
        if dry_run:
            continue

        if notify:
            send_notification(task_info, 'Completed', conf)

        try:
            tw.task_done(uuid=issue)
        except TaskwarriorError as e:
            log.exception("Unable to close task: %s" % e.stderr)

    # Send notifications
    if notify:
        only_on_new_tasks = _bool_option('notifications', 'only_on_new_tasks', False)
        if not only_on_new_tasks or len(issue_updates['new']) + len(issue_updates['changed']) + len(issue_updates['closed']) > 0:
            send_notification(
                dict(
                    description="New: %d, Changed: %d, Completed: %d" % (
                        len(issue_updates['new']),
                        len(issue_updates['changed']),
                        len(issue_updates['closed'])
                    )
                ),
                'bw_finished',
                conf,
            )