Example #1
import orgparse


def find_tasks(filename, condition):
    tasks = []
    with open(filename, 'r') as f:
        org = f.read()
    root = orgparse.loads(org)
    for node in root:
        if condition(node) and hasattr(node, 'heading'):
            # Walk up from the node, collecting ancestor headings (root excluded).
            joined = []
            n = node
            while not n.is_root():
                joined.insert(0, n.heading)
                n = n.parent

            # Render ancestors in brackets, e.g. "[Project] [Subtask] Heading".
            combined_str = ''
            for ji, j in enumerate(joined):
                if ji < len(joined) - 1:
                    combined_str += '[' + j + '] '
                else:
                    combined_str += j

            tasks.append(combined_str)
    return tasks
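A minimal usage sketch for find_tasks above; the file name tasks.org and the :urgent: tag filter are hypothetical, and the condition simply inspects the tags set that orgparse exposes on nodes:

# Hypothetical call: collect bracketed heading paths of all nodes tagged :urgent:
urgent = find_tasks('tasks.org', lambda n: 'urgent' in getattr(n, 'tags', set()))
for heading_path in urgent:
    print(heading_path)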
Example #2
    def test_parse_orgmode_list(self):
        org = orgparse.loads('''#+STARTUP: showall

- Lorem ipsum foo. <2019-01-17 Thu>
- bar <2019-01-18 Fri 11:30>
- spam [2021-05-13 Thu]
''')
        subprovider = 'my_provider'
        result = list(parse_orgmode_list(org, subprovider))
        expected = [
            Item.normalized(
                datetime_=datetime.datetime(2019, 1, 17),
                text='Lorem ipsum foo.',
                provider='orgmodelist',
                subprovider=subprovider,
                all_day=True,
            ),
            Item.normalized(
                datetime_=datetime.datetime(2019, 1, 18, 11, 30),
                text='bar',
                provider='orgmodelist',
                subprovider=subprovider,
                all_day=False,
            ),
            Item.normalized(
                datetime_=datetime.datetime(2021, 5, 13),
                text='spam',
                provider='orgmodelist',
                subprovider=subprovider,
                all_day=True,
            ),
        ]
        self.assertListEqual(result, expected)
Example #3
import orgparse
from datetime import datetime
from pathlib import Path
from typing import Optional


def org_meta(apath: Path) -> Meta:
    # TODO extract get_meta method??
    o = orgparse.loads(apath.read_text())
    ttl_ = o.get_file_property('TITLE')
    assert ttl_ is not None
    ttl: str = ttl_
    summ = o.get_file_property('SUMMARY')
    upid = o.get_file_property('UPID')
    tags = o.get_file_property_list('FILETAGS')
    if len(tags) == 0:
        tags = None
    dates = o.get_file_property('DATE')
    # the mere presence of a DRAFT property marks the post as a draft
    draft = None if o.get_file_property('DRAFT') is None else True

    date: Optional[datetime]
    if dates is not None:
        # e.g. '[2020-01-01 Wed]': strip the opening bracket, keep 'YYYY-MM-DD Dow'
        dates = dates[1:1 + len('0000-00-00 Abc')]
        date = datetime.strptime(dates, '%Y-%m-%d %a')
    else:
        date = None
    d = dict(
        title=ttl,
        summary=summ,
        date=date,
        draft=draft,
        tags=tags,
        upid=upid,  # todo perhaps should always have upid?
    )
    return d
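A sketch of how org_meta above might be called; post.org is a hypothetical file whose header carries the #+TITLE, #+DATE and similar file properties the function reads:

# post.org is assumed to start with lines such as:
#   #+TITLE: My post
#   #+DATE: [2020-01-01 Wed]
meta = org_meta(Path('post.org'))
print(meta['title'], meta['date'])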
Example #4
def process_file(filename):
    ''' Start parser, read file, process content.

    :param filename: file to process
    :type filename: str
    '''
    with open(filename, 'r') as fb:
        content = orgparse.loads(fb.read())
        convert_to_html(content)
Example #5
def test_org_to_exercise() -> None:
    s = '''
* hollow rocks :wlog:
** [2020-10-10 Sat 10:26] 90 sec
** [2020-10-10 Sat 10:38] 90 sec
** [2020-10-10 Sat 10:47] 90 sec
** [2020-10-10 Sat 10:56] 90 sec
* push ups diamond :wlog:
this should be handled by workout processor.. need to test?
- [2020-10-04 Sun 13:45] 25
- [2020-10-04 Sun 14:00] 25
- [2020-10-04 Sun 14:14] 21.5F
- [2020-10-04 Sun 14:33] 16.5F
* [2019-01-05 Sat 13:03] static hollow leg holds tabata 120/240 :wlog:good:
** 120 secs
** 90 secs (gave up)
** 120 secs
* [2018-10-04 Thu 07:46] 20 pu diamond :wlog:
* [2018-10-04 Thu 07:50] 30.5F pu wide
'''
    root = orgparse.loads(s)
    o = root.children[0]
    xx = list(org_to_exercise(o))
    for x in xx:
        assert not isinstance(x, Exception)
        assert x.dt is not None
        assert x.reps == 90

    o = root.children[1]
    yy = list(org_to_exercise(o))
    assert len(yy) == 4
    for y in yy:
        assert not isinstance(y, Exception)
        assert y.dt is not None
        reps = y.reps
        assert reps is not None
        assert reps > 15  # todo more specific tests
    o = root.children[2]
    zz = list(org_to_exercise(o))
    [a, b, c] = zz
    assert isinstance(b, Exercise)
    assert isinstance(c, Exercise)
    assert b.reps == 90
    assert c.reps == 120

    o = root.children[3]
    zz = list(org_to_exercise(o))
    [x] = zz
    assert isinstance(x, Exercise)
    assert x.reps == 20

    o = root.children[4]
    zz = list(org_to_exercise(o))
    [x] = zz
    assert isinstance(x, Exercise)
    assert x.reps == 30.5
Example #6
def aux(tags_file: MPath) -> Iterator[str]:
    path = tags_file.path
    root = orgparse.loads(path.read_text())
    # only the top two heading levels are scanned for CUSTOM_ID properties
    for ch in root.children:
        tag = ch.properties.get('CUSTOM_ID')
        if tag is not None:
            yield tag
        for ch2 in ch.children:
            tag = ch2.properties.get('CUSTOM_ID')
            if tag is not None:
                yield tag
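A sketch of the org structure aux expects: first- and second-level headings carrying a CUSTOM_ID property, which orgparse exposes through node.properties. MPath is project-specific, so the sketch feeds orgparse directly:

import orgparse

# Hypothetical tags file: each heading's CUSTOM_ID becomes a tag
sample = '''* Python
  :PROPERTIES:
  :CUSTOM_ID: python
  :END:
** Testing
   :PROPERTIES:
   :CUSTOM_ID: python-testing
   :END:
'''
root = orgparse.loads(sample)
print([ch.properties.get('CUSTOM_ID') for ch in root.children])  # ['python']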
Example #7
def test_command_oi(preprocessor, data_folder):
    f = data_folder / "oi.org"
    a = preprocessor.preprocess_file(str(f))
    b = orgparse.loads(a)

    expected = "\nThis tests the command OI\n\n\nThis should be duplicated\n\n"
    assert b.children[0].body == expected

    expected = "\n\nmore body\n"
    assert b.children[2].body == expected

    assert b.children[3].body == "\n\ntest body\n"
    assert b.children[3].children[0].heading == "Nested"
    assert b.children[3].children[0].children[0].heading == "More"
Example #8
    def __init__(self, o_file, f_list, backend, **kwargs):

        # TODO(mato): This method is duplicate
        # Try to determine the output file if none was specified
        if o_file is None:
            root, ext = os.path.splitext(f_list[0])
            o_file = root + backend.get_ext()

        # Parse the org files into 1 tree
        # TODO(mato): For now we are expecting only one file, make it more generic
        for f in f_list[:1]:
            source = Preprocessor().preprocess_file(f)
            tree = orgparse.loads(source)

            with open(o_file, "w") as o_stream:
                backend.convert(tree, o_stream, **kwargs)
Example #9
def extract_from_file(fname: PathIsh) -> Results:
    """
    Note that org-mode doesn't keep timezones, so we don't really have a choice but to make it tz-agnostic
    """
    path = Path(fname)
    o = orgparse.loads(path.read_text())
    # meh. but maybe ok to start with?
    root = o.root

    fallback_dt = datetime.fromtimestamp(path.stat().st_mtime)

    ex = RuntimeError(f'while extracting from {fname}')

    for wr in walk_node(node=root, dt=fallback_dt):
        if isinstance(wr, Exception):
            yield echain(ex, wr)
            continue

        (parsed, n) = wr
        dt = parsed.dt
        assert dt is not None  # shouldn't be None because of the fallback
        for r in iter_urls(n):
            try:
                # TODO get body recursively? not sure
                tags = n.tags
                if len(tags) == 0:
                    tagss = ''
                else:
                    # TODO not sure... perhaps keep the whole heading intact?
                    tagss = f'   :{":".join(sorted(tags))}:'
                ctx = parsed.heading + tagss + '\n' + _get_body(n)
            except Exception as e:
                yield echain(ex, e)
                ctx = 'ERROR'  # TODO more context?

            if isinstance(r, Url):
                yield Visit(
                    url=r,
                    dt=dt,
                    locator=Loc.file(fname),  # TODO line number
                    context=ctx,
                )
            else:  # error
                yield echain(ex, r)
Example #10
def check(path: Path) -> Iterator[Failed]:
    print(f"checking {path}")
    for x in F_CHECKS:
        yield from search(
            '-F',
            x,
            path,
        )
    for x in WORD_CHECKS:
        yield from search(
            '--word-regexp',
            x,
            path,
        )

    # TODO not sure if should rely on a unit test?
    ts = orgparse.date.TIMESTAMP_RE
    for line in path.read_text().splitlines():
        m = ts.search(line)
        if m is None:
            continue
        allowed = {
            'inactive_year',
            'inactive_month',
            'inactive_day',
        }
        d = {
            k: v
            for k, v in m.groupdict().items()
            if v is not None and k not in allowed
        }
        if len(d) != 0:
            yield Failed((d, line))

    o = orgparse.loads(path.read_text())
    for n in o:
        found = n.tags.intersection(TAG_CHECKS)
        if len(found) > 0:
            yield Failed((path, n.heading, found))
Example #11
def from_string(s: str):
    base = orgparse.loads(s)
    return Org(base, parent=None)
Example #12
    def convert(self, in_file_str: str, out_file_str: typing.Optional[str] = None) -> None:
        """Convert a single file to ANKI deck."""

        log.info(
            f"Converting file '{in_file_str}' to ANKI deck @ '{out_file_str}'")

        # Create paths
        in_file = pathlib.Path(in_file_str)
        if out_file_str is not None:
            assert out_file_str.endswith(AnkiConvertor.ANKI_EXT)
            out_file = pathlib.Path(out_file_str).resolve()
        else:
            out_file = in_file.with_suffix(AnkiConvertor.ANKI_EXT).resolve()
        tmp_out_file = out_file.with_suffix(".tmp.apkg").resolve()

        # Convert the org nodes into list of Notes
        cards: typing.List[genanki.Note] = []

        # Preprocess and parse the file
        preprocessed_source = self.preprocessor.preprocess_file(str(in_file))
        org_file = orgparse.loads(preprocessed_source)

        # If the user did not supply the convert mode, try to get it
        # from the org file header, falling back to NORMAL mode
        if not self.user_supplied_convert_mode:
            try:
                self.convert_mode = AnkiConvertMode[org_file._special_comments[
                    self.COMMENT_ANKI_CONVERT_MODE][0].upper()]
            except KeyError:
                self.convert_mode = AnkiConvertMode.NORMAL
        else:
            self.convert_mode = self._convert_mode

        self._get_cards(org_file, cards)

        # Try to set the deck name to a org file title comment
        try:
            deck_name = org_file._special_comments["TITLE"][0]
        except (KeyError, IndexError):
            deck_name = out_file.stem

        # TODO: Hash should be calculated from the cards
        deck = genanki.Deck(random.randrange(1 << 30, 1 << 31), deck_name)

        for c in cards:
            deck.add_note(c)

        package = genanki.Package(deck)
        package.media_files = self.node_convertor.media_files
        package.write_to_file(str(tmp_out_file))

        # Import and export the collection using Anki
        # This is necessary to make the mobile version work (include rendered equations)
        with utils.create_empty_anki_collection() as col:
            log.debug("Importing to tmp ANKI collection")
            imp = AnkiPackageImporter(col, str(tmp_out_file))
            imp.run()
            log.debug("Exporting from tmp ANKI collection")
            exp = AnkiPackageExporter(col)
            exp.exportInto(str(out_file))

        tmp_out_file.unlink()
Example #13
    def get_id(cls, desc):
        from orgparse import loads
        root = loads(desc)
        node = root.children[0]
        return node.get_property('ID')
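A sketch of the kind of description get_id above parses: a heading whose property drawer carries an :ID:, read back through orgparse's node.get_property. The class name in the commented call is hypothetical:

desc = '''* Buy milk
  :PROPERTIES:
  :ID: task-001
  :END:
'''
# Hypothetical call on whatever class defines get_id above:
# SomeTask.get_id(desc)  # -> 'task-001'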
Example #14
    def test_nodes_are_equal(self):
        orgtree = orgparse.loads(self.orgtext)
        unequal_nodes = orgtree.children[0].children[:2]
        equal_nodes = orgtree.children[0].children[2:4]
        self.assertFalse(orgmode.nodes_are_equal(*unequal_nodes))
        self.assertTrue(orgmode.nodes_are_equal(*equal_nodes))