Example #1
def create_doc_local(pname, dname, release, syncer, input_url=None):
    # Create the local directory for this document release and store an
    # initial DocumentStatus model keyed by document name + release.
    doc_key = dname + release
    doc_path = get_doc_path(pname, dname, release)
    mkdir_safe(doc_path)

    model = DocumentStatus(syncer=syncer, input_url=input_url)
    dump_model(model, pname, DOC_PATH, doc_key)
Example #2
def toc_refresh(pname, cname):
    # Rebuild the channel's table of contents using the syncer class
    # recorded in the stored model, then persist the updated model.
    model = load_model(pname, STHREAD_PATH, cname)
    try:
        syncer = import_clazz(model.syncer_clazz)()
        syncer.toc_refresh(model)
        dump_model(model, pname, STHREAD_PATH, cname)
    except Exception:
        logger.exception('Error while refreshing toc')
Example #3
def sync_doc(pname, dname, release):
    doc_key = dname + release
    doc_path = get_doc_path(pname, dname, release)
    model = load_model(pname, DOC_PATH, doc_key)
    # Instantiate the syncer class recorded in the model, download the
    # document pages, and save the resulting page list back to the model.
    syncer = import_clazz(model.syncer)(model.input_url, doc_path)
    pages = syncer.sync()
    model.pages = pages
    dump_model(model, pname, DOC_PATH, doc_key)
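
The two document-side actions above are meant to run in sequence: create_doc_local registers a document and records which syncer class and input URL to use, and sync_doc later instantiates that syncer to download the pages. Below is a minimal driver sketch, not taken from the project: the project, document, release, URL, and syncer class names are placeholders, and both functions are assumed to be importable from the module that defines them.

# Illustrative only: every identifier below is a placeholder.
create_doc_local('myproject', 'manual', '3.0',
                 syncer='docutil.syncers.HtmlSyncer',
                 input_url='http://example.org/manual/3.0/')

# sync_doc re-loads the stored DocumentStatus, builds the syncer from
# model.syncer with the input URL and local path, and stores the page list.
sync_doc('myproject', 'manual', '3.0')
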
Example #4
def parse_channel(pname, cname, parse_refs=True):
    # Load the channel's stored model and the matching SupportChannel row,
    # then parse the downloaded entries (optionally resolving code
    # references) and persist the updated model.
    model = load_model(pname, STHREAD_PATH, cname)
    channel = SupportChannel.objects.filter(project__dir_name=pname).\
            get(dir_name=cname)
    pm = CLIProgressMonitor()
    generic_parser.parse_channel(channel, model, progress_monitor=pm,
            parse_refs=parse_refs)
    dump_model(model, pname, STHREAD_PATH, cname)
    return channel
Example #5
def clear_channel_elements(pname, cname):
    # Mark every entry in the stored model as not parsed.
    model = load_model(pname, STHREAD_PATH, cname)
    for entry in model.entries:
        entry.parsed = False
    dump_model(model, pname, STHREAD_PATH, cname)

    # Delete the database objects produced by a previous parse: messages
    # (with their code references and snippets) and support threads.
    channel = SupportChannel.objects.filter(project__dir_name=pname).\
            get(dir_name=cname)
    query = Message.objects.filter(sthread__channel=channel)
    print('Deleting {0} messages'.format(query.count()))
    for message in query.all():
        message.code_references.all().delete()
        message.code_snippets.all().delete()
        message.delete()
    SupportThread.objects.filter(channel=channel).delete()
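
Together with parse_channel (Example #4), this function supports wiping the results of a previous parse and starting over. A short sketch of that cycle; the project and channel names are placeholders and both functions are assumed to be importable from the module that defines them.

# Illustrative only: reset the parsed flags and delete old threads and
# messages, then parse the channel again without resolving code references.
clear_channel_elements('myproject', 'users-list')
parse_channel('myproject', 'users-list', parse_refs=False)
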
Example #6
def toc_download_section(pname, cname, start=None, end=None, force=False):
    model = load_model(pname, STHREAD_PATH, cname)
    syncer = import_clazz(model.syncer_clazz)()
    for section in model.toc_sections:
        # Skip sections outside the [start, end) index range and sections
        # that were already downloaded, unless force is set.
        index = section.index
        if start is not None and start > index:
            continue
        elif end is not None and end <= index:
            continue
        elif section.downloaded and not force:
            continue
        try:
            syncer.toc_download_section(model, section)
            # Save the model after each section so progress survives errors.
            dump_model(model, pname, STHREAD_PATH, cname)

            print('Downloaded section {0}'.format(section.index))
        except Exception:
            logger.exception('Error while downloading toc section')
Example #7
def toc_download_entries(pname, cname, start=None, end=None, force=False):
    model = load_model(pname, STHREAD_PATH, cname)
    channel_path = get_channel_path(pname, cname)
    syncer = import_clazz(model.syncer_clazz)()
    for entry in model.entries:
        # Same skipping rules as toc_download_section, applied per entry.
        index = entry.index
        if start is not None and start > index:
            continue
        elif end is not None and end <= index:
            continue
        elif entry.downloaded and not force:
            continue
        try:
            syncer.download_entry(entry, channel_path)
            # Save the model after each entry so progress survives errors.
            dump_model(model, pname, STHREAD_PATH, cname)

            print('Downloaded {0}'.format(entry.url))
        except Exception:
            logger.exception('Error while downloading entry')
Example #8
def create_channel_local(pname, cname, syncer, url):
    # Create the local directory for the channel and store an initial
    # SupportChannelStatus model recording the syncer class and URL.
    channel_path = get_channel_path(pname, cname)
    mkdir_safe(channel_path)
    status = SupportChannelStatus(syncer, url)
    dump_model(status, pname, STHREAD_PATH, cname)
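
The channel-side actions on this page fit together as a pipeline: create the local status model, refresh the table of contents, download sections and entries, then parse. Below is a minimal end-to-end sketch, not taken from the project: every identifier (project, channel, URL, syncer class) is a placeholder, the functions are assumed to be importable from the module that defines them, and parse_channel additionally expects a matching SupportChannel row to exist in the database.

# Illustrative pipeline only; all identifiers below are placeholders.
create_channel_local('myproject', 'users-list',
                     syncer='channelutil.syncers.MailingListSyncer',
                     url='http://example.org/pipermail/users/')

# Build the table-of-contents sections recorded in the stored model.
toc_refresh('myproject', 'users-list')

# Download the first ten TOC sections, then every entry they reference;
# start/end bound the indexes and force re-downloads already-fetched items.
toc_download_section('myproject', 'users-list', start=0, end=10)
toc_download_entries('myproject', 'users-list')

# Parse the downloaded entries into SupportThread/Message objects; this
# assumes a SupportChannel row with dir_name='users-list' exists for the
# project.
parse_channel('myproject', 'users-list', parse_refs=True)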