# Example #1 (score: 0)
def link_code(pname, bname, release, linker_name, source, source_release=None):
    """Link code references for a project release using the named linker.

    Args:
        pname: project directory name (matched against Project.dir_name).
        bname: code base name within the project release.
        release: release identifier of the project release to link.
        linker_name: key into the LINKERS registry of linker class names.
        source: source identifier forwarded to the linker.
        source_release: optional release identifier for the source; when
            None no source release is resolved.
    """
    project = Project.objects.get(dir_name=pname)
    prelease = ProjectRelease.objects.filter(project=project).\
            filter(release=release)[0]
    if source_release is not None:
        srelease = ProjectRelease.objects.filter(project=project).\
            filter(release=source_release)[0]
    else:
        srelease = None
    codebase = CodeBase.objects.filter(project_release=prelease).\
            filter(name=bname)[0]
    linker_cls_name = LINKERS[linker_name]
    linker_cls = import_clazz(linker_cls_name)
    linker = linker_cls(project, prelease, codebase, source, srelease)

    progress_monitor = CLIProgressMonitor(min_step=1.0)
    progress_monitor.info('Cache Count {0} miss of {1}'
            .format(cache_util.cache_miss, cache_util.cache_total))

    # time.clock() was deprecated in 3.3 and removed in Python 3.8;
    # perf_counter() is the documented replacement for elapsed-time timing.
    start = time.perf_counter()

    linker.link_references(progress_monitor)

    stop = time.perf_counter()
    progress_monitor.info('Cache Count {0} miss of {1}'
            .format(cache_util.cache_miss, cache_util.cache_total))
    progress_monitor.info('Time: {0}'.format(stop - start))
# Example #2 (score: 0)
def link_code(pname, bname, release, linker_name, source, source_release=None):
    """Link code references for a project release using the named linker.

    Args:
        pname: project directory name (matched against Project.dir_name).
        bname: code base name within the project release.
        release: release identifier of the project release to link.
        linker_name: key into the LINKERS registry of linker class names.
        source: source identifier forwarded to the linker.
        source_release: optional release identifier for the source; when
            None no source release is resolved.
    """
    project = Project.objects.get(dir_name=pname)
    prelease = ProjectRelease.objects.filter(project=project).\
            filter(release=release)[0]
    if source_release is not None:
        srelease = ProjectRelease.objects.filter(project=project).\
            filter(release=source_release)[0]
    else:
        srelease = None
    codebase = CodeBase.objects.filter(project_release=prelease).\
            filter(name=bname)[0]
    linker_cls_name = LINKERS[linker_name]
    linker_cls = import_clazz(linker_cls_name)
    linker = linker_cls(project, prelease, codebase, source, srelease)

    progress_monitor = CLIProgressMonitor(min_step=1.0)
    progress_monitor.info('Cache Count {0} miss of {1}'.format(
        cache_util.cache_miss, cache_util.cache_total))

    # time.clock() was deprecated in 3.3 and removed in Python 3.8;
    # perf_counter() is the documented replacement for elapsed-time timing.
    start = time.perf_counter()

    linker.link_references(progress_monitor)

    stop = time.perf_counter()
    progress_monitor.info('Cache Count {0} miss of {1}'.format(
        cache_util.cache_miss, cache_util.cache_total))
    progress_monitor.info('Time: {0}'.format(stop - start))
# Example #3 (score: 0)
def sync_doc(pname, dname, release):
    """Synchronize a documentation set and persist its page list.

    Loads the stored doc model keyed by name+release, runs the model's
    configured syncer against its input URL, and saves the resulting
    pages back into the model.
    """
    key = dname + release
    path = get_doc_path(pname, dname, release)
    doc_model = load_model(pname, DOC_PATH, key)
    syncer_cls = import_clazz(doc_model.syncer)
    doc_model.pages = syncer_cls(doc_model.input_url, path).sync()
    dump_model(doc_model, pname, DOC_PATH, key)
# Example #4 (score: 0)
def toc_refresh(pname, cname):
    """Refresh a channel's table of contents and save the model.

    Best-effort: any failure is logged via logger.exception and
    swallowed rather than propagated.
    """
    channel_model = load_model(pname, STHREAD_PATH, cname)
    try:
        import_clazz(channel_model.syncer_clazz)().toc_refresh(channel_model)
        dump_model(channel_model, pname, STHREAD_PATH, cname)
    except Exception:
        logger.exception('Error while refreshing toc')
# Example #5 (score: 0)
def toc_refresh(pname, cname):
    """Refresh a channel's table of contents and save the model.

    Best-effort: any failure is logged via logger.exception and
    swallowed rather than propagated.
    """
    channel_model = load_model(pname, STHREAD_PATH, cname)
    try:
        import_clazz(channel_model.syncer_clazz)().toc_refresh(channel_model)
        dump_model(channel_model, pname, STHREAD_PATH, cname)
    except Exception:
        logger.exception('Error while refreshing toc')
# Example #6 (score: 0)
def sync_doc(pname, dname, release):
    """Synchronize a documentation set and persist its page list.

    Loads the stored doc model keyed by name+release, runs the model's
    configured syncer against its input URL, and saves the resulting
    pages back into the model.
    """
    key = dname + release
    path = get_doc_path(pname, dname, release)
    doc_model = load_model(pname, DOC_PATH, key)
    syncer_cls = import_clazz(doc_model.syncer)
    doc_model.pages = syncer_cls(doc_model.input_url, path).sync()
    dump_model(doc_model, pname, DOC_PATH, key)
# Example #7 (score: 0)
def sub_process_parse(pinput):
    """Subprocess worker: parse a chunk of documentation pages.

    Returns True on success, False if any exception occurred (the
    traceback is printed). The DB connection is closed on entry and
    again on exit so the child never reuses the parent's socket.
    """
    try:
        # Unnecessary if the parent already closed it,
        # but harmless and makes the isolation explicit.
        connection.close()
        parser_clazz, doc_pk, parse_refs, pages = pinput
        parser = import_clazz(parser_clazz)(doc_pk)
        for page_input in pages:
            if page_input is None:
                continue
            local_path, page_url = page_input
            parser.parse_page(local_path, page_url, parse_refs)
        return True
    except Exception:
        print_exc()
        return False
    finally:
        # Always release this process's own connection.
        connection.close()
# Example #8 (score: 0)
def parse_code(pname, bname, release, parser_name, opt_input=None):
    """Parse a code base with the named parser and return the code base.

    autocommit is necessary here to prevent goofs: parsers can be
    multi-threaded and Django's transaction management is thread-local.
    """
    key = pname + bname + release
    project_release = ProjectRelease.objects.\
            filter(project__dir_name=pname).filter(release=release)[0]
    code_base = CodeBase.objects.\
            filter(project_release=project_release).filter(name=bname)[0]

    parser = import_clazz(PARSERS[parser_name])(code_base, key, opt_input)
    parser.parse(CLILockProgressMonitor())

    return code_base
# Example #9 (score: 0)
def toc_download_section(pname, cname, start=None, end=None, force=False):
    """Download TOC sections whose index falls in [start, end).

    Already-downloaded sections are skipped unless force is True. The
    model is saved after each successful section so progress survives
    interruption; per-section errors are logged and the loop continues.
    """
    channel_model = load_model(pname, STHREAD_PATH, cname)
    syncer = import_clazz(channel_model.syncer_clazz)()
    for section in channel_model.toc_sections:
        idx = section.index
        out_of_range = (start is not None and start > idx) or \
                (end is not None and end <= idx)
        if out_of_range or (section.downloaded and not force):
            continue
        try:
            syncer.toc_download_section(channel_model, section)
            dump_model(channel_model, pname, STHREAD_PATH, cname)

            print('Downloaded section {0}'.format(section.index))
        except Exception:
            logger.exception('Error while downloading toc section')
# Example #10 (score: 0)
def sub_process_parse(pinput):
    """Subprocess worker: parse a chunk of documentation pages.

    Returns True on success, False if any exception occurred (the
    traceback is printed). The DB connection is closed on entry and
    again on exit so the child never reuses the parent's socket.
    """
    try:
        # Unnecessary if the parent already closed it,
        # but harmless and makes the isolation explicit.
        connection.close()
        parser_clazz, doc_pk, parse_refs, pages = pinput
        parser = import_clazz(parser_clazz)(doc_pk)
        for page_input in pages:
            if page_input is None:
                continue
            local_path, page_url = page_input
            parser.parse_page(local_path, page_url, parse_refs)
        return True
    except Exception:
        print_exc()
        return False
    finally:
        # Always release this process's own connection.
        connection.close()
# Example #11 (score: 0)
def toc_download_section(pname, cname, start=None, end=None, force=False):
    """Download TOC sections whose index falls in [start, end).

    Already-downloaded sections are skipped unless force is True. The
    model is saved after each successful section so progress survives
    interruption; per-section errors are logged and the loop continues.
    """
    channel_model = load_model(pname, STHREAD_PATH, cname)
    syncer = import_clazz(channel_model.syncer_clazz)()
    for section in channel_model.toc_sections:
        idx = section.index
        out_of_range = (start is not None and start > idx) or \
                (end is not None and end <= idx)
        if out_of_range or (section.downloaded and not force):
            continue
        try:
            syncer.toc_download_section(channel_model, section)
            dump_model(channel_model, pname, STHREAD_PATH, cname)

            print('Downloaded section {0}'.format(section.index))
        except Exception:
            logger.exception('Error while downloading toc section')
# Example #12 (score: 0)
def parse_code(pname, bname, release, parser_name, opt_input=None):
    """Parse a code base with the named parser and return the code base.

    autocommit is necessary here to prevent goofs: parsers can be
    multi-threaded and Django's transaction management is thread-local.
    """
    key = pname + bname + release
    project_release = ProjectRelease.objects.\
            filter(project__dir_name=pname).filter(release=release)[0]
    code_base = CodeBase.objects.\
            filter(project_release=project_release).filter(name=bname)[0]

    parser = import_clazz(PARSERS[parser_name])(code_base, key, opt_input)
    parser.parse(CLILockProgressMonitor())

    return code_base
# Example #13 (score: 0)
def toc_download_entries(pname, cname, start=None, end=None, force=False):
    """Download channel entries whose index falls in [start, end).

    Already-downloaded entries are skipped unless force is True. The
    model is saved after each successful entry so progress survives
    interruption; per-entry errors are logged and the loop continues.
    """
    channel_model = load_model(pname, STHREAD_PATH, cname)
    channel_path = get_channel_path(pname, cname)
    syncer = import_clazz(channel_model.syncer_clazz)()
    for entry in channel_model.entries:
        idx = entry.index
        out_of_range = (start is not None and start > idx) or \
                (end is not None and end <= idx)
        if out_of_range or (entry.downloaded and not force):
            continue
        try:
            syncer.download_entry(entry, channel_path)
            dump_model(channel_model, pname, STHREAD_PATH, cname)

            print('Downloaded {0}'.format(entry.url))
        except Exception:
            logger.exception('Error while downloading entry')
# Example #14 (score: 0)
def toc_download_entries(pname, cname, start=None, end=None, force=False):
    """Download channel entries whose index falls in [start, end).

    Already-downloaded entries are skipped unless force is True. The
    model is saved after each successful entry so progress survives
    interruption; per-entry errors are logged and the loop continues.
    """
    channel_model = load_model(pname, STHREAD_PATH, cname)
    channel_path = get_channel_path(pname, cname)
    syncer = import_clazz(channel_model.syncer_clazz)()
    for entry in channel_model.entries:
        idx = entry.index
        out_of_range = (start is not None and start > idx) or \
                (end is not None and end <= idx)
        if out_of_range or (entry.downloaded and not force):
            continue
        try:
            syncer.download_entry(entry, channel_path)
            dump_model(channel_model, pname, STHREAD_PATH, cname)

            print('Downloaded {0}'.format(entry.url))
        except Exception:
            logger.exception('Error while downloading entry')
# Example #15 (score: 0)
def sub_process_parse(einput):
    """Subprocess worker: parse a chunk of channel entries.

    Returns True on success, False if any exception occurred (the
    traceback is printed). The DB connection is closed on entry and
    again on exit so the child never reuses the parent's socket.
    """
    try:
        # Unnecessary if the parent already closed it,
        # but harmless and makes the isolation explicit.
        connection.close()
        parser_cls, channel_pk, entry_chunk, parse_refs, lock = einput

        parser = import_clazz(parser_cls)(channel_pk, parse_refs, lock)
        for entry_input in entry_chunk:
            if entry_input is None:
                continue
            local_paths, url = entry_input
            # Only parse entries that were actually downloaded
            # (local_paths neither None nor empty).
            if local_paths:
                parser.parse_entry(local_paths, url)
        return True
    except Exception:
        print_exc()
        return False
    finally:
        # Always release this process's own connection.
        connection.close()
# Example #16 (score: 0)
def sub_process_parse(einput):
    """Subprocess worker: parse a chunk of channel entries.

    Returns True on success, False if any exception occurred (the
    traceback is printed). The DB connection is closed on entry and
    again on exit so the child never reuses the parent's socket.
    """
    try:
        # Unnecessary if the parent already closed it,
        # but harmless and makes the isolation explicit.
        connection.close()
        parser_cls, channel_pk, entry_chunk, parse_refs, lock = einput

        parser = import_clazz(parser_cls)(channel_pk, parse_refs, lock)
        for entry_input in entry_chunk:
            if entry_input is None:
                continue
            local_paths, url = entry_input
            # Only parse entries that were actually downloaded
            # (local_paths neither None nor empty).
            if local_paths:
                parser.parse_entry(local_paths, url)
        return True
    except Exception:
        print_exc()
        return False
    finally:
        # Always release this process's own connection.
        connection.close()
# Example #17 (score: 0)
def parse_snippets(pname, source, parser_name):
    """Parse code snippets from a source for the given project."""
    proj = Project.objects.get(dir_name=pname)
    snippet_parser_cls = import_clazz(SNIPPET_PARSERS[parser_name])
    snippet_parser_cls(proj, source).parse(CLILockProgressMonitor())
# Example #18 (score: 0)
def parse_snippets(pname, source, parser_name):
    """Parse code snippets from a source for the given project."""
    proj = Project.objects.get(dir_name=pname)
    snippet_parser_cls = import_clazz(SNIPPET_PARSERS[parser_name])
    snippet_parser_cls(proj, source).parse(CLILockProgressMonitor())