Example #1
    def test_pipeline_doc(self):
        """ Test the generated reST documentation for the pipeline.
        """
        # Generate the writer object
        docwriter = PipelineHelpWriter([self.pipeline_id])
        rstdoc = docwriter.write_api_docs(returnrst=True)
        self.assertTrue(self.pipeline_id in rstdoc)

    def test_process_doc(self):
        """ Test the generated reST documentation for the process.
        """
        # Generate the writer object
        docwriter = PipelineHelpWriter([self.process_id])
        rstdoc = docwriter.write_api_docs(returnrst=True)
        self.assertTrue(self.process_id in rstdoc)
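
Both methods rely on self.pipeline_id and self.process_id, which are set up by the enclosing test case. A minimal sketch of that surrounding class, with placeholder identifiers (the real values and the PipelineHelpWriter import path depend on the project):

import unittest


class TestDocumentation(unittest.TestCase):
    """ Sketch of the enclosing test case: the two methods above would be
    added to this class, together with the PipelineHelpWriter import.
    """

    def setUp(self):
        # Placeholder dotted paths to a pipeline and a process definition
        self.pipeline_id = "mypackage.mymodule.MyPipeline"
        self.process_id = "mypackage.mymodule.MyProcess"


if __name__ == "__main__":
    unittest.main()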
Example #3
        module_name = module_description.split(".")[1]
        sorted_dict.setdefault(module_name, []).append(module_description)

###############################################################################
# Generate pipelines and processes reST API
###############################################################################

# Handle both pipelines and processes:
for sorted_modules, dtype in ([sorted_pipelines, "pipeline"],
                              [sorted_processes, "process"]):

    # Go through all modules
    for module_name, modules in sorted_modules.items():

        # Generate the writer object
        docwriter = PipelineHelpWriter(modules, short_names=short_names)

        # Where the documentation will be written: a relative path from the
        # makefile
        short_name = docwriter.get_short_name(module_name)
        outdir = os.path.join(base_outdir, short_name, dtype)
        print('short name:', short_name, ', outdir:', outdir)

        docwriter.write_api_docs(outdir)

        # Create an index that will be included in the module's main index.
        # The file format doesn't matter since we include it explicitly, but
        # it must prevent Sphinx from converting such files on its own
        docwriter.write_index(
            outdir, "index",
            relative_to=os.path.join(base_outdir, short_name),
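
The sorted_pipelines and sorted_processes mappings consumed by this loop are built by the grouping helper whose tail is shown at the top of the snippet. A self-contained sketch of that grouping step, with made-up module descriptions, shows the shape of the resulting dict:

# Made-up module descriptions; only the second dotted element matters here.
module_descriptions = [
    "pkg.module_a.Pipeline1",
    "pkg.module_a.Pipeline2",
    "pkg.module_b.Pipeline3",
]

sorted_dict = {}
for module_description in module_descriptions:
    module_name = module_description.split(".")[1]
    sorted_dict.setdefault(module_name, []).append(module_description)

print(sorted_dict)
# {'module_a': ['pkg.module_a.Pipeline1', 'pkg.module_a.Pipeline2'],
#  'module_b': ['pkg.module_b.Pipeline3']}

The write_api_docs / write_index calls above then consume one such list per module.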
Example #4
# Sort the pipelines by module
# From each pipeline's full path 'm1.m2.pipeline' get its module name 'm2'
module_names = set([x.split(".")[1] for x in pipelines])
# Sort each pipeline according to its module name.
# The result is a dict of the form 'd[m2] = [pipeline1, pipeline2, ...]'.
sorted_pipelines = dict((x, []) for x in module_names)
for pipeline in pipelines:
    module_name = pipeline.split(".")[1]
    sorted_pipelines[module_name].append(pipeline)

# Generate a PNG representation of each pipeline.
for module_name, module_pipelines in sorted_pipelines.items():

    # This docwriter is just used to manage short names
    docwriter = PipelineHelpWriter([], short_names=short_names)

    # Where the documentation will be written: a relative path from the
    # makefile
    short_name = docwriter.get_short_name(module_name)
    outdir = os.path.join(base_outdir, short_name, "schema")
    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    # Go through all pipelines
    for module_pipeline in module_pipelines:

        # Get pipeline instance
        pipeline_instance = get_process_instance(module_pipeline)

        # Get output files
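
The two-step grouping at the top of this example (collect the set of module names, pre-create empty lists, then append) can be collapsed with collections.defaultdict. A small equivalent sketch, using made-up pipeline paths:

from collections import defaultdict

# Placeholder pipeline paths, not values from the original project
pipelines = ["m1.m2.pipeline1", "m1.m2.pipeline2", "m1.m3.pipeline3"]

sorted_pipelines = defaultdict(list)
for pipeline in pipelines:
    # 'm1.m2.pipeline1' -> group under the second path element 'm2'
    sorted_pipelines[pipeline.split(".")[1]].append(pipeline)

# dict(sorted_pipelines) == {'m2': ['m1.m2.pipeline1', 'm1.m2.pipeline2'],
#                            'm3': ['m1.m3.pipeline3']}

Either form produces the same 'd[m2] = [pipeline1, ...]' mapping that the rest of the example iterates over.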