Example no. 1
    def _write_arguments(cls, file: MdUtils, title: str,
                         args: List[SerializedArgument]):
        def _process_flag(arg: SerializedArgument) -> str:
            flag = arg.flags
            if flag and arg.choices:
                return f'{flag}={arg.choices}'
            if flag and arg.name:
                return f'{flag}={arg.name}'
            return arg.name if arg.name else flag

        def _process_description(arg: SerializedArgument) -> str:
            description = arg.description.replace('*', r'\*')
            description += '. Multiple arguments' if arg.nargs else ''
            return f'{description}. Default: `{arg.default}`' if arg.default else description

        if not args:
            return

        file.new_header(level=3, title=title, add_table_of_contents='n')
        for arg in args:
            description = _process_description(arg).replace('..', '.')
            file.new_header(level=5,
                            title=f'`{_process_flag(arg)}`',
                            add_table_of_contents='n')
            file.new_paragraph(description)
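For context, a minimal sketch of the argument container this helper assumes. The field names below are simply inferred from the attributes the code reads (flags, choices, name, nargs, default, description); the real project defines its own SerializedArgument.

from dataclasses import dataclass
from typing import Optional

@dataclass
class SerializedArgument:
    # only the fields that _write_arguments actually reads
    flags: str = ''
    name: str = ''
    description: str = ''
    choices: str = ''
    nargs: Optional[str] = None
    default: Optional[str] = None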
Example no. 2
def createMarkdownFile(pz_releases, issues, markdownFileName):
    # title
    md_title = "PPG Services Release Summary ({}, {})".format(
        dayOfWeek, date_str)
    # Header
    mdFile = MdUtils(file_name=markdownFileName, title=md_title)

    # for each release line, add issues
    releases = sorted(pz_releases.keys(), reverse=True)
    for r in releases:
        release = pz_releases.get(r)
        mdFile.new_line()
        mdFile.new_line()
        mdFile.new_header(level=1,
                          title='Release {} ({}):'.format(
                              release[1], release[2]))
        for issue in issues:
            if issue.fields.customfield_10414[0] == release[1]:
                text = '{}: {}.\n'.format(issue.key, issue.fields.summary)
                mdFile.new_line(text)
    mdFile.new_line()

    # Write to markdown document
    mdFile.create_md_file()

    # create html file
    with open('{}.md'.format(markdownFileName), "r",
              encoding="utf-8") as input_file:
        text = input_file.read()
        html = markdown.markdown(text)
    with open('{}.html'.format(markdownFileName),
              "w",
              encoding="utf-8",
              errors="xmlcharrefreplace") as output_file:
        output_file.write(html)
Example no. 3
def generate_report():
    df_final = predictionsPerSecond()
    explode = (0.1, 0.1, 0.1)
    plt.pie(df_final.Label.value_counts(),
            explode=explode,
            labels=df_final.Label.value_counts().index.to_list(),
            autopct='%1.1f%%',
            shadow=True,
            startangle=90)
    plt.title('Tiempo de uso de mascarilla')
    plt.savefig('predictions.png')
    mdFile = MdUtils(
        file_name='report',
        title='Reporte de predicción de uso correcto de mascarilla')
    mdFile.new_line(
        mdFile.new_inline_image(text='Predicciones', path='predictions.png'))
    mdFile.new_header(title='Tablas de resultados', level=1)
    mdFile.new_line('Juan Pablo Carranza Hurtado')
    mdFile.new_line('José Alberto Ligorría Taracena')
    mdFile.create_md_file()
    f = open("report.html", "w")
    f.write(markdown2.markdown_path('report.md'))
    f.write(pd.crosstab(df_final.Time, df_final.Label).to_html())
    f.write('<h1> Cantidad de segundos de utilización de mascarilla </h1>')
    f.write(pd.DataFrame(df_final.Label.value_counts()).to_html())
    f.close()
Example no. 4
def main():
    md_file = MdUtils(file_name='hacker_news', title='temp')
    md_file.new_header(level=1, title='Daily Hacker News')

    stories = get_top_stories()
    for i, v in enumerate(stories):
        item = get_topic_item(v)
        id = item['id']
        title = item['title']
        url = item.get('url', None)
        if url:
            domain = url_parser(url)
            md_file.new_line(
                '{}. {} `{}` [`comments`](https://news.ycombinator.com/item?id={})'
                .format(
                    str(i + 1),
                    md_file.new_inline_link(link=url,
                                            text=title,
                                            bold_italics_code='b'), domain,
                    str(id)))
        else:
            link = 'https://news.ycombinator.com/item?id={}'.format(str(id))
            md_file.new_line('{}. {} [`comments`]({})'.format(
                str(i + 1),
                md_file.new_inline_link(link=link,
                                        text=title,
                                        bold_italics_code='b'), link))

    issue_body = md_file.file_data_text
    date = get_date()
    issue_url = create_issue('Daily Hacker News {}'.format(date), issue_body)
Example no. 5
def generateShapeWorksCommandDocumentation(mdFilename = '../../docs/tools/ShapeWorksCommands.md', add_toc = False):
    
    # settings from Executable.cpp
    opt_width  = 32
    indent     = 2
    spacedelim = ''.ljust(indent)

    mdFile        = MdUtils(file_name = mdFilename, title = '')
    mdFile.new_header(level = 1, title = 'ShapeWorks Commands')
    
    # add intro paragraph
    intro_paragraph = "`shapeworks` is a single executable for ShapeWorks with a set of sub-executables (commands) that are flexible, modular, loosely coupled, and standardized subcommands, with interactive help to perform individual operations needed for a typical shape modeling workflow that includes the Groom, Optimize, and Analyze phases.\n"
    intro_paragraph = intro_paragraph + "!!! danger " +  "\"" + "Activate shapeworks environment" + "\"" + "\n"
    intro_paragraph = intro_paragraph + "\t Each time you use ShapeWorks from the command line, you must first activate its environment using the `conda activate shapeworks` command on the terminal. \n"
    
    intro_paragraph = intro_paragraph + "!!! danger " +  "\"" + "Add shapeworks to your path" + "\"" + "\n"
    intro_paragraph = intro_paragraph + "\t Please make sure that `shapeworks` is in your path. See [Adding to PATH Environment Variable](../dev/paths.md). \n"
    
    
    mdFile.new_paragraph(intro_paragraph)
    
    if add_toc:
        intro_marker = mdFile.create_marker(" ") # mark the after-intro to add table of contents after the introduction paragraph
    
    cmd           =  "shapeworks"    
    CommandsUtils.addCommand(mdFile, cmd, level = 2, spacedelim = spacedelim, verbose = True)
    
    if add_toc:
        mdFile.new_table_of_contents(table_title='Table of Contents', depth=3, marker = intro_marker)
    
    mdFile.create_md_file()
Example no. 6
def generateReadme():                                          # defining a function to generate the readme file 
    mdFile = MdUtils(file_name = "LOCAL_README.md")
    mdFile.new_header(level=1, title="Compilation Of DSC-RAIT resources")
    mdFile.new_paragraph("This is a compiled list of resources shared on the DSC-RAIT Discord Server!")
    for d in db.resources.find():                                                  
        '''
        db.resources.find() queries the database and returns all the data stored
        in the form of an iterable which contains all the domains and links present 
        under each domain, so we are looping through the iterable to insert each domain 
        and its respective resources in the readme file. Each element of the iterable
        is a python dictionary (in this case, it is 'd')
        {
            'domain': <domain name>, 
            'links': [
                {'info': <link-info1>,  'link': <link1>}, 
                {'info': <link-info2>,  'link': <link2>}, 
                ....
                ]
        } 
        ''' 
        mdFile.new_header(level = 2, title = d['domain'])
        for l in d['links']:                                                          
            mdFile.new_paragraph(text= f"{l['info']}: {l['link']}")
    mdFile.create_md_file()
    text = mdFile.read_md_file(file_name = "LOCAL_README.md")   # Read the created README file and return its contents as a string
    return text
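The docstring above documents the shape of each document in db.resources. For illustration only, a hypothetical seed insert matching that shape (the real collection is populated elsewhere, and the domain and links below are made up) might look like:

db.resources.insert_one({
    'domain': 'Machine Learning',
    'links': [
        {'info': 'Beginner course', 'link': 'https://example.com/ml-course'},
        {'info': 'Cheat sheet', 'link': 'https://example.com/ml-cheatsheet'},
    ],
})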
Example no. 7
    def report(self, reports):
        xml_dict = parsexmlfile(self.files[0])
        result = json.dumps(xml_dict)
        nmap_results = json.loads(result)
        ports = nmap_results["nmaprun"]["host"]["ports"]
        #cpe, portid, product, name, version, hosts/up
        if 'port' in ports:
            open_ports = ports["port"]
        else:
            open_ports = []

        # temp
        self.logger.info("Creating report for " + self.name)
        outfile = f"{self.reportdir}/{self.name}.md"
        title = f"PENSEC - {self.name.capitalize()} Report"
        reportfile = MdUtils(file_name=outfile, title=title)
        reportfile.new_header(level=1, title="Common Statistics")
        reportfile.new_paragraph(f"{len(open_ports)} open ports\n")
        if len(open_ports) > 0:
            reportfile.new_header(level=2, title="Open Ports")
            # list with open ports, cpe, etc
        reportfile.create_md_file()
        self.logger.info("Report saved in " + outfile)

        return {"open_ports": open_ports}
Example no. 8
def gen_api_markdown(target_api_class: Any,
                     output_file_path: str,
                     extra_head: str = ""):
    md_file = MdUtils(file_name=output_file_path)

    if extra_head:
        md_file.write(extra_head)

    md_file.new_header(level=1, title="")

    for method in get_own_methods(target_api_class):
        method_name = method.__name__
        md_file.new_header(level=2, title=method_name)

        docstring = method.__doc__
        if docstring:
            md_file.write(f"\n{method.__doc__}\n")

        parameter_table = ["参数名称", "类型", "默认值"]

        for parameter in inspect.signature(method).parameters.values():
            if parameter.name == "self":
                continue

            row = [parameter.name]

            if isinstance(parameter.annotation, str):
                row.append(parameter.annotation)
            else:
                if get_origin(parameter.annotation) is Union:
                    # group_msg = group_message
                    continue

                row.append(parameter.annotation.__name__)

            if parameter.default is inspect.Parameter.empty:
                row.append("无")
            else:
                if parameter.default:
                    row.append(parameter.default)
                else:
                    row.append("无")

            parameter_table.extend(row)

        # debug(parameter_table)

        # md_file.new_line()
        if len(parameter_table) > 3:
            md_file.new_table(
                columns=3,
                rows=int(len(parameter_table) / 3),
                text=parameter_table,
                text_align="center",
            )

    md_file.create_md_file()
    print(f"成功生成{output_file_path}文件")
Example no. 9
    def main(self):
        mdFile = MdUtils(file_name='sitelist', title='Scraper Site List')
        mdFile.new_header(level=1, title='Sites')
        data = self.loop_spiders()
        mdFile.new_line()
        mdFile.new_table(columns=4,
                         rows=int(len(data) / 4),
                         text=data,
                         text_align='center')
        mdFile.create_md_file()
Example no. 10
    def create_readme(self):
        '''saves a .md file with a description of a maximal setup

        TODO
        minimal readme just for subset making
            - n articles
            - regex patterns used for subsetting
        '''

        readme = MdUtils(file_name=os.path.join(self.work_dir_path, 'README'),
                         title='Subset readme')

        # produce content
        date_generated = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

        # SECTION 1: DATE GENERATED
        readme.new_paragraph()
        readme.new_header(level=1, title='Date', style='setext')
        readme.new_line(date_generated)

        # SECTION 2: FOLDER STRUCTURE SPECIFICATION
        readme.new_paragraph()
        readme.new_header(level=1, title='File structure', style='setext')
        # top folder: workdir
        readme.new_line('```')
        readme.new_line('{}/'.format(os.path.basename(self.work_dir_path)))
        readme.new_line('├── data_subset/  #raw data used for this analysis')
        readme.new_line('│   └── *.ndjson')
        readme.new_line('├── preprocessed/  #preprocessed data')
        readme.new_line('│   └── *.ndjson')
        readme.new_line('├── model_input/  #dataset used for topic modeling')
        readme.new_line('│   └── *.ndjson')
        readme.new_line('├── mdl_lda/  #outputs of LDA')
        readme.new_line('│   ├── model_states/  #serialized trained LDAs')
        readme.new_line('│   │   └── *.pkl')
        readme.new_line('│   ├── topic_overviews/  #top terms per topic')
        readme.new_line('│   │   └── *.txt')
        readme.new_line('│   ├── doc_top_mat/  #document-topic matrices')
        readme.new_line('│   │   └── *.ndjson')
        readme.new_line('│   ├── pyldavis/  #pyldavis plots')
        readme.new_line('│   │   └── *.html')
        readme.new_line(
            '│   └── model_comparison.png  #elbow plot of n topics and coherence'
        )
        readme.new_line(
            '└── mdl_ntr/  #outputs of Novelty, Transience, Resonance')
        readme.new_line('    └── w{number}/  #results at window n')
        readme.new_line('        ├── *.csv')
        readme.new_line('        └── fig/  #plots of results at that window')
        readme.new_line('            └── *.png')
        readme.new_line('```')
        readme.new_line()

        readme.create_md_file()
Example no. 11
def generate_report():
    report = MdUtils(file_name=f'./report.md')
    report.new_header(level=1, title='Выделение признаков символов')
    report.new_line(text='Выполнил Ахманов Алексей Б18-514')
    report.new_line(text=f'Алфавит - {ALPHABET}')

    for letter in ALPHABET:
        letter_folder_path = f'./{folder_helper.IMAGES_FOLDER_PATH}/letter_{letter}'
        os.makedirs(letter_folder_path, exist_ok=True)
        letter_image_path = f'{letter_folder_path}/{letter}.png'
        letter_image = Image.new(mode=constants.GRAYSCALE_MODE, size=(FONT_SIZE, FONT_SIZE), color=WHITE)
        result = ImageDraw.Draw(im=letter_image, mode=constants.GRAYSCALE_MODE)
        result.text(xy=(0, 0), text=letter, font=FONT, fill=0, anchor='lt')
        letter_image = cut_empty_rows_and_cols(letter_image)
        letter_image.save(letter_image_path)
        report.new_header(level=2, title=f'Буква {letter}')
        report.new_line(report.new_inline_image(text=letter, path=letter_image_path))
        thresholded = simple_threshold(letter_image, 100)
        report.new_line(text=f'Вес черного - {black_weight(thresholded)}')
        report.new_line(text=f'Удельный вес черного - {normalized_black_weight(thresholded)}')
        center = gravity_center(thresholded)
        report.new_line(text=f'Координаты центра масс - ({center[0]}, {center[1]})')
        normalized_center = normalized_gravity_center(thresholded)
        report.new_line(text=f'Нормированные координаты центра масс - ({normalized_center[0]}, {normalized_center[1]})')
        report.new_line(text=f'Центральный горизонтальный осевой момент - {central_horizontal_axial_moment(thresholded)}')
        report.new_line(text=f'Центральный вертикальный осевой момент - {central_vertical_axial_moment(thresholded)}')
        report.new_line(text=f'Нормированный центральный горизонтальный осевой момент -'
                             f'{normalized_central_horizontal_axial_moment(thresholded)}')
        report.new_line(text=f'Нормированный центральный вертикальный осевой момент -'
                             f'{normalized_central_vertical_axial_moment(thresholded)}')

        h_levels, h_projections = horizontal_projection(thresholded)
        pyplot.plot(h_levels, h_projections)
        pyplot.title(f'Horizontal projection {letter}')
        path = f'{letter_folder_path}/horizontal_projection_{letter}.png'
        pyplot.savefig(path)
        pyplot.close()

        report.new_line(report.new_inline_image(text=letter, path=path))

        v_levels, v_projections = vertical_projection(thresholded)
        pyplot.plot(v_levels, v_projections)
        pyplot.title(f'Vertical projection {letter}')
        path = f'{letter_folder_path}/vertical_projection_{letter}.png'
        pyplot.savefig(path)
        pyplot.close()

        report.new_line(report.new_inline_image(text=letter, path=path))

        report.new_line()

    report.create_md_file()
Example no. 12
def generateReadme():
    mdFile = MdUtils(file_name="LOCAL_README.md")
    mdFile.new_header(level=1, title="Compilation Of DSC-RAIT resources")
    mdFile.new_paragraph(
        "This is a ``README.md`` file generated by asmrPy to test it's capabilities. Stay tuned for more updates!"
    )
    for d in db.resources.find():
        mdFile.new_header(level=2, title=d['domain'])
        for l in d['links']:
            mdFile.new_paragraph(text=f"{l['info']}: {l['link']}")
    mdFile.create_md_file()
    text = mdFile.read_md_file(file_name="LOCAL_README.md")
    return text
Example no. 13
    def create_report(self, reports, sorted_tools):
        outfile = f"{self.outdir}/reports/Report.md"
        title = f"PENSEC - Report of {self.target.hostname}"
        reportfile = MdUtils(file_name=outfile, title=title)

        # "Execute Summary"
        reportfile.new_header(level=3, title="Common Statistics")
        for tool in sorted_tools:
            tool.write_report_summary(reportfile, reports)
        # "Technical Details"
        for tool in sorted_tools:
            tool.write_report(reportfile, reports)

        reportfile.create_md_file()
        self.logger.info("Report saved in " + outfile)
Example no. 14
def create_file(files):
    data = get_data(files)
    md_file = MdUtils(file_name='repositories')
    md_file.new_header(level=1, title='Repositories')
    grouped_by_type = groupby(data, key=itemgetter('type'))
    for key, value in grouped_by_type:
        value_sorted = sorted(value, key=lambda x: x['name'])
        md_file.new_header(level=2, title=key)
        if key == 'Reading':
            write_reading_entries(value_sorted, md_file)
        else:
            for item in value_sorted:
                write_item(item, md_file)
        md_file.new_line()
    md_file.create_md_file()
Example no. 15
def generate_singular_templates(bib_data):
    """
    Parses a .bibtex file and creates a singular
    markdown write-up template for each entry.

    Args:
        bib_data: A pybtex parsed file

    Returns:
        Outputs singular markdown write-up templates

    """

    for entry in tqdm(bib_data.entries.values()):
        title = entry.fields['title']
        year = entry.fields['year']
        url = entry.fields['url']

        authors = str(entry.persons['author'][0])

        # look the journal up per entry so a value left over from a previous
        # iteration is never reused for an entry that has no journal
        journal = entry.fields.get('journal')

        file_name = authors.lower() + '-' + str(year)
        if journal:
            title = authors + ', ' + title + ', ' + year + ', ' + journal
        else:
            title = authors + ', ' + title + ', ' + year

        mdfile = MdUtils(file_name=file_name, title=title)

        # Add the correct header information
        mdfile.new_header(level=1, title='TLDR')
        mdfile.new_header(level=1, title='Links')
        mdfile.new_line('Paper - ' +
                        mdfile.new_inline_link(text='Link to Paper', link=url))
        mdfile.new_header(level=1, title='Summary')
        mdfile.new_header(level=1, title='Comments')
        mdfile.new_header(level=1, title='Reference')

        mdfile.insert_code(entry.to_string(bib_format='bibtex'))

        mdfile.create_md_file()
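A short driver for the function above, assuming pybtex is installed and a local library.bib file exists (both are assumptions; the original project wires this up elsewhere):

from pybtex.database import parse_file

bib_data = parse_file('library.bib')   # pybtex-parsed .bib file, as the docstring expects
generate_singular_templates(bib_data)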
Example no. 16
def gen_kwargs_markdown(mapping: T_HandlerKwargMapping,
                        output_file_path: str,
                        extra_head: str = ""):
    md_file = MdUtils(file_name=output_file_path)

    if extra_head:
        md_file.write(extra_head)

    md_file.new_header(level=1, title="")

    for event_name, kwargs in mapping.items():
        md_file.new_header(level=2, title=event_name)

        parameter_table = ["参数名称", "类型"]

        for kwarg in kwargs:

            row = [kwarg.name]

            if isinstance(kwarg.type_, str):
                row.append(kwarg.type_)
            else:
                # if get_origin(parameter.annotation) is Union:
                #     # group_msg = group_message
                #     continue

                row.append(kwarg.type_)

            parameter_table.extend(row)

        # debug(parameter_table)

        if len(parameter_table) > 2:
            md_file.new_table(
                columns=2,
                rows=int(len(parameter_table) / 2),
                text=parameter_table,
                text_align="center",
            )

    md_file.create_md_file()
    print(f"成功生成{output_file_path}文件")
Example no. 17
def publish_event_page(logs, website_src_path):
    # Generate the about file.
    mdFile = MdUtils(
        file_name=os.path.join(website_src_path, "content", "log"),
        title="Container Event Logging",
    )

    status_table = ["Container Action", "Time (UTC)"]
    for entry in logs:
        container_status = (("~~" +
                             entry[0].attrs["Config"]["Image"].split("/")[0] +
                             "~~") if entry[2] == "destroyed" else
                            ("**" +
                             entry[0].attrs["Config"]["Image"].split("/")[0] +
                             "**"))
        status_table.extend([
            container_status,
            strf_time_diff(datetime.now(), entry[1]) + " ago"
        ])
    mdFile.new_line()

    mdFile.new_table(columns=2,
                     rows=len(logs) + 1,
                     text=status_table,
                     text_align="center")

    mdFile.new_header(level=5, title="Status Update Time")

    tz_table = ["TZ", "Time"]
    for city, tz in timezones.items():
        tz_table.extend([city, datetime.now().astimezone(tz).strftime(fmt)])

    mdFile.new_table(columns=2,
                     rows=len(timezones) + 1,
                     text=tz_table,
                     text_align="center")

    mdFile.create_md_file()

    return None
Example no. 18
    def report(self, reports):
        report_dict = reports[Tool.Dependencies.NMAP_SERVICES]
        self.logger.info("Creating report for " + self.name)
        obj = {}
        for f in self.files:
            with open(f, "r") as scanfile:
                for s in scanfile.read().split("\n\n")[:-1]:
                    scan = json.loads(s)
                    if 'RESULTS_EXPLOIT' in scan and 'SEARCH' in scan:
                        obj[scan["SEARCH"]] = {
                            "exploits": scan["RESULTS_EXPLOIT"]
                        }
        # temp
        outfile = f"{self.reportdir}/{self.name}.md"
        title = f"PENSEC - {self.name.capitalize()} Report"
        reportfile = MdUtils(file_name=outfile, title=title)
        reportfile.new_header(level=1, title="Common Statistics")
        reportfile.new_paragraph(f"X exploits found\n")
        reportfile.create_md_file()
        self.logger.info("Report saved in " + outfile)
        # report_dict["searchsploit_info"] = obj
        return obj
Example no. 19
def publish_status_page(running_containers, website_src_path):
    # Generate the about file.
    mdFile = MdUtils(
        file_name=os.path.join(website_src_path, "content", "status"),
        title="System Status",
    )

    status_table = ["Container", "Status"]
    for container in sorted(
            running_containers,
            key=lambda c: c.attrs["Config"]["Image"].split("/")[0]):
        status_table.extend([
            container.attrs["Config"]["Image"].split("/")[0],
            container.attrs["State"]["Status"],
        ])
    mdFile.new_line()

    mdFile.new_table(
        columns=2,
        rows=len(running_containers) + 1,
        text=status_table,
        text_align="center",
    )

    mdFile.new_header(level=5, title="Status Update Time")

    tz_table = ["TZ", "Time"]

    for city, tz in timezones.items():
        tz_table.extend([city, datetime.now().astimezone(tz).strftime(fmt)])

    mdFile.new_table(columns=2,
                     rows=len(timezones) + 1,
                     text=tz_table,
                     text_align="center")

    mdFile.create_md_file()

    return None
Example no. 20
    def test_new_header(self):
        file_name = 'Test_file'
        md_file = MdUtils(file_name)
        string_headers_expected = "\n# Header 0\n\n## Header 1\n\n### Header 2\n\n#### Header 3\n\n" \
                                  "##### Header 4\n\n###### Header 5\n"
        string_headers = ""
        for x in range(6):
            string_headers += md_file.new_header(level=(x + 1), title='Header ' + str(x), style='atx')

        self.assertEqual(string_headers, string_headers_expected)
        md_file.create_md_file()
        file_result = md_file.read_md_file(file_name)
        self.assertEqual(file_result, '\n\n\n' + string_headers_expected)
Example no. 21
def write_md(json_obj, md_fn, overwrite=True, wire_obj=True):
    if not overwrite:
        if os.path.isfile(md_fn):
            return
    print('->', md_fn)

    md_title = json_obj['title']
    mdFile = MdUtils(file_name=md_fn,title=md_title)

    desc = md_title
    if 'description' in json_obj:
        desc = json_obj['description']
    mdFile.new_paragraph(desc)

    if wire_obj:
        mdFile.new_paragraph('All wire objects have a set of basic attributes ```{object_id, action, type, persist, data}```. The ```data``` attribute defines the object-specific attributes')

    mdFile.new_header(level=2, title=f'\n{md_title} Attributes', style='setext', add_table_of_contents='n')

    object_table(mdFile, md_title, json_obj)

    if 'properties' not in json_obj:
        mdFile.create_md_file()
        return

    if 'data' not in json_obj['properties']:
        mdFile.create_md_file()
        return

    mdFile.new_header(level=3, title=f'{md_title} Data Attributes', add_table_of_contents='n')

    if '$ref' in json_obj['properties']['data']:
        obj_name = json_obj['properties']['data']['$ref'][len('#/definitions/'):]
        object_table(mdFile, md_title, json_obj['definitions'][obj_name], json_obj['definitions'])
    else:
        object_table(mdFile, md_title, json_obj['properties']['data'], json_obj['definitions'])

    mdFile.create_md_file()
Example no. 22
    def asMarkdown(self) -> MarkdownDocument:
        """Return document as markdown."""
        # TODO: example, needs to be dynamic

        mdFile = MdUtils(file_name="file1.md", title="Schema")
        mdFile.new_header(level=1, title=self.title)

        # document meta headings
        documentMeta = ["Information", "Value"]
        # document meta rows
        documentMeta.extend(["Schema Id", self.id])
        documentMeta.extend(["Title", self.title])

        mdFile.new_table(columns=2, rows=3, text=documentMeta)

        # TODO: generate code snippet sections like those in the gantree docs as of 2020/03/29
        mdFile.new_paragraph('```jsonc\n{\n  "example": "text"\n}\n```')

        props = self.schema["properties"]

        resolved = resolve_all(props, None)

        print("resolved:", resolved)

        # style is set 'atx' format by default.

        # list_of_strings = ["Name", "Value"]
        # for x in range(5):
        #     list_of_strings.extend(["Schema ID", schema_id])
        # mdFile.new_line()
        # mdFile.new_table(
        #     columns=2, rows=6, text=list_of_strings, text_align="center"
        # )

        filename = mdFile.file_name
        content = mdFile.file_data_text

        return {"filename": filename, "content": content}
Example no. 23
    def create_markdown(file_path, title, publication_date="None"):
        mdFile = MdUtils(file_name=file_path, title= title)
        
        mdFile.new_line("  [@wikidata:" + wd_id + "]")
        
        mdFile.new_line() 
        if publication_date != "None":
            mdFile.new_line("Publication date : " + str(publication_date))

        mdFile.new_line() 
        mdFile.new_header(1, "Highlights")
        mdFile.new_header(1, "Comments")
        mdFile.new_header(2, "Tags")
        mdFile.new_header(1, "Links")
        mdFile.new_line(" * [Scholia Profile](https://scholia.toolforge.org/work/" + wd_id + ")")
        mdFile.new_line(" * [Wikidata](https://www.wikidata.org/wiki/" + wd_id + ")")
        mdFile.new_line(" * [TABernacle](https://tabernacle.toolforge.org/?#/tab/manual/" + wd_id + "/P921%3BP4510)")
        mdFile.new_line() 
        mdFile.create_md_file()
Example no. 24
def makeMd():
    main()
    lastDate = 0
    lastMonth = 0
    lastYear = 0
    lastTime = ""
    os.chdir(saveLocation)
    mdOutputLocation = saveLocation + 'logbook.md'
    if os.path.exists(mdOutputLocation):  # If the markdown output file already exists, remove it so it can be regenerated
        os.remove(mdOutputLocation)

    mdFile = MdUtils(file_name='logbook',title='My Logbook')
    with open(outputFileLocation, "r") as textDoc:
        lines = textDoc.readlines()
    for line in lines:
        firstTwoDigits = line[:2]
        if firstTwoDigits.isdigit():

            date = line[:2]
            month = line[3:5]
            year = line[6:10]
            lastTime = line[11:19]

            if year != lastYear:
                mdFile.new_header(level=1, title=year)
                lastYear = year

            if month != lastMonth:
                monthWorded = monthToWords(month)
                mdFile.new_header(level=2, title=monthWorded)
                lastMonth = month

            if date != lastDate:
                mdFile.new_header(level=3, title=date)
                lastDate = date
        if not firstTwoDigits.isdigit() and line != "\n":
            entryContent = lastTime + " - " + line
            mdFile.new_paragraph(entryContent)
    mdFile.create_md_file()
    print("md file created")
Example no. 25
from mdutils.mdutils import MdUtils

mdFile = MdUtils(file_name='league01', title='Grundle Me This')
mdFile.new_header(level=2, title='ROS Valuations', add_table_of_contents='n')

list_of_strings = ["Rank", "Player", "Value"]
player = ["AB", "asdf", "asdfad"]
value = [60, 45, 12]
for x in range(3):
    list_of_strings.extend([str(x), player[x], str(value[x])])
mdFile.new_table(columns=3, rows=4, text=list_of_strings, text_align='center')

mdFile.new_header(level=3, title='Your Roster', add_table_of_contents='n')
mdFile.new_header(level=3,
                  title='Available Free Agents',
                  add_table_of_contents='n')

mdFile.new_header(level=2, title='Weekly Tiers', add_table_of_contents='n')
mdFile.new_header(level=3,
                  title='Suggested Starting Lineup',
                  add_table_of_contents='n')
mdFile.new_header(level=3,
                  title='Substitutions Necessary',
                  add_table_of_contents='n')
mdFile.new_header(level=3,
                  title='Skill Streams Available',
                  add_table_of_contents='n')

mdFile.new_header(level=2,
                  title='Weekly Streaming Advice',
                  add_table_of_contents='n')
Example no. 26
    user_agent="redditcoronav1",
)
subreddit = reddit.subreddit("Coronavirus")
submission = reddit.submission(id="fgy7rg")
submission.comments.replace_more(limit=0)
all_comments = submission.comments
found = 0
mdFile = MdUtils(file_name="test", title="Reddit ama corona virus")
for comment in all_comments:
    if len(comment.replies) > 0:
        for reply in comment.replies:
            if reply.author == "Emergencydocs":
                found = 1
                print(20 * "=")
                print("QUESTION: ")
                mdFile.new_header(level=1, title=comment.body)
                print(comment.body)
                print("ANSWER: ")
                mdFile.new_paragraph(reply.body)
                print(reply.body)
            else:
                continue

mdFile.create_md_file()

# hot_python = subreddit.hot()
# for submission in hot_python:
#     # check for removing pinned posts...
#     if not submission.stickied:
#         print(
#             "Title: {}, ups:{}, downs:{}, Have We visited: {}".format(
Example no. 27
I've learned that creating something `fun` will help you to retain all the learning steps taken to develop something. \n
During the workflow, we will use a simple bash script to simulate the `roll` and log the output to a text file.\n
That text file will then be stored in the repository and parsed to display at the bottom of this README file. \n
I wanted to demonstrate how utilizing `MdUtils` will make writing Markdown files extremely easy. \n

What I tested during this process: \n
- Saving diceroll.txt to an artifact. \n
- Saving diceroll.txt as a cache/key. \n
- Establishing a lambda call utilizing an AWS API Gateway. \n
In the end, I chose to save the file in the repository to allow anyone to pick up the dice as the shooter. \n.
I'll expand more in my blog but for now, let's start rolling! \n
New Shooooooooooooooooooootaaaaa!!"
"""

# style is set 'atx' format by default.
mdFile.new_header(level=1, title='Overview')
mdFile.new_paragraph(string)

#Overview information
mdFile.new_header(level=1, title='Let\'s Play!')
mdFile.new_paragraph(
    "Welcome to Casino Del Ray. This is a simple game of Craps that incorporates some of the sayings that you would normally hear around the table."
    " If you’ve never played craps, you’re seriously missing out (unless you’re coding, in which case you will inevitably make more $$ doing that in the long run)."
)

mdFile.new_paragraph(
    "**IMPORTANT:** This game will not teach you <ins>strategery</ins> or make you a pro. All bets are off..... (I'm not liable for any real $$ lost at the table)",
    bold_italics_code='bi',
    color='purple')
# How to play
mdFile.new_header(level=1, title="Rules of the Game")
Example no. 28
diff_messages = list(map(lambda x: x.commit.message.split('\n', 1)[0], diff))

# Delete unwanted commit messages from changelog
unwanted_commits = [
    'update changelog', 'update image tag in docker compose', 'merge branch.*'
]

temp = '(?:%s)' % '|'.join(unwanted_commits)

for message in list(diff_messages):
    if re.match(temp, message.strip().lower()):
        diff_messages.remove(message)

# Update CHANGELOG.md
changelog_new = MdUtils(file_name='')
changelog_new.new_header(level=1, title=new_version)
changelog_new.new_list(diff_messages)
changelog_new.write('  \n')
changelog_before = MdUtils(file_name='').read_md_file(file_name='CHANGELOG.md')
MarkDownFile('/tmp/CHANGELOG.md').rewrite_all_file(
    changelog_before + changelog_new.file_data_text)

changelog_contents = repo.get_contents("/CHANGELOG.md")

with open('/tmp/CHANGELOG.md', 'rb') as f:
    repo.update_file(changelog_contents.path, 'Update CHANGELOG', f.read(),
                     changelog_contents.sha)

# Update docker-compose.yml file
with open('docker-compose/docker-compose.yaml', encoding='UTF-8') as f:
    compose_file = safe_load(f)
Example no. 29
class RegisterBank(vhdl.BasicVHDL):
    def __init__(self, entity_name, architecture_name, datasize, RegisterNumber):
        vhdl.BasicVHDL.__init__(self, entity_name, architecture_name)
        self.generate_code = False
        self.reg = RegisterList()
        self.datasize = pow(2,math.ceil(math.log(datasize, 2)))
        self.addr_low = math.log(self.datasize/8,2)
        self.addr_increment = math.ceil(pow(2,self.addr_low))
        self.addrsize = math.ceil(math.log(RegisterNumber, 2) + self.addr_low )
        self.useRecords = False

        #aux files
        self.document = MdUtils(file_name="output/"+entity_name, title='Register Bank: %s' % entity_name)
        self.version = datetime.now().strftime("%Y%m%d_%H%M")

        #Companion Package
        self.pkg = pkgvhdl.PkgVHDL(entity_name + "_pkg")
        self.pkg.library.add("IEEE")
        self.pkg.library["IEEE"].package.add("std_logic_1164")
        self.pkg.library["IEEE"].package.add("numeric_std")
        self.pkg.packageDeclaration.constant.add("package_version_c", "String", "\"%s\"" % self.version)

        # Libraries
        self.library.add("IEEE")
        self.library["IEEE"].package.add("std_logic_1164")
        self.library["IEEE"].package.add("numeric_std")
        self.library.add("expert")
        self.library["expert"].package.add("std_logic_expert")
        self.work.add(self.pkg.name)

        # Generics
        self.entity.generic.add("C_S_AXI_ADDR_WIDTH", "integer", str(self.addrsize))
        self.entity.generic.add("C_S_AXI_DATA_WIDTH", "integer", str(self.datasize))

        # Architecture
        # Constant
        self.architecture.constant.add("register_bank_version_c", "String", "\"%s\"" % self.version)
        self.architecture.constant.add("C_S_AXI_ADDR_BYTE", "integer", "(C_S_AXI_DATA_WIDTH/8) + (C_S_AXI_DATA_WIDTH MOD 8)")
        self.architecture.constant.add("C_S_AXI_ADDR_LSB", "integer", str(math.ceil(self.addr_low)))
        self.architecture.constant.add("REG_NUM", "integer", "2**(C_S_AXI_ADDR_WIDTH-C_S_AXI_ADDR_LSB)")
        # Custom type
        self.architecture.customTypes.add("reg_t", "Array", "REG_NUM-1 downto 0", "std_logic_vector(C_S_AXI_DATA_WIDTH-1 downto 0)")
        # Signals
        self.architecture.signal.add("awaddr_s", "std_logic_vector(C_S_AXI_ADDR_WIDTH-1 downto 0)")
        self.architecture.signal.add("awready_s", "std_logic")
        self.architecture.signal.add("wready_s", "std_logic")
        self.architecture.signal.add("wtimeout_sr", "std_logic_vector(15 downto 0)", "( 0 => '1', others=>'0')")
        self.architecture.signal.add("wtimeout_s", "std_logic")

        self.architecture.signal.add("bresp_s", "std_logic_vector(1 downto 0)")
        self.architecture.signal.add("bvalid_s", "std_logic")
        self.architecture.signal.add("bresp_timer_sr", "std_logic_vector(15 downto 0)", "( 0 => '1', others=>'0')")
        self.architecture.signal.add("wtimeout_s", "std_logic")

        self.architecture.signal.add("araddr_s", "std_logic_vector(C_S_AXI_ADDR_WIDTH-1 downto 0)")
        self.architecture.signal.add("arready_s", "std_logic")

        self.architecture.signal.add("rresp_s", "std_logic_vector(1 downto 0)")
        self.architecture.signal.add("rvalid_s", "std_logic")
        self.architecture.signal.add("rtimeout_sr", "std_logic_vector(15 downto 0)", "( 0 => '1', others=>'0')")
        self.architecture.signal.add("rtimeout_s", "std_logic")

        self.architecture.signal.add("regwrite_s", "reg_t", "(others=>(others=>'0'))")
        self.architecture.signal.add("regread_s", "reg_t", "(others=>(others=>'0'))")
        self.architecture.signal.add("regclear_s", "reg_t", "(others=>(others=>'0'))")
        self.architecture.signal.add("regset_s", "reg_t", "(others=>(others=>'0'))")

        self.architecture.signal.add("regread_en", "std_logic")
        self.architecture.signal.add("regwrite_en", "std_logic")

        self.architecture.bodyCodeHeader.add("assert register_bank_version_c = package_version_c\r\n" + indent(2) + "report \"Package and Register Bank version mismatch.\"\r\n" + indent(2) + "severity warning;\r\n")

        for lines in TemplateCode.splitlines():
            self.architecture.bodyCodeHeader.add(lines)

        self._resetPort()
        self._resetArchBodyFooter()

    # PRIVATE API
    def _resetArchBodyFooter(self):
        self.architecture.bodyCodeFooter = vhdl.GenericCodeBlock()

    def _resetPort(self):
        self.entity.port = vhdl.PortList()
        self.entity.port.add("S_AXI_ACLK", "in", "std_logic")
        self.entity.port.add("S_AXI_ARESETN", "in", "std_logic")
        self.entity.port.add("S_AXI_AWADDR", "in", "std_logic_vector(C_S_AXI_ADDR_WIDTH-1 downto 0)")
        self.entity.port.add("S_AXI_AWPROT", "in", "std_logic_vector(2 downto 0)")
        self.entity.port.add("S_AXI_AWVALID", "in", "std_logic")
        self.entity.port.add("S_AXI_AWREADY", "out", "std_logic")
        self.entity.port.add("S_AXI_WDATA", "in", "std_logic_vector(C_S_AXI_DATA_WIDTH-1 downto 0)")
        self.entity.port.add("S_AXI_WSTRB", "in", "std_logic_vector((C_S_AXI_DATA_WIDTH/8)-1 downto 0)")
        self.entity.port.add("S_AXI_WVALID", "in", "std_logic")
        self.entity.port.add("S_AXI_WREADY", "out", "std_logic")
        self.entity.port.add("S_AXI_BRESP", "out", "std_logic_vector(1 downto 0)")
        self.entity.port.add("S_AXI_BVALID", "out", "std_logic")
        self.entity.port.add("S_AXI_BREADY", "in", "std_logic")
        self.entity.port.add("S_AXI_ARADDR", "in", "std_logic_vector(C_S_AXI_ADDR_WIDTH-1 downto 0)")
        self.entity.port.add("S_AXI_ARPROT", "in", "std_logic_vector(2 downto 0)")
        self.entity.port.add("S_AXI_ARVALID", "in", "std_logic")
        self.entity.port.add("S_AXI_ARREADY", "out", "std_logic")
        self.entity.port.add("S_AXI_RDATA", "out", "std_logic_vector(C_S_AXI_DATA_WIDTH-1 downto 0)")
        self.entity.port.add("S_AXI_RRESP", "out", "std_logic_vector(1 downto 0)")
        self.entity.port.add("S_AXI_RVALID", "out", "std_logic")
        self.entity.port.add("S_AXI_RREADY", "in", "std_logic")
        for local_port in list(self.entity.port.keys()):
            self.entity.port[local_port].assign(local_port)

    def _registerPortAdd(self):
        if self.useRecords:
            self.entity.port.add(self.entity.name+"_i","in",self.entity.name+"_i_t")
            self.entity.port.add(self.entity.name+"_o","out",self.entity.name+"_o_t")
        else:
            for register_num, register_word in self.reg.items():
                if isinstance(register_word,RegisterWord):
                    for index, register in register_word.items():
                        if (isinstance(register,RegisterBit) or isinstance(register,RegisterSlice)):
                            register.updatePort()
                            for port_name, port_data in register.port.items():
                                self.entity.port.append(port_data)
                                self.entity.port[port_name].assign(port_name)


    def _registerConnection(self):
        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "--Register Connection")
        for register_num, register_word in self.reg.items():
            for index, register in register_word.items():
                if isinstance(register, RegisterBit) or isinstance(register, RegisterSlice):
                    if register.size == 1:
                        vectorRange = str(index)
                    else:
                        vectorRange = "%d downto %d" % (index+register.size-1, index)
                        register.name = register.name+"(%d downto 0)" % (register.size-1)

                    if "ReadOnly" in register.regType:
                        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "regread_s(%d)(%s) <= %s;" % (register_num, vectorRange, register.vhdlName))

                    elif "SplitReadWrite" in register.regType:
                        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "%s <= regwrite_s(%d)(%s);" %
                                                             (register.inv_vhdlName, register_num, vectorRange))
                        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "regread_s(%d)(%s) <= %s;" %
                                                             (register_num, vectorRange, register.vhdlName))

                    elif "ReadWrite" in register.regType:
                        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "%s <= regwrite_s(%d)(%s);" % (register.vhdlName, register_num, vectorRange))
                        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "regread_s(%d)(%s) <= regwrite_s(%d)(%s);" %
                                                             (register_num, vectorRange, register_num, vectorRange))
                    elif "Write2Clear" in register.regType:
                        # self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "regread_s(%d)(%s) <= %s;" % (register_num,vectorRange,register.name))
                        pass

                    elif "Write2Pulse" in register.regType:
                        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "%s <= regwrite_s(%d)(%s);" % (register.vhdlName, register_num, vectorRange))

        self.architecture.bodyCodeFooter.add("\r\n")

    def _registerSetConnection(self):
        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "--Set Connection for Write to Clear")
        for reg_num, register_word in self.reg.items():
            for index, register in register_word.items():
                if isinstance(register, RegisterBit) or isinstance(register, RegisterSlice):
                    if register.size == 1:
                        vectorRange = str(index)

                    else:
                        vectorRange = "%d downto %d" % (index+register.size-1, index)

                    if register.regType == "Write2Clear":
                        self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "regset_s(%d)(%s) <= %s;" % (reg_num, vectorRange, register.vhdlName))

        self.architecture.bodyCodeFooter.add("")

    def _registerClearConnection(self):
            self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "--External Clear Connection")
            for register_num, register_word in self.reg.items():
                for index, register in register_word.items():
                    if (isinstance(register,RegisterBit) or isinstance(register,RegisterSlice)):
                        if self.useRecords:
                            clearname = register.clearName
                        else:
                            clearname = register.clearName + "_i"

                        if register.size == 1:
                            defaultvalue = "'1'"
                            elsevalue = "'0'"
                            vectorRange = str(index)
                        else:
                            vectorRange = "%d downto %d" % (index+register.size-1, index)
                            tmp = register.vhdlRange.replace("(","")
                            tmp = tmp.replace(")","")
                            defaultvalue = "(%s => '1')" % tmp
                            elsevalue = "(%s => '0')" % tmp

                        if ( register.regType == "Write2Clear" or register.regType == "Write2Pulse"):
                            elsevalue = "regwrite_s(%d)(%s)" % (register_num, vectorRange)
                        if register.externalClear:
                            self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "regclear_s(%d)(%s) <= %s when %s = '1' else %s;" %
                                                                (register_num, vectorRange, defaultvalue, clearname, elsevalue))
                        elif ( register.regType == "Write2Clear" or register.regType == "Write2Pulse"):
                            self.architecture.bodyCodeFooter.add(vhdl.indent(1) + "regclear_s(%d)(%s) <= %s;" % (register_num, vectorRange, elsevalue) )


    def _generate(self):
        if (not self.generate_code):
            self._resetPort()
            self._resetArchBodyFooter()
            self._registerPortAdd()
            self._registerConnection()
            self._registerSetConnection()
            self._registerClearConnection()
            self.pkg.packageDeclaration.component.append(self.dec_object())
            self.generate_code = True


    # PUBLIC API
    def add(self, number, name):
        self.generate_code = False
        self.reg.add(number, RegisterWord(name, self.datasize))


    def SetPortAsRecord(self):
        self.generate_code = False
        self.useRecords = True
        out_record_name = self.entity.name+"_o_t"
        in_record_name = self.entity.name+"_i_t"
        self.pkg.packageDeclaration.customTypes.add(out_record_name, "Record")
        self.pkg.packageDeclaration.customTypes.add(in_record_name, "Record")
        for reg_num, register_word in self.reg.items():
            for index, register in register_word.items():
                if (isinstance(register,RegisterBit) or isinstance(register,RegisterSlice)):
                    for j in register.port:
                        if register.externalClear:
                            self.pkg.packageDeclaration.customTypes[in_record_name].add(register.clearName,"std_logic")
                            self.reg[reg_num][index].clearName = self.entity.name+"_i."+register.clearName

                        if "SplitReadWrite" in register.regType:
                            self.pkg.packageDeclaration.customTypes[out_record_name].add(register.name,register.vhdlType)
                            self.pkg.packageDeclaration.customTypes[in_record_name].add(register.name,register.vhdlType)
                            self.reg[reg_num][index].vhdlName = self.entity.name+"_i."+register.name
                            self.reg[reg_num][index].inv_vhdlName = self.entity.name+"_o."+register.name
                        elif "out" in register.direction:
                            self.pkg.packageDeclaration.customTypes[out_record_name].add(register.name,register.vhdlType)
                            self.reg[reg_num][index].vhdlName = self.entity.name+"_o."+register.name
                        else:
                            self.pkg.packageDeclaration.customTypes[in_record_name].add(register.name,register.vhdlType)
                            self.reg[reg_num][index].vhdlName = self.entity.name+"_i."+register.name

    def code(self):
        self._generate()
        return vhdl.BasicVHDL.code(self)

    def write_package(self):
        self.pkg.write_file()

    def write_document(self):
        self._generate()

        if (not os.path.exists("output")):
            os.makedirs("output")

        self.document.new_header(1, "Details")
        self.document.new_line("Data Width: %d" % self.datasize)
        self.document.new_line("Number of registers: %d" % len(self.reg))
        self.document.new_line("Version: v%s" % self.version)
        self.document.new_line("Register Bank auto-generated using the hdltools/regbank_gen.py")
        self.document.new_line()
        self.document.new_header(1, "List of Registers")
        self.document.new_line()
        for index in self.reg:
            register = self.reg[index]
            self.document.new_header(2, "Register %d: %s" % (index, register.name))
            self.document.new_line("Address: BASE + 0x%x" % index)
            if (register.description):
                self.document.new_line("Description: %s" % register.description)
            self.document.new_line()
            list_of_strings = ["Bit", "Field", "Type", "Reset", "Description"]
            numOfRows = 1
            for bit in register:
                if isinstance(register[bit], RegisterBit):
                    numOfRows = numOfRows + 1
                    if register[bit].size > 1:
                        range = "%d-%d" % (bit+register[bit].size-1, bit)
                    else:
                        range = "%d" % (bit)
                    field = register[bit].radix
                    type = register[bit].regType
                    # Default value
                    init = register[bit].init
                    if init == "(others => '0')" or init == "'0'":
                        init = "0x0"
                    elif init == "'1'":
                        init = "1"
                    else:
                        init = "0" + init.replace("\"", "")
                    # decription
                    description = register[bit].description
                    list_of_strings.extend([range, field, type, init, description])
            self.document.new_table(columns=5, rows=numOfRows, text=list_of_strings, text_align='center')
        self.document.new_line()
        self.document.new_line("hdltools available at https://github.com/rftafas/hdltools.")
        self.document.create_md_file()

    def write_header(self):
        self._generate()
        if (not os.path.exists("output")):
            os.makedirs("output")

        header_code = ""
        header_code = header_code + "#ifndef %s_H\n\r" % (self.entity.name.upper())
        header_code = header_code + "#define %s_H\n\r" % (self.entity.name.upper())

        header_code = header_code + "\n\r"
        header_code = header_code + "/*This auto-generated header file was created using hdltools. File version:*/\n\r"
        header_code = header_code + "#define %s_VERSION \"%s\"\n\r" % (self.entity.name.upper(), self.version)
        header_code = header_code + "\n\r"

        for index in self.reg:
            register = self.reg[index]
            header_code = header_code + "/*Register %s address */\n\r" % register.name
            header_code = header_code + "#define %s_OFFSET 0x%x\n\r" % (register.name.upper(), index)
            for bit in register:
                if isinstance(register[bit], RegisterBit):
                    header_code = header_code + "/*Register %s field %s */\n\r" % (register.name, register[bit].radix)
                    fieldName = register.name.upper() + "_" + register[bit].radix.upper()
                    header_code = header_code + "#define %s_FIELD_OFFSET %d\n\r" % (fieldName, bit)
                    header_code = header_code + "#define %s_FIELD_WIDTH %d\n\r" % (fieldName, register[bit].size)
                    # compute field mask
                    mask = 0
                    for i in range(bit, bit + register[bit].size):
                        mask = mask + 2**i
                    header_code = header_code + "#define %s_FIELD_MASK %s\n\r" % (fieldName, hex(mask))
                    # Field default value
                    if register[bit].init == "(others => '0')" or register[bit].init == "'0'":
                        header_code = header_code + "#define %s_RESET %s\n\r" % (fieldName, "0x0")
                    else:
                        header_code = header_code + "#define %s_RESET 0%s\n\r" % (fieldName, register[bit].init.replace("\"", ""))
            header_code = header_code + "\n\r"

        header_code = header_code + "\n\r"

        output_file_name = "output/"+self.entity.name+".h"
        # to do: check if file exists. If so, emit a warning and
        # check if must clear it.
        output_file = open(output_file_name, "w+")
        for line in header_code:
            output_file.write(line)

        output_file.close()

    def write_testbench(self):
        self._generate()
        testbench = vhdl.BasicVHDL(self.entity.name+"_tb","simulation")
        testbench.entity.generic.add("runner_cfg", "string", "")
        testbench.entity.generic.add("run_time", "integer", "100")
        testbench.library = self.library
        testbench.library.add("std")
        testbench.library["std"].package.add("textio")
        testbench.library.add("vunit_lib")
        testbench.library["vunit_lib"].context.add("vunit_context")
        testbench.library["vunit_lib"].context.add("vc_context")
        testbench.work.add(self.pkg.name)

        for index, generic in self.entity.generic.items():
            testbench.architecture.constant.add(index,generic.type,generic.value)

        testbench.architecture.constant.add("axi_handle","bus_master_t","new_bus(data_length => C_S_AXI_DATA_WIDTH, address_length => C_S_AXI_ADDR_WIDTH)")
        testbench.architecture.constant.add("addr_increment_c","integer",str(self.addr_increment))

        for port in self.entity.port:
            testbench.architecture.signal.add(port,self.entity.port[port].type)
        # set starting value to clock. All other signals should be handled by reset.
        testbench.architecture.signal["S_AXI_ACLK"].value = "'0'"


        testbench.architecture.instances.add("axi_master_u","entity vunit_lib.axi_lite_master")
        testbench.architecture.instances["axi_master_u"].generic.add("bus_handle","","axi_handle")

        for local_port in list(vunitPort.keys()):
            testbench.architecture.instances["axi_master_u"].port.add(local_port,"","")
            testbench.architecture.instances["axi_master_u"].port[local_port].assign(vunitPort[local_port])

        testbench.architecture.instances.append(self.instanciation("dut_u"))
        read_only = vhdl.GenericCodeBlock()
        read_write = vhdl.GenericCodeBlock()
        split_read_write = vhdl.GenericCodeBlock()
        write_to_clear = vhdl.GenericCodeBlock()
        write_to_pulse = vhdl.GenericCodeBlock()
        external_clear = vhdl.GenericCodeBlock()

        for reg_number, register_word in self.reg.items():
            reg_address = reg_number * self.addr_increment
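            # bus address of this register word: register index scaled by the address increment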

            for index, register in register_word.items():
                if not (isinstance(register,RegisterBit) or isinstance(register,RegisterSlice)):
                    continue

                if register.size == 1:
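                    # one-bit field: index a single position of the AXI data word (wider fields use a downto slice below)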
                    tb_value = random_bit()
                    vector_location = "(%s)" % index
                    all_one = "'1'"
                    all_zero = "'0'"

                else:
                    tb_value = random_vector(register.size)
                    vector_location = "(%s downto %s)" % (register.size + index - 1, index)
                    all_one = "(others=>'1')"
                    all_zero = "(others=>'0')"

                if register.regType == "ReadOnly":
                    testbench.architecture.bodyCodeFooter.add(vhdl.indent(1) + "--Read Only: %s;" % register.vhdlName)
                    testbench.architecture.bodyCodeFooter.add(vhdl.indent(1) + "%s <= %s;" % (register.vhdlName, tb_value))
                    read_only.add(vhdl.indent(1) + "--Testing %s" % register.vhdlName)
                    read_only.add(vhdl.indent(1) + "read_bus(net,axi_handle,%d,rdata_v);" % reg_address )
                    read_only.add(vhdl.indent(1) + "check_equal(rdata_v%s,%s,result(\"Test Read: %s.\"));" % ( vector_location, register.vhdlName, register.vhdlName ))

                if register.regType == "ReadWrite":
                    tb_value = random_vector(self.datasize)
                    read_write.add(vhdl.indent(1) + "--Testing %s" % register.vhdlName)
                    read_write.add(vhdl.indent(1) + "rdata_v := %s;" % tb_value)
                    read_write.add(vhdl.indent(1) + "write_bus(net,axi_handle,%d,rdata_v,%s);" % (reg_address, register.byte_enable) )
                    read_write.add(vhdl.indent(1) + "read_bus(net,axi_handle,%d,rdata_v);" % reg_address )
                    read_write.add(vhdl.indent(1) + "check_equal(%s,rdata_v%s,result(\"Test Readback and Port value: %s.\"));" % ( register.vhdlName, vector_location, register.vhdlName ))

                if register.regType == "SplitReadWrite":
                    testbench.architecture.bodyCodeFooter.add(vhdl.indent(1) + "--Split Read and Write: %s;" % register.vhdlName)
                    testbench.architecture.bodyCodeFooter.add(vhdl.indent(1) + "%s <= %s;" % (register.vhdlName, tb_value))
                    split_read_write.add(vhdl.indent(1) + "--Testing %s" % register.vhdlName)
                    split_read_write.add(vhdl.indent(1) + "read_bus(net,axi_handle,%d,rdata_v);" % reg_address )
                    split_read_write.add(vhdl.indent(1) + "check_equal(rdata_v%s,%s,result(\"Test Read: %s.\"));" % ( vector_location, register.vhdlName, register.vhdlName ))
                    tb_value = random_vector(self.datasize)
                    split_read_write.add(vhdl.indent(1) + "--Testing %s" % register.inv_vhdlName)
                    split_read_write.add(vhdl.indent(1) + "rdata_v := %s;" % tb_value )
                    split_read_write.add(vhdl.indent(1) + "write_bus(net,axi_handle,%d,rdata_v,%s);" % (reg_address, register.byte_enable) )
                    split_read_write.add(vhdl.indent(1) + "wait for 1 us;")
                    split_read_write.add(vhdl.indent(1) + "check_equal(%s,rdata_v%s,result(\"Test Read: %s.\"));" % ( register.inv_vhdlName, vector_location, register.inv_vhdlName ))

                if register.regType == "Write2Clear":
                    write_to_clear.add(vhdl.indent(1) + "--Testing %s: Set to %s" % (register.vhdlName, all_one) )
                    write_to_clear.add(vhdl.indent(1) + "%s <= %s;" % (register.vhdlName, all_one))
                    write_to_clear.add(vhdl.indent(1) + "wait until rising_edge(S_AXI_ACLK);")
                    write_to_clear.add(vhdl.indent(1) + "%s <= %s;" % (register.vhdlName, all_zero))
                    write_to_clear.add(vhdl.indent(1) + "wait until rising_edge(S_AXI_ACLK);")
                    write_to_clear.add(vhdl.indent(1) + "read_bus(net,axi_handle,%d,rdata_v);" % reg_address )
                    write_to_clear.add(vhdl.indent(1) + "check(rdata_v%s = %s,result(\"Test Read Ones: %s.\"));" % ( vector_location, tb_value.replace('0','1'), register.vhdlName ))
                    write_to_clear.add(vhdl.indent(1) + "rdata_v := (others=>'0');" )
                    write_to_clear.add(vhdl.indent(1) + "rdata_v%s := %s;" % (vector_location, all_one) )
                    write_to_clear.add(vhdl.indent(1) + "write_bus(net,axi_handle,%d,rdata_v,%s);" % (reg_address, register.byte_enable) )
                    write_to_clear.add(vhdl.indent(1) + "read_bus(net,axi_handle,%d,rdata_v);" % reg_address )
                    write_to_clear.add(vhdl.indent(1) + "check(rdata_v%s = %s,result(\"Test Read Zeroes: %s.\"));" % ( vector_location, tb_value.replace('1','0'), register.vhdlName ))

                if register.regType == "Write2Pulse":
                    write_to_pulse.add(vhdl.indent(1) + "--Testing %s" % register.vhdlName)
                    write_to_pulse.add(vhdl.indent(1) + "rdata_v%s := %s;" % (vector_location, all_one) )
                    write_to_pulse.add(vhdl.indent(1) + "write_bus(net,axi_handle,%d,rdata_v,%s);" % (reg_address, register.byte_enable) )
                    write_to_pulse.add(vhdl.indent(1) + "wait until %s = %s;" % (register.vhdlName, tb_value.replace("0","1")) )

        read_only.add(vhdl.indent(1) + "check_passed(result(\"Read Out Test Pass.\"));")
        read_write.add(vhdl.indent(1) + "check_passed(result(\"Read and Write Test Pass.\"));")
        split_read_write.add(vhdl.indent(1) + "check_passed(result(\"Split Read Write Test Pass.\"));")
        write_to_clear.add(vhdl.indent(1) + "check_passed(result(\"Write to Clear Test Pass.\"));")
        write_to_pulse.add(vhdl.indent(1) + "check_passed(result(\"Write to Pulse Test Pass.\"));")
        external_clear.add(vhdl.indent(1) + "check_passed(result(\"External Clear Test Pass.\"));")

        new_tb_code = testBenchCode.replace("--read_only_tag",read_only.code(4))
        new_tb_code = new_tb_code.replace("--read_write_tag",read_write.code(4))
        new_tb_code = new_tb_code.replace("--split_read_write_tag",split_read_write.code(4))
        new_tb_code = new_tb_code.replace("--write_to_clear_tag",write_to_clear.code(4))
        new_tb_code = new_tb_code.replace("--write_to_pulse_tag",write_to_pulse.code(4))
        new_tb_code = new_tb_code.replace("--external_clear_tag",external_clear.code(4))

        testbench.architecture.bodyCodeHeader.add(new_tb_code)

        testbench.write_file()


    def write_script(self):
        tmpScript = templateScript.replace("<name>",self.entity.name)

        if not os.path.exists("output"):
            os.makedirs("output")
        output_file_name = "output/" + self.entity.name + "_run.py"
        # to do: check if file exists. If so, emit a warning and
        # check if must clear it.
        with open(output_file_name, "w") as output_file:
            output_file.write(tmpScript)

    def write_file(self):
        self._generate()
        vhdl.BasicVHDL.write_file(self)
        return True

    def __call__(self):
        self.write_file()
        self.write_package()
        self.write_testbench()
        self.write_script()
        self.write_header()
        self.write_document()
Esempio n. 30
0
    def _write_command_usage(cls, file: MdUtils, lines: List[str]):
        file.new_header(level=3, title='Usage', add_table_of_contents='n')
        main_lines = ''.join([f'    {line}\n' for line in lines[1:]])
        file.write(f'```bash\n{lines[0]}\n{main_lines}```')