def _add_overview(self, doc: Document, title: str = 'Overview') -> None:
    """Add overview section to document.

    :param doc: document to append the section to
    :param title: section title
    :return: None
    """
    with doc.create(Section(title)):
        doc.append('Overview of kernel search results.')
        doc.append("\n")
        doc.append(VerticalSpace("10pt"))
        doc.append(LineBreak())

        best_kern_short = str(self.best_gp_model)
        best_kern_long = self.best_gp_model.covariance.infix_full

        with doc.create(MiniPage()):
            doc.append(bold("Best Kernel:"))
            doc.append("\n")
            doc.append(VerticalSpace("1pt"))
            doc.append(LineBreak())

            doc.append(italic("Short Form:"))
            doc.append("\n")
            doc.append(best_kern_short)
            doc.append("\n")
            doc.append(VerticalSpace("2.5pt"))
            doc.append(LineBreak())

            doc.append(italic("Long Form:"))
            doc.append("\n")
            doc.append(best_kern_long)
def print_workout(doc, lift, one_rm, no, accessorys):
    """Print one workout routine.

    Arguments:
        lift: the main lift for this workout
        one_rm: one-rep max used for the main lift table
        no: workout number, passed through to gen_main_lift
        accessorys: accessory exercises to tabulate

    Returns:
        doc, with one workout appended.
    """
    doc.append(VerticalSpace("20pt"))
    doc.append(LineBreak())
    main_lift = gen_main_lift(lift, one_rm, no)
    access = gen_accessory_table(accessorys)
    with doc.create(MiniPage(width=r"0.5\textwidth")):
        doc.append(bold('Main Lift\n\n'))
        doc.append(main_lift)
        doc.append(VerticalSpace("20pt"))
        doc.append(LineBreak())
        doc.append(bold('Accessory Exercises\n\n'))
        doc.append(access)
        # doc.append(main_lift)
    doc.append(VerticalSpace("20pt"))
    doc.append(LineBreak())
    return doc
def append_solution(self, solution_id, problem, solution):
    with self.doc.create(Multicols(arguments=[2])):
        image_filename = os.path.join(
            os.path.dirname('.'), 'images/solution_%d.png' % solution_id)
        with self.doc.create(
                Section('Problem with solution %d' % solution_id,
                        label="problem%d" % solution_id)):
            with self.doc.create(
                    Subsection('TSNE representation',
                               label="tsne%d" % solution_id)):
                with self.doc.create(Figure(position='H')) as tsne:
                    tsne.add_image(image_filename, width=NoEscape(r'11cm'))
                    tsne.add_caption('TSNE 2d feature space')
                self.doc.append(VerticalSpace(NoEscape(r"\fill")))
            with self.doc.create(
                    Subsection('Problem and Solution',
                               label="solution%d" % solution_id)):
                self.doc.append(problem)
                with self.doc.create(Verbatim()):
                    self.doc.append(solution)
                self.doc.append(VerticalSpace(NoEscape(r"\fill")))
    self.doc.append(NewPage())
    return self.doc
def append_liquor_list(doc, df, own_page):
    # TODO no interaction with dataframe?
    kinds = df[df.Category.isin(['Spirit', 'Vermouth', 'Liqueur'])][['Kind', 'Type']]

    if own_page:
        print("Appending list as new page")
        doc.append(NewPage())

    listing = SamepageEnvironment()
    block = Center()
    if not own_page:
        block.append(HRuleFill())
        block.append(Command('\\'))
        block.append(VerticalSpace('16pt'))
    block.append(TitleText("Included Ingredients"))
    block.append(Command('\\'))
    listing.append(block)
    listing.append(VerticalSpace('12pt'))

    cols = add_paracols_environment(listing, 2, '8pt', sloppy=False)
    with cols.create(FlushRight()):
        for item in kinds.Kind:
            cols.append(LargeText(item))
            cols.append(Command('\\'))
    cols.append(Command('switchcolumn'))
    with cols.create(FlushLeft()):
        for item in kinds.Type:
            cols.append(LargeText(italic(item)))
            cols.append(Command('\\'))

    doc.append(listing)
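# Hedged sketch (not from the source): append_liquor_list above relies on custom
# helpers (SamepageEnvironment, HRuleFill, TitleText, add_paracols_environment).
# One plausible way such wrappers could be declared with pylatex's base classes:
from pylatex.base_classes import CommandBase, Environment


class SamepageEnvironment(Environment):
    """A {samepage} environment, keeping the listing on one page."""
    _latex_name = 'samepage'


class HRuleFill(CommandBase):
    """The LaTeX \\hrulefill command, drawing a horizontal rule."""
    _latex_name = 'hrulefill'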
def _generate_document(self):
    legend = self.create_unit_legend()
    node_legend, node_legend2 = self.create_platform_unit_legend()
    self.doc.append(legend)
    self.doc.append(VerticalSpace("10pt"))
    self.doc.append(LineBreak())
    self.doc.append(node_legend)
    self.doc.append(VerticalSpace("10pt"))
    self.doc.append(LineBreak())
    self.doc.append(node_legend2)
    for section in self.sections.values():
        self.doc.append(section)
def add_contig_info(self):
    if self.analysis.contig_info is None:
        return

    for method in ['ONT', 'Illumina']:
        if method not in self.analysis.contig_info.index:
            continue
        with self.doc.create(
                Subsection('Assembly coverage by ' + method, numbering=False)):
            table_format = 'l' * self.analysis.contig_info[method].shape[1]
            self.doc.append('')
            with self.doc.create(Tabular(table_format)) as table:
                table.add_row(('Contig', 'Length (bp)', 'Coverage (X)'))
                table.add_hline()
                formatted = self.analysis.contig_info[method].copy()
                formatted.iloc[:, 1] = formatted.iloc[:, 1].apply(
                    lambda x: '{:,}'.format(x))
                for i in range(self.analysis.contig_info[method].shape[0]):
                    table.add_row(formatted.iloc[i, :].values.tolist())
            self.doc.append(LineBreak())
            self.doc.append(VerticalSpace("10pt"))
def test_position():
    repr(HorizontalSpace(size='20pt', star=False))
    repr(VerticalSpace(size="20pt", star=True))

    # Test alignment environments
    center = Center()
    center.append("append")
    repr(center)

    right = FlushRight()
    right.append("append")
    repr(right)

    left = FlushLeft()
    left.append("append")
    repr(left)

    minipage = MiniPage(width=r"\textwidth", height="10pt", pos='t',
                        align='r', content_pos='t', fontsize="Large")
    minipage.append("append")
    repr(minipage)

    textblock = TextBlock(width="200", horizontal_pos="200",
                          vertical_pos="200", indent=True)
    textblock.append("append")
    textblock.dumps()
    repr(textblock)
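# For reference, a minimal sketch of what the spacing classes exercised above
# emit as LaTeX (assuming a standard pylatex install; exact whitespace may vary):
from pylatex import HorizontalSpace, VerticalSpace

print(HorizontalSpace("20pt", star=False).dumps())  # expected: \hspace{20pt}
print(VerticalSpace("20pt", star=True).dumps())     # expected: \vspace*{20pt}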
def generate_first_page(schema: PLDSchema, document: Document) -> Document:
    document.append(LargeText(Command("maketitle")))
    document.append(VerticalSpace("4cm"))
    with document.create(Center()) as center:
        center: Center
        center.append(LargeText(bold(schema.subtitle)))
    return document
def add_features(self):
    if len(self.analysis.feature_hits) == 0:
        return

    self.doc.append(NewPage())

    with self.doc.create(Section(self.feature_title, numbering=False)):
        for feature_name in self.analysis.feature_hits.index.tolist():
            features = self.analysis.feature_hits[feature_name].copy()
            if features.shape[0] == 0:
                continue
            features.iloc[:, 1] = features.iloc[:, 1].apply(
                lambda x: '{:,}'.format(x))
            features.iloc[:, 2] = features.iloc[:, 2].apply(
                lambda x: '{:,}'.format(x))
            table_format = 'l' * (features.shape[1] - 1)
            with self.doc.create(Subsection(feature_name, numbering=False)):
                if (features.shape[0] == 0):
                    self.doc.append('None')
                    continue
                for contig in pd.unique(features.iloc[:, 0]):
                    self.doc.append(contig)
                    contig_features = features.loc[(
                        features.iloc[:, 0] == contig), :]
                    with self.doc.create(Tabular(table_format)) as table:
                        table.add_row(('Start', 'Stop', 'Feature',
                                       'Identity (%)', 'Strand'))
                        table.add_hline()
                        for i in range(contig_features.shape[0]):
                            feature = contig_features.iloc[i, :].copy(deep=True)
                            feature[4] = '{:.3f}'.format(feature[4])
                            table.add_row(feature[1:].values.tolist())
                    self.doc.append(LineBreak())
                    self.doc.append(VerticalSpace("10pt"))

    method = 'The genome assembly was queried for features using blastn (v ' + \
        self.analysis.versions['blastn'] + '). ' + \
        'Feature hits were clustered using bedtools (v ' + \
        self.analysis.versions['bedtools'] + ') ' + \
        'and the highest scoring hit for each cluster was reported.'
    self.methods[self.feature_methods_title] = self.methods[
        self.feature_methods_title].append(pd.Series(method))
def add_assembly_notes(self):
    if len(self.analysis.assembly_notes) == 0:
        return

    with self.doc.create(
            Subsection(self.assembly_notes_title, numbering=False)):
        left = FlushLeft()
        for _, note in self.analysis.assembly_notes.iteritems():
            left.append(note)
            left.append(LineBreak())
        self.doc.append(left)
        self.doc.append(VerticalSpace("10pt"))
def discover_experiment_data(self, experiment_name, experiment_type, tasks,
                             task_counts, nodes: List[Node], description,
                             start_time):
    if experiment_name not in self.sections.keys():
        self.sections[experiment_name] = Section(experiment_name)
        self.sections[experiment_name].append(description)
        self.sections[experiment_name].append(
            '\nExperiment start time: {}'.format(
                datetime.fromtimestamp(start_time)))
    if experiment_type not in self.experiment_types:
        self.experiment_types.append(experiment_type)

    workloads_results = Subsection('')

    # create table with results
    table = self.create_table()
    for task in tasks:
        task_name = self._strip_task_name(task)
        task_count = task_counts[task_name]
        average_latency, average_throughput, q09_latency, q09_throughput,\
            numa_nodes, mbw_local, mbw_remote = self.get_metrics(tasks[task])
        table.add_row(
            (tasks[task].name.replace('default/', ''),
             average_latency, average_throughput,
             q09_latency, q09_throughput,
             numa_nodes[0], numa_nodes[1], numa_nodes[2], numa_nodes[3],
             mbw_local, mbw_remote))
        table.add_hline()
        self._keep_task_results(task, task_name, task_count, experiment_type,
                                average_latency, average_throughput,
                                q09_latency, q09_throughput)

    # create table with node metrics
    node_table = self.create_nodes_table()
    for node in nodes:
        for socket in [0, 1]:
            row = [node.name, socket]
            for metric in metrics.platform_metrics:
                row.append(
                    self.round_metric(
                        float(node.performance_metrics[socket][metric.name]) /
                        float(metrics.MetricLegends[metric]['helper'])))
            node_table.add_row(row)
            node_table.add_hline()

    workloads_results.append(table)
    workloads_results.append(VerticalSpace("10pt"))
    workloads_results.append(LineBreak())
    workloads_results.append(node_table)
    self.sections[experiment_name].append(workloads_results)
def add_contamination(self):
    if len(self.analysis.kraken_fracs) == 0:
        return

    self.doc.append(NewPage())

    with self.doc.create(Section('Contamination check', numbering=False)):
        for read_type, kraken_fracs in self.analysis.kraken_fracs.iteritems():
            left = FlushLeft()
            left.append(read_type + ' classifications')
            left.append(VerticalSpace('5pt'))
            self.doc.append(left)

            with self.doc.create(
                    Tabular(''.join(['l'] * kraken_fracs.shape[1]),
                            width=kraken_fracs.shape[1])) as table:
                table.add_row(('Percent of reads', 'Reads', 'Level', 'Label'))
                table.add_hline()
                for index, row in kraken_fracs.iterrows():
                    table.add_row(row.tolist())

            self.doc.append(LineBreak())
            self.doc.append(VerticalSpace('5pt'))

    if self.contamination_methods_title not in self.methods:
        self.methods[self.contamination_methods_title] = ''

    method = 'Kraken2 (' + self.analysis.versions['kraken2'] + \
        ') was used to assign the raw reads into taxa.'
    self.methods[self.contamination_methods_title] = self.methods[
        self.contamination_methods_title].append(pd.Series(method))
def add_ont_library_information(self):
    if self.analysis.ont_n50 is None:
        return

    with self.doc.create(
            Subsection('ONT library statistics', numbering=False)):
        with self.doc.create(Tabular('ll', width=2)) as table:
            table.add_row(
                ('ONT N50', '{:,}'.format(self.analysis.ont_n50)))
            table.add_row(
                ('ONT reads', '{:,}'.format(self.analysis.ont_read_count)))
            table.add_row(
                ('ONT bases', '{:s}'.format(self.analysis.ont_bases)))
        self.doc.append(VerticalSpace("10pt"))
def add_features(self):
    if len(self.report[self.analysis.feature_title]) == 0:
        return

    self.doc.append(NewPage())

    with self.doc.create(
            Section(self.analysis.feature_title, numbering=False)):
        for feature_name in self.report[
                self.analysis.feature_title].index.tolist():
            features = self.report[
                self.analysis.feature_title][feature_name].copy()
            if features.shape[0] == 0:
                continue
            features.iloc[:, 1] = features.iloc[:, 1].apply(
                lambda x: '{:,}'.format(x))
            features.iloc[:, 2] = features.iloc[:, 2].apply(
                lambda x: '{:,}'.format(x))
            table_format = 'l' * (features.shape[1] - 1)
            with self.doc.create(Subsection(feature_name, numbering=False)):
                if (features.shape[0] == 0):
                    self.doc.append('None')
                    continue
                for contig in pandas.unique(features.iloc[:, 0]):
                    self.doc.append(contig)
                    contig_features = features.loc[(
                        features.iloc[:, 0] == contig), :]
                    with self.doc.create(Tabular(table_format)) as table:
                        table.add_row(('Start', 'Stop', 'Feature',
                                       'Identity (%)', 'Strand'))
                        table.add_hline()
                        for i in range(contig_features.shape[0]):
                            feature = contig_features.iloc[i, :].copy(deep=True)
                            feature[4] = '{:.3f}'.format(feature[4])
                            table.add_row(feature[1:].values.tolist())
                    self.doc.append(LineBreak())
                    self.doc.append(VerticalSpace("10pt"))
def add_illumina_library_information(self):
    if self.analysis.illumina_length_mean is None:
        return

    with self.doc.create(
            Subsection('Illumina library statistics', numbering=False)):
        with self.doc.create(Tabular('ll', width=2)) as table:
            table.add_row(
                ('Illumina mean length',
                 '{:.1f}'.format(self.analysis.illumina_length_mean)))
            table.add_row(
                ('Illumina reads',
                 '{:,}'.format(self.analysis.illumina_read_count)))
            table.add_row(
                ('Illumina bases',
                 '{:s}'.format(self.analysis.illumina_bases)))
        self.doc.append(VerticalSpace("10pt"))
def generate_organigram(schema: PLDSchema, locale: LocaleDictionary,
                        document: Document) -> Document:
    document.append(NewPage())
    with document.create(Figure()) as figure:
        figure: Figure
        with figure.create(Section(title=locale.organigram)) as section:
            section: Section
            section.append(Command("centering"))
            with section.create(Center()) as center:
                center: Center
                with center.create(TikZ()) as forest:
                    forest: TikZ
                    node_kwargs = {'align': 'center', 'minimum size': '20pt'}
                    # noinspection PyTypeChecker
                    top_box = TikZNode(
                        text=schema.title,
                        handle=f"project-box",
                        options=TikZOptions('draw', 'rounded corners',
                                            **node_kwargs))
                    forest.append(top_box)
                    last_box_handle = top_box.handle

                    for n_deliverable, deliverable in enumerate(
                            schema.deliverables, start=1):
                        # noinspection PyTypeChecker
                        box = TikZNode(
                            text=f"{n_deliverable}. {deliverable.name}",
                            handle=f"deliverable-box-{n_deliverable}",
                            options=TikZOptions(
                                'draw', 'rounded corners',
                                f'below = of {last_box_handle}'
                                if top_box.handle == last_box_handle
                                else f'right = of {last_box_handle}',
                                **node_kwargs))
                        last_box_handle = box.handle
                        # noinspection PyTypeChecker
                        path = TikZDraw(
                            TikZPathList(top_box.get_anchor_point("south"),
                                         "--",
                                         box.get_anchor_point("north")))
                        forest.append(box)
                        forest.append(path)
    document.append(VerticalSpace("2cm"))
    return document
def add_assembly_information(self):
    if self.analysis.genome is None:
        return

    with self.doc.create(
            Subsection('Assembly statistics', numbering=False)):
        with self.doc.create(Tabular('ll', width=2)) as table:
            table.add_row(('Contigs', len(self.analysis.genome)))

            genome_size = 0
            for i in self.analysis.genome:
                genome_size += len(i.seq)
            genome_size = si_format(genome_size, precision=1)
            table.add_row(('Assembly size', genome_size))
        self.doc.append(VerticalSpace("10pt"))
def add_run_information(self):
    with self.doc.create(Subsection('Run information', numbering=False)):
        with self.doc.create(
                Tabular(r'p{0.15\linewidth}p{0.65\linewidth}',
                        width=2)) as table:
            table.add_row(('Date', self.analysis.start_time))
            if self.analysis.ont_fast5:
                table.add_row(('ONT FAST5', self.analysis.ont_fast5))
            if self.analysis.ont_raw_fastq:
                table.add_row(('ONT FASTQ', self.analysis.ont_raw_fastq))
            if self.analysis.illumina_fastq:
                table.add_row(('Illumina FASTQ',
                               ', '.join(self.analysis.illumina_fastq)))
            if self.analysis.genome_fasta:
                table.add_row(('Assembly', self.analysis.genome_fasta))
            if self.analysis.reference_fasta:
                table.add_row(('Reference', self.analysis.reference_fasta))
        self.doc.append(VerticalSpace("10pt"))
def _add_comparison(self, doc: Document,
                    title: str = 'Comparison to Other Models') -> None:
    """Add comparison sub-section to document.

    :param doc: document to append the sub-section to
    :param title: sub-section title
    :return: None
    """
    with doc.create(Subsection(title)):
        doc.append('This table contains the RMSE of the best model and others.')
        doc.append("\n")
        doc.append(VerticalSpace("1pt"))
        doc.append(LineBreak())

        with doc.create(Center()) as centered:
            with centered.create(Tabu("|c|c|c|c|c|", to="4in")) as data_table:
                header_row = ["Best Model", "Linear Regression",
                              "Support Vector Regression", "GP (RBF kernel)",
                              "k-NN Regression"]
                data_table.add_row(header_row, mapper=[bold])
                data_table.add_hline()

                rmse_best_model = compute_gpy_model_rmse(
                    self.best_model, self.x_test, self.y_test)
                rmse_lr = rmse_lin_reg(self.x_train, self.y_train,
                                       self.x_test, self.y_test)
                rmse_svm = rmse_svr(self.x_train, self.y_train,
                                    self.x_test, self.y_test)
                se_rmse = rmse_rbf(self.x_train, self.y_train,
                                   self.x_test, self.y_test)
                knn_rmse = rmse_knn(self.x_train, self.y_train,
                                    self.x_test, self.y_test)

                row = ('%0.3f %0.3f %0.3f %0.3f %0.3f'
                       % (rmse_best_model, rmse_lr, rmse_svm, se_rmse,
                          knn_rmse)).split(' ')
                data_table.add_row(row)
def generate_labels():
    geometry_options = {"margin": "0.5in"}
    doc = Document(geometry_options=geometry_options)
    doc.change_document_style("empty")

    for i in range(10):
        with doc.create(MiniPage(width=r"0.5\textwidth")):
            doc.append("Vladimir Gorovikov")
            doc.append("\n")
            doc.append("Company Name")
            doc.append("\n")
            doc.append("Somewhere, City")
            doc.append("\n")
            doc.append("Country")

        if (i % 2) == 1:
            doc.append(VerticalSpace("20pt"))
            doc.append(LineBreak())

    doc.generate_pdf("minipage", clean_tex=False)
def _add_model_summary(self, doc: Document,
                       title: str = 'Best Model Summary') -> None:
    """Add model summary sub-section to document.

    :param doc: document to append the sub-section to
    :param title: sub-section title
    :return: None
    """
    with doc.create(Subsection(title)):
        doc.append('This table contains various scores of the best model.')
        doc.append("\n")
        doc.append(VerticalSpace("1pt"))
        doc.append(LineBreak())

        with doc.create(Center()) as centered:
            with centered.create(Tabu("|c|c|c|c|c|c|", to="4in")) as data_table:
                header_row = ["NLL", "NLL (normalized)", "Mean NLPD",
                              "BIC", "AIC", "PL2"]
                data_table.add_row(header_row, mapper=[bold])
                data_table.add_hline()

                nll = -self.best_model.log_likelihood()
                nll_norm = log_likelihood_normalized(self.best_model)
                mean_nlpd = np.mean(-self.best_model.log_predictive_density(
                    self.x_test, self.y_test))
                aic = AIC(self.best_model)
                bic = BIC(self.best_model)
                pl2_score = pl2(self.best_model)

                row = ('%0.3f %0.3f %0.3f %0.3f %0.3f %0.3f'
                       % (nll, nll_norm, mean_nlpd, bic, aic,
                          pl2_score)).split(' ')
                data_table.add_row(row)
def padding(self, tex_length):
    # Space between the header text and line
    self.__padding = VerticalSpace(tex_length)
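# Hedged sketch (the class name and getter are assumptions, not from the source):
# the setter above reads like one half of a `padding` property on a header class.
from pylatex import VerticalSpace


class HeaderRule:
    """Illustrative container for a header rule with configurable padding."""

    def __init__(self, tex_length="6pt"):
        self.__padding = VerticalSpace(tex_length)

    @property
    def padding(self):
        """Space between the header text and line."""
        return self.__padding

    @padding.setter
    def padding(self, tex_length):
        # Space between the header text and line
        self.__padding = VerticalSpace(tex_length)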
def gerar_pdf_certificado(certificado):
    # Document settings
    geometry_options = {'landscape': True, 'left': '2cm', 'right': '1cm'}
    doc = Document(geometry_options=geometry_options, lmodern=False,
                   document_options=['a4paper', 'brazil'], inputenc=None,
                   fontenc=None, font_size='footnotesize')

    # Packages
    doc.packages.add(Package('microtype'))
    doc.packages.add(Package('indentfirst'))
    doc.packages.add(Package('graphicx'))
    doc.packages.add(Package('calc'))
    doc.packages.add(Package('fontspec'))
    options_background = ['scale=1', 'opacity=1', 'angle=0']
    doc.packages.add(Package('background', options=options_background))
    doc.packages.add(Package('csquotes'))

    # Settings (preamble)
    # MakeOuterQuote converts quotes automatically, no need for `` and ''
    doc.preamble.append(Command('MakeOuterQuote', '\"'))
    doc.preamble.append(Command('renewcommand',
                                arguments=[Command('baselinestretch'), '1.5']))
    doc.preamble.append(Command('setlength',
                                arguments=[Command('parindent'),
                                           NoEscape(r'.35\textwidth')]))
    doc.preamble.append(Command('setlength',
                                arguments=[Command('parskip'), '0.2cm']))
    doc.preamble.append(Command('setlength',
                                arguments=[Command('emergencystretch'), '5pt']))

    # Background image
    doc.preamble.append(NoEscape(
        r'\backgroundsetup{ contents=\includegraphics{modelo-certificado-20.pdf} }'))

    # Image directory (trailing slash is required)
    img_dir = '{}/base/static/img/'.format(BASE_DIR)
    doc.preamble.append(UnsafeCommand('graphicspath', '{{{}}}'.format(img_dir)))

    # Start of the document
    doc.append(UnsafeCommand('setmainfont', 'Latin Modern Sans',
                             ['SizeFeatures={Size=16}', 'Ligatures=TeX']))
    doc.append(Command('pagestyle', 'empty'))
    doc.append(Command('BgThispage'))
    doc.append(VerticalSpace(size='2cm', star=True))

    with doc.create(FlushRight()) as fr:
        fr.append(StandAloneGraphic('titulo-certificado.pdf', 'width=6.5cm'))
        fr.append(LineBreak())

    doc.append(VerticalSpace(size=NoEscape('-1cm'), star=True))
    doc.append(Command('Large'))

    # Used for the month names ('%B')
    locale.setlocale(locale.LC_ALL, 'pt_BR.UTF-8')
    inicio = certificado.relatorio.periodo_inicio.strftime('%d de %B de %Y').lower()
    fim = certificado.relatorio.periodo_inicio.strftime('%d de %B de %Y').lower()  # TODO: something is still missing here

    texto_principal = r'''
Certificamos que \textbf{{{nome}}} atuou como {funcao}, {sob_coordenacao}no
período de {inicio} a {fim}, na cidade de Foz do Iguaçu -- Paraná, com a
atividade de extensão: "\textbf{{{titulo}}}", com carga horária de
{carga_horaria_total} horas.
'''

    if certificado.funcao.nome == 'Coordenador(a)':
        sob_coordenacao = ''
    else:
        nome_coordenador = certificado.relatorio.projeto_extensao.coordenador.nome_completo
        sob_coordenacao = r'sob a coordenação de \textbf{{{}}}, '.format(
            escape_latex(nome_coordenador))

    texto_principal = texto_principal.format(
        nome=escape_latex(certificado.nome),
        funcao=certificado.funcao.nome.lower(),
        sob_coordenacao=sob_coordenacao,
        inicio=inicio,
        fim=fim,
        titulo=escape_latex(certificado.relatorio.projeto_extensao.titulo),
        carga_horaria_total=str(certificado.carga_horaria_total).split('.')[0])

    # texto_principal = NoEscape(r'''
    # Certificamos que \textbf{Adriana de Oliveira Gomes} participou como
    # bolsista do Programa de Apoio a Inclusão Social em Atividades de
    # Extensão -- Convênio No 750/2014 -- Fundação Araucária, Edital
    # 05/2014-PROEX, sob a orientação do (a) professor (a)
    # \textbf{Fernando Amâncio Aragão}, no período de outubro/2014 a
    # setembro/2015, com a atividade de extensão: \textbf{''Atendimento
    # fisioterapêutico para pacientes com sequelas neurológicas baseada em
    # tarefas funcionais.''}, com carga horária de 960 (novecentas e
    # sessenta) horas.
    # ''')

    doc.append(NoEscape(texto_principal))

    doc.append(VerticalSpace(size='1.5cm', star=True))
    doc.append(HorizontalSpace(size='7cm', star=True))

    dia = timezone.now().strftime('%d')
    mes = timezone.now().strftime('%B')
    ano = timezone.now().strftime('%Y')
    data = NoEscape(r'Foz do Iguaçu, {} de {} de {}'.format(dia, mes, ano))
    largura = Command('widthof', data).dumps()

    with doc.create(MiniPage(width=largura)) as mini:
        with mini.create(Center()) as center:
            center.append(data)
            center.append(NewLine())
            center.append(NewLine())
            center.append(NewLine())
            center.append('Coordenador do Projeto de Extensão')
            center.append(NewLine())
            center.append(NewLine())
            center.append(NewLine())
            center.append('Diretor de Centro')

    os.system('mkdir -p ' + PDF_DIR)
    filepath = '{}/certificado_{}'.format(PDF_DIR, str(certificado.id))
    doc.generate_pdf(filepath, clean_tex=False, compiler=pdfutils.COMPILER,
                     compiler_args=pdfutils.COMPILER_ARGS)

    return filepath
def make_pdf_report():
    """ assume profile available """
    pieces = gd.sessionInfo['Pieces']
    cntrating = [cr for nm, x, cr, tl in pieces]

    # we need a (single) temp dir for intermediates.
    tmpdir = Path(tempfile.gettempdir()) / 'RtcApp'
    if not tmpdir.is_dir():
        tmpdir.mkdir()

    # subdir
    if not reportsDir().is_dir():
        reportsDir().mkdir()
    reportfile = reportsDir() / gd.config['Session']

    crewname = gd.metaData['CrewName']

    geometry_options = {
        "top": "5mm",
        "bottom": "5mm",
        "right": "5mm",
        "left": "5mm"
    }
    doc = Document(documentclass='article',
                   geometry_options=geometry_options,
                   document_options=["12pt"])
    doc.preamble.append(NoEscape(r'\usepackage{graphicx}'))

    # see https://doc.qt.io/qt-5/qml-color.html for colors
    doc.append(NoEscape(r'\definecolor{aquamarine}{HTML}{7fffd4}'))
    doc.append(NoEscape(r'\definecolor{gainsboro}{HTML}{dcdcdc}'))

    # First page
    with doc.create(
            Section(f'Boat report {gd.metaData["CrewName"]}',
                    numbering=False)):
        r = gd.metaData["Rowers"]
        rwrcnt = gd.sessionInfo['RowerCnt']
        if rwrcnt == 1:
            doc.append('Rowers: ')
            doc.append(f'{r[0][0]} ')
        else:
            doc.append('Rowers from bow: ')
            for i in range(rwrcnt):
                doc.append(f'{r[i][0]}, ')
        doc.append(NewLine())
        doc.append(f'Boattype: {gd.metaData["BoatType"]}\n')
        doc.append(f'Calibration: {gd.metaData["Calibration"]}\n')
        doc.append(f'Misc: {gd.metaData["Misc"]}\n')
        doc.append(f'Powerline: {gd.metaData["PowerLine"]}\n')
        doc.append(f'Venue: {gd.metaData["Venue"]}\n')
        doc.append(VerticalSpace("5pt"))
        doc.append(NewLine())

        # get table from boat report
        rows = gd.boattablemodel.rowCount()
        columns = gd.boattablemodel.columnCount()
        boattab = 'l|' + ''.join(['r' for i in range(columns - 1)]) + '|'
        with doc.create(Tabular(boattab)) as table:
            table.add_hline()
            row = []
            for j in range(columns):
                index = QAbstractTableModel.index(gd.boattablemodel, 0, j)
                row.append(str(gd.boattablemodel.data(index)))
            table.add_row(row, color='aquamarine')
            table.add_hline()
            cnt = 0
            for i in range(rows):
                row = []
                if i == 0:
                    continue
                for j in range(columns):
                    index = QAbstractTableModel.index(gd.boattablemodel, i, j)
                    row.append(str(gd.boattablemodel.data(index)))
                if cnt % 2 == 0:
                    table.add_row(row, color='gainsboro')
                else:
                    table.add_row(row, color='aquamarine')
                cnt += 1
            table.add_hline()
            """
            table.add_empty_row()
            table.add_row((4, 5, 6, 7))
            """

    doc.append(NewPage())

    # for the plots
    fontP = FontProperties()
    fontP.set_size('xx-small')

    # Second page
    with doc.create(
            Section(f'Boat report {gd.metaData["CrewName"]}',
                    numbering=False)):
        av = ''
        filt = ''
        if gd.averaging:
            av = 'averaging'
        if gd.filter:
            filt = 'filtered'
        pcs = ['all'] + gd.p_names + ['average']
        doc.append(f'Using piece "{pcs[gd.boatPiece]}": {av} {filt}\n')
        doc.append(VerticalSpace("5pt"))
        doc.append(NewLine())

        sensors = gd.sessionInfo['Header']
        fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(nrows=2, ncols=2)
        ax1.set_title('Speed')
        ax1.grid(True)
        ax2.set_title('Acceleration')
        ax2.grid(True)
        ax3.set_title('Pitch')
        ax3.grid(True)
        ax4.set_title('Accel-Tempo per Piece')
        ax4.grid(True)

        piece = gd.boatPiece
        if piece == 0:
            for i in range(len(gd.p_names)):
                ax1.plot(gd.norm_arrays[i, :, sensors.index('Speed')],
                         linewidth=0.6, label=gd.p_names[i])
                ax2.plot(gd.norm_arrays[i, :, sensors.index('Accel')],
                         linewidth=0.6, label=gd.p_names[i])
                ax3.plot(gd.norm_arrays[i, :, sensors.index('Pitch Angle')],
                         linewidth=0.6, label=gd.p_names[i])
        elif piece == 7:
            speed = np.zeros(gd.norm_arrays[0, :, 1].shape)
            accel = np.zeros(gd.norm_arrays[0, :, 1].shape)
            pitch = np.zeros(gd.norm_arrays[0, :, 1].shape)
            for i in range(len(gd.p_names)):
                speed += gd.norm_arrays[i, :, sensors.index('Speed')]
                accel += gd.norm_arrays[i, :, sensors.index('Accel')]
                pitch += gd.norm_arrays[i, :, sensors.index('Pitch Angle')]
            ax1.plot(speed / 6, linewidth=0.6, label=gd.p_names[i])
            ax2.plot(accel / 6, linewidth=0.6, label=gd.p_names[i])
            ax3.plot(pitch / 6, linewidth=0.6, label=gd.p_names[i])
        else:
            i = piece - 1
            ax1.plot(gd.norm_arrays[i, :, sensors.index('Speed')],
                     linewidth=0.6, label=gd.p_names[i])
            ax2.plot(gd.norm_arrays[i, :, sensors.index('Accel')],
                     linewidth=0.6, label=gd.p_names[i])
            ax3.plot(gd.norm_arrays[i, :, sensors.index('Pitch Angle')],
                     linewidth=0.6, label=gd.p_names[i])

        pa = []
        for i in range(len(gd.p_names)):
            # accel and tempo per piece
            d, a = gd.prof_data[i]
            pa.append((d['Speed'], cntrating[i][1]))
        pa = list(zip(*pa))
        p = [10 * x for x in pa[0]]  # ad hoc scaling
        ax4.scatter(list(range(len(gd.p_names))), p, marker='H', color='green')
        ax4.scatter(list(range(len(gd.p_names))), pa[1], marker='H',
                    color='blue')

        ax1.legend(loc='lower right', prop=fontP)
        plt.tight_layout()
        tmpfig = tmpdir / gd.config['Session']
        plt.savefig(tmpfig)
        tmpfig = re.sub('\\\\', '/', str(tmpfig))  # for windows
        doc.append(
            NoEscape(r'\includegraphics[width=1.0\textwidth]{' + f'{tmpfig}' +
                     r'}'))
        plt.close(fig)

    # Third page
    doc.append(NewPage())
    with doc.create(Section('Crew report', numbering=False)):
        pcs = gd.p_names + ['average']
        doc.append(f'Piece "{pcs[gd.crewPiece]}" used.\n')

        fig = plt.figure()
        fig.subplots_adjust(hspace=0.7)
        gs = fig.add_gridspec(5, 2)
        ax1 = fig.add_subplot(gs[0:3, :])
        ax2 = fig.add_subplot(gs[3:, 0])
        ax3 = fig.add_subplot(gs[3:, 1])
        ax1.set_title('Gate Angle - GateForceX/Y')
        ax1.grid(True)
        ax2.set_title('Stretcher ForceX')
        ax2.grid(True)
        ax3.set_title('Power')
        ax3.grid(True)

        rcnt = gd.sessionInfo['RowerCnt']
        piece = gd.crewPiece
        if piece < len(gd.prof_data):
            # a separate piece, from the tumbler
            cp = gd.crewPiece
            d, aa = gd.prof_data[cp]
            for r in range(rcnt):
                sns = rowersensors(r)
                if gd.sessionInfo['ScullSweep'] == 'sweep':
                    i = sns['GateAngle']
                    j = sns['GateForceX']
                    k = sns['GateForceY']
                else:
                    i = sns['P GateAngle']
                    j = sns['P GateForceX']
                    k = sns['P GateForceY']
                # stretchers not always present!
                # k = sns['Stretcher Z']
                # todo: create switch to control working in this case
                ax1.plot(gd.norm_arrays[cp, :, i],
                         gd.norm_arrays[cp, :, j],
                         linewidth=0.6, label=f'R {r+1}')
                ax1.plot(gd.norm_arrays[cp, :, i],
                         gd.norm_arrays[cp, :, k],
                         linestyle=stippel, linewidth=0.6, label=f'R {r+1}Y')
                # twee = self.ax2.plot(gd.norm_arrays[gd.crewPiece, :, i], linewidth=0.6, label=f'R {r+1}')
                ax3.plot([gd.gmin[gd.crewPiece]], [0], marker='v', color='b')
                ax3.plot([gd.gmax[gd.crewPiece]], [0], marker='^', color='b')

                # reference curve derived from the stroke
                sns = rowersensors(rcnt - 1)
                fmean = d[rcnt - 1]['GFEff']
                if gd.sessionInfo['ScullSweep'] == 'sweep':
                    i = sns['GateAngle']
                    j = sns['GateForceX']
                else:
                    i = sns['P GateAngle']
                    j = sns['P GateForceX']
                minpos = min(gd.norm_arrays[cp, :, i])
                maxpos = max(gd.norm_arrays[cp, :, i])
                minarg = np.argmin(gd.norm_arrays[cp, :, i])
                maxarg = np.argmax(gd.norm_arrays[cp, :, i])
                fmin = gd.norm_arrays[cp, minarg, j]
                fmax = gd.norm_arrays[cp, maxarg, j]
                xstep = (maxpos - minpos) / 20
                ystep = (fmin - fmax) / 20  # assume fmin > fmax
                if gd.sessionInfo['ScullSweep'] == 'sweep':
                    xref = np.array([
                        minpos, minpos + 0.4 * xstep, minpos + 2 * xstep,
                        minpos + 5 * xstep, minpos + 7 * xstep,
                        minpos + 9 * xstep, minpos + 11 * xstep,
                        minpos + 14 * xstep, minpos + 16 * xstep,
                        minpos + 20 * xstep
                    ])
                    yref = np.array([
                        fmin, fmin + 20, 1.1 * fmean, 1.6 * fmean,
                        1.65 * fmean, 1.7 * fmean, 1.6 * fmean, 1.25 * fmean,
                        0.8 * fmean, fmax
                    ])
                else:
                    xref = np.array([
                        minpos, minpos + 0.4 * xstep, minpos + 2 * xstep,
                        minpos + 5 * xstep, minpos + 7 * xstep,
                        minpos + 9 * xstep, minpos + 11 * xstep,
                        minpos + 14 * xstep, minpos + 16 * xstep,
                        minpos + 20 * xstep
                    ])
                    yref = np.array([
                        fmin, fmin + 20, 1.1 * fmean, 1.6 * fmean,
                        1.65 * fmean, 1.7 * fmean, 1.6 * fmean, 1.25 * fmean,
                        0.8 * fmean, fmax
                    ])
                curveref = make_interp_spline(xref, yref, 2)
                xrefnew = np.linspace(min(xref), max(xref),
                                      int(maxpos - minpos))
                ax1.plot(xrefnew, curveref(xrefnew), color='black',
                         linewidth=0.5, linestyle=(0, (3, 6)))

                ax3.plot(aa[0 + r], linewidth=0.6, label=f'R {r+1}')
        else:
            # average
            for r in range(rcnt):
                sns = rowersensors(r)
                if gd.sessionInfo['ScullSweep'] == 'sweep':
                    i = sns['GateAngle']
                    j = sns['GateForceX']
                else:
                    i = sns['P GateAngle']
                    j = sns['P GateForceX']
                # stretchers not always available!
                # k = sns['Stretcher Z']

                # average
                nmbrpieces = len(gd.p_names)
                angle = np.zeros((100, ))
                force = np.zeros((100, ))
                power = np.zeros((100, ))
                for p in range(nmbrpieces):
                    angle += gd.norm_arrays[p, :, i]
                    force += gd.norm_arrays[p, :, j]
                    # stretcherZ = gd.norm_arrays[p, :, k]
                    d, a = gd.prof_data[p]
                    power += a[0 + r]  # accumulate this rower's power over pieces
                # plot
                # ax1.plot(angle/nmbrpieces, linewidth=0.6, label=f'R {r+1}')
                # ax2.plot(force/nmbrpieces, linewidth=0.6, label=f'R {r+1}')
                ax3.plot(power / nmbrpieces, linewidth=0.6, label=f'R {r+1}')

        ax3.legend(loc='upper right', prop=fontP)
        plt.tight_layout()

        # we keep using the same name
        tmpfig = tmpdir / (gd.config['Session'] + '_crew')
        plt.savefig(tmpfig)
        tmpfig = re.sub('\\\\', '/', str(tmpfig))  # for windows
        doc.append(
            NoEscape(r'\includegraphics[width=1.0\textwidth]{' + f'{tmpfig}' +
                     r'}'))
        plt.close(fig)

    # Rower pages
    doc.append(NewPage())
    rwrcnt = gd.sessionInfo['RowerCnt']
    fig = [None for i in range(rwrcnt)]
    rax1 = [None for i in range(rwrcnt)]
    sax1 = [None for i in range(rwrcnt)]
    for rwr in range(rwrcnt):
        pcs = ['all'] + gd.p_names + ['average']
        with doc.create(
                Section(
                    f'Rower: {gd.metaData["Rowers"][rwr][0]}, using piece "{pcs[gd.rowerPiece[rwr]]}"',
                    numbering=False)):
            rows = gd.rowertablemodel[rwr].rowCount()
            columns = gd.rowertablemodel[rwr].columnCount()
            rowertab = 'l|' + ''.join(['r' for i in range(columns - 1)]) + '|'
            with doc.create(Tabular(rowertab)) as table:
                table.add_hline()
                row = []
                for j in range(columns):
                    index = QAbstractTableModel.index(gd.rowertablemodel[rwr],
                                                      0, j)
                    row.append(str(gd.rowertablemodel[rwr].data(index)))
                table.add_row(row, color='aquamarine')
                table.add_hline()
                cnt = 0
                for i in range(rows):
                    row = []
                    if i == 0:
                        continue
                    for j in range(columns):
                        index = QAbstractTableModel.index(
                            gd.rowertablemodel[rwr], i, j)
                        row.append(str(gd.rowertablemodel[rwr].data(index)))
                    if cnt % 2 == 0:
                        table.add_row(row, color='gainsboro')
                    else:
                        table.add_row(row, color='aquamarine')
                    cnt += 1
                table.add_hline()

            doc.append('\n')

            fig[rwr], ((rax1[rwr])) = plt.subplots(nrows=1, ncols=1)
            rax1[rwr].set_title('GateAngle - GateForceX/Y')
            rax1[rwr].grid(True)

            rsens = rowersensors(rwr)
            piece = gd.rowerPiece[rwr]
            scaleAngle = 10
            if gd.rowerPiece[rwr] == 0:
                # all
                for i in range(len(gd.p_names)):
                    if gd.sessionInfo['ScullSweep'] == 'sweep':
                        # print(f'Make rowerplot for {self.rower}')
                        rax1[rwr].plot(
                            gd.norm_arrays[i, :, rsens['GateAngle']],
                            gd.norm_arrays[i, :, rsens['GateForceX']],
                            linewidth=0.6, label=f'{gd.p_names[i]}')
                        rax1[rwr].plot(
                            gd.norm_arrays[i, :, rsens['GateAngle']],
                            gd.norm_arrays[i, :, rsens['GateForceY']],
                            linestyle=(0, (7, 10)), linewidth=0.6,
                            label=f'{gd.p_names[i]}')
                    else:
                        rax1[rwr].plot(
                            gd.norm_arrays[i, :, rsens['P GateAngle']],
                            gd.norm_arrays[i, :, rsens['P GateForceX']],
                            linewidth=0.6, label=f'{gd.p_names[i]}')
                        rax1[rwr].plot(
                            gd.norm_arrays[i, :, rsens['P GateAngle']],
                            gd.norm_arrays[i, :, rsens['P GateForceY']],
                            linestyle=(0, (7, 10)), linewidth=0.6,
                            label=f'{gd.p_names[i]}')
            elif gd.rowerPiece[rwr] == 7:
                # average
                angle = np.zeros((100, ))
                forceX = np.zeros((100, ))
                forceY = np.zeros((100, ))
                if gd.sessionInfo['ScullSweep'] == 'sweep':
                    for i in range(len(gd.p_names)):
                        angle += gd.norm_arrays[i, :, rsens['GateAngle']]
                        forceX += gd.norm_arrays[i, :, rsens['GateForceX']]
                        forceY += gd.norm_arrays[i, :, rsens['GateForceY']]
                    rax1[rwr].plot(angle / 6, forceX / 6, linewidth=0.6,
                                   label='FX')
                    rax1[rwr].plot(angle / 6, forceY / 6,
                                   linestyle=(0, (7, 10)), linewidth=0.6,
                                   label='FY')
                else:
                    for i in range(len(gd.p_names)):
                        angle += gd.norm_arrays[i, :, rsens['P GateAngle']]
                        forceX += gd.norm_arrays[i, :, rsens['P GateForceX']]
                        forceY += gd.norm_arrays[i, :, rsens['P GateForceY']]
                    rax1[rwr].plot(angle / 6, forceX / 6, linewidth=0.6,
                                   label='FX')
                    rax1[rwr].plot(angle / 6, forceY / 6,
                                   linestyle=(0, (7, 10)), linewidth=0.6,
                                   label='FY')
            else:
                rp = gd.rowerPiece[rwr] - 1
                sns = rowersensors(rwr)
                # ad hoc angle x 10. Better via (max-min). Scale is for force
                # print(f'Create rowerplot for {self.rower}')
                outboat = [d for d, e in gd.prof_data]
                ri = [a[rwr] for a in outboat]  # rower info per piece
                fmean = ri[rp]['GFEff']
                if gd.sessionInfo['ScullSweep'] == 'sweep':
                    i = sns['GateAngle']
                    j = sns['GateForceX']
                    k = sns['GateForceY']
                else:
                    i = sns['P GateAngle']
                    j = sns['P GateForceX']
                    k = sns['P GateForceY']

                # TESTING reference curve
                # length from the table? For now 100; set the start correctly
                # scale with average force
                minpos = min(gd.norm_arrays[rp, :, i])
                maxpos = max(gd.norm_arrays[rp, :, i])
                minarg = np.argmin(gd.norm_arrays[rp, :, i])
                maxarg = np.argmax(gd.norm_arrays[rp, :, i])
                fmin = gd.norm_arrays[rp, minarg, j]
                fmax = gd.norm_arrays[rp, maxarg, j]
                xstep = (maxpos - minpos) / 20
                ystep = (fmin - fmax) / 20  # assume fmin > fmax
                if gd.sessionInfo['ScullSweep'] == 'sweep':
                    xref = np.array([
                        minpos, minpos + 0.4 * xstep, minpos + 2 * xstep,
                        minpos + 5 * xstep, minpos + 7 * xstep,
                        minpos + 9 * xstep, minpos + 11 * xstep,
                        minpos + 14 * xstep, minpos + 16 * xstep,
                        minpos + 20 * xstep
                    ])
                    yref = np.array([
                        fmin, fmin + 20, 1.1 * fmean, 1.6 * fmean,
                        1.65 * fmean, 1.7 * fmean, 1.6 * fmean, 1.25 * fmean,
                        0.8 * fmean, fmax
                    ])
                else:
                    xref = np.array([
                        minpos, minpos + 0.4 * xstep, minpos + 2 * xstep,
                        minpos + 5 * xstep, minpos + 7 * xstep,
                        minpos + 9 * xstep, minpos + 11 * xstep,
                        minpos + 14 * xstep, minpos + 16 * xstep,
                        minpos + 20 * xstep
                    ])
                    yref = np.array([
                        fmin, fmin + 20, 1.1 * fmean, 1.6 * fmean,
                        1.65 * fmean, 1.7 * fmean, 1.6 * fmean, 1.25 * fmean,
                        0.8 * fmean, fmax
                    ])
                curveref = make_interp_spline(xref, yref, 2)
                xrefnew = np.linspace(min(xref), max(xref),
                                      int(maxpos - minpos))
                rax1[rwr].plot(gd.norm_arrays[rp, :, i],
                               gd.norm_arrays[rp, :, j],
                               linewidth=0.6, label=f'{gd.p_names[rp]} FX')
                rax1[rwr].plot(gd.norm_arrays[rp, :, i],
                               gd.norm_arrays[rp, :, k],
                               linestyle=stippel, linewidth=0.6,
                               label=f'{gd.p_names[rp]} FY')
                rax1[rwr].plot(xrefnew, curveref(xrefnew), color='black',
                               linewidth=0.5, linestyle=(0, (3, 6)))

            # rax1[rwr].legend(loc='lower right', prop=fontP, bbox_to_anchor=(1.05, 1))
            rax1[rwr].legend(loc='upper right', prop=fontP)
            plt.tight_layout()
            tmpfig = tmpdir / (gd.config['Session'] + f'_{rwr}')
            plt.savefig(tmpfig)
            tmpfig = re.sub('\\\\', '/', str(tmpfig))  # for windows
            doc.append(
                NoEscape(r'\includegraphics[width=0.9\textwidth]{' +
                         f'{tmpfig}' + r'}'))
            plt.close(fig[rwr])

            if 'StretcherForceX' in sensors:
                doc.append('\n')

                # stretcher plot
                fig[rwr], sax1[rwr] = plt.subplots()
                sax1[rwr].set_title('Stretcher')
                sax1[rwr].grid(True)
                rsens = rowersensors(rwr)
                if gd.rowerPiece[rwr] == 0:
                    # all: not handled here
                    pass
                elif gd.rowerPiece[rwr] == len(gd.p_names) + 1:
                    # average: not handled here
                    pass
                else:
                    # a single piece (only this case)
                    i = gd.rowerPiece[rwr] - 1
                    name, se, nr, sp = pieces[i]
                    sax1[rwr].plot(
                        gd.dataObject[sp[0]:sp[1], rsens['StretcherForceX']],
                        linewidth=0.6, label='StretcherForceX')
                    sax1[rwr].plot(
                        10 * gd.dataObject[sp[0]:sp[1], rsens['Stretcher RL']],
                        linewidth=0.6, label='Stretcher RL')
                    sax1[rwr].plot(
                        10 * gd.dataObject[sp[0]:sp[1], rsens['Stretcher TB']],
                        linewidth=0.6, label='Stretcher TB')

                sax1[rwr].legend(loc='lower right', prop=fontP)
                plt.tight_layout()
                tmpfig = tmpdir / (gd.config['Session'] + f'_{rwr}_s')
                plt.savefig(tmpfig)
                tmpfig = re.sub('\\\\', '/', str(tmpfig))  # for windows
                doc.append(
                    NoEscape(r'\includegraphics[width=0.6\textwidth]{' +
                             f'{tmpfig}' + r'}'))
                plt.close(fig[rwr])

        if rwr != rwrcnt - 1:
            doc.append(NewPage())

    # Extra page
    if gd.extraplot:
        doc.append(NewPage())
        fig, extr = plt.subplots()
        s2 = gd.config['Session2']
        if s2 == '':
            extr.set_title('Custom plot')
        else:
            extr.set_title(f'Custom plot (second session: {s2})')
        extr.grid(True)

        # data from update_plot from View piece, can we do this simpler?
        [strt, end, strttime, center, scalex, slist, secslist] = gd.extrasettings
        times = list(map(lambda x: x / Hz, list(range(gd.view_tr.shape[0]))))
        for i, name, scaley in slist:
            extr.plot(times, gd.view_tr[:, i] * scaley, linewidth=0.6,
                      label=name)
        for i, name, scale in secslist:
            extr.plot(times, gd.view_tr2[:, i] * scale, linewidth=0.6,
                      label=name, linestyle=stippel)

        dist = (end - strt)
        xFrom = center - scalex * dist / 2
        xTo = center + scalex * dist / 2
        extr.set_xlim(xFrom, xTo)

        # start at correct begin value
        locs = extr.get_xticks()
        ticks = [item + strttime for item in locs]
        extr.set_xticklabels(ticks)

        extr.legend()
        plt.tight_layout()

        # we keep using the same name
        tmpfig = tmpdir / (gd.config['Session'] + '_extra')
        plt.savefig(tmpfig)
        tmpfig = re.sub('\\\\', '/', str(tmpfig))  # for windows
        doc.append(
            NoEscape(r'\includegraphics[width=1.0\textwidth]{' + f'{tmpfig}' +
                     r'}'))
        plt.close(fig)

        doc.append(NewLine())
        doc.append(VerticalSpace("10pt"))
        doc.append(f' Piece: {gd.selPiece}')
        if gd.sd_selPiece != '':
            doc.append(NewLine())
            doc.append(VerticalSpace("5pt"))
            doc.append(f'Secondary piece: {gd.sd_selPiece}')

    # generate report
    doc.generate_pdf(reportfile, clean_tex=True)
def add_summary(self):
    with self.doc.create(
            Section(self.analysis.summary_title, numbering=False)):

        with self.doc.create(Subsection('Run information', numbering=False)):
            with self.doc.create(Tabular('lp{6cm}lp{20cm}', width=2)) as table:
                table.add_row(('Date', self.analysis.start_time))
                if self.analysis.ont_fast5:
                    table.add_row(('ONT FAST5', self.analysis.ont_fast5))
                if self.analysis.ont_raw_fastq:
                    table.add_row(('ONT FASTQ', self.analysis.ont_raw_fastq))
                if self.analysis.illumina_fastq:
                    table.add_row(('Illumina FASTQ',
                                   ', '.join(self.analysis.illumina_fastq)))
                if self.analysis.genome_fasta:
                    table.add_row(('Assembly', self.analysis.genome_fasta))
                if self.analysis.reference_fasta:
                    table.add_row(('Reference', self.analysis.reference_fasta))
            self.doc.append(VerticalSpace("10pt"))

        if self.analysis.ont_n50 is not None:
            with self.doc.create(
                    Subsection('ONT library statistics', numbering=False)):
                with self.doc.create(Tabular('ll', width=2)) as table:
                    table.add_row(
                        ('ONT N50', '{:,}'.format(self.analysis.ont_n50)))
                    table.add_row(
                        ('ONT reads',
                         '{:,}'.format(self.analysis.ont_read_count)))
                    table.add_row(
                        ('ONT bases',
                         '{:s}'.format(self.analysis.ont_bases)))
                self.doc.append(VerticalSpace("10pt"))

        if self.analysis.illumina_length_mean is not None:
            with self.doc.create(
                    Subsection('Illumina library statistics',
                               numbering=False)):
                with self.doc.create(Tabular('ll', width=2)) as table:
                    table.add_row(
                        ('Illumina mean length',
                         '{:.1f}'.format(self.analysis.illumina_length_mean)))
                    table.add_row(
                        ('Illumina reads',
                         '{:,}'.format(self.analysis.illumina_read_count)))
                    table.add_row(
                        ('Illumina bases',
                         '{:s}'.format(self.analysis.illumina_bases)))
                self.doc.append(VerticalSpace("10pt"))

        if self.analysis.genome is not None:
            with self.doc.create(
                    Subsection('Assembly statistics', numbering=False)):
                with self.doc.create(Tabular('ll', width=2)) as table:
                    table.add_row(('Contigs', len(self.analysis.genome)))

                    genome_size = 0
                    for i in self.analysis.genome:
                        genome_size += len(i.seq)
                    genome_size = si_format(genome_size, precision=1)
                    table.add_row(('Assembly size', genome_size))
                self.doc.append(VerticalSpace("10pt"))

        if len(self.report[self.analysis.assembly_title]) > 0:
            if len(self.report[self.analysis.assembly_title][
                    self.analysis.assembly_notes_title]) > 0:
                with self.doc.create(
                        Subsection(self.analysis.assembly_notes_title,
                                   numbering=False)):
                    left = FlushLeft()
                    for note in self.report[self.analysis.assembly_title][
                            self.analysis.assembly_notes_title]:
                        left.append(note)
                        left.append(LineBreak())
                    self.doc.append(left)
                self.doc.append(VerticalSpace("10pt"))

        if self.analysis.contig_info is not None:
            for method in ['ONT', 'Illumina']:
                if method not in self.analysis.contig_info.index:
                    continue
                with self.doc.create(
                        Subsection('Assembly coverage by ' + method,
                                   numbering=False)):
                    table_format = 'l' * self.analysis.contig_info[
                        method].shape[1]
                    self.doc.append('')
                    with self.doc.create(Tabular(table_format)) as table:
                        table.add_row(('Contig', 'Length (bp)',
                                       'Coverage (X)'))
                        table.add_hline()
                        formatted = self.analysis.contig_info[method].copy()
                        formatted.iloc[:, 1] = formatted.iloc[:, 1].apply(
                            lambda x: '{:,}'.format(x))
                        for i in range(
                                self.analysis.contig_info[method].shape[0]):
                            table.add_row(
                                formatted.iloc[i, :].values.tolist())
                    self.doc.append(LineBreak())
                    self.doc.append(VerticalSpace("10pt"))
    page.append("\nSTREET, ADDRESS")
    page.append("\nCITY, POSTAL CODE")

with page.create(TextBlock(100, 150, 40)):
    page.append(HugeText(bold("VOID")))

with page.create(TextBlock(80, 150, 0)):
    page.append("DATE")
    page.append(MediumText(bold("2016 06 07\n")))
    page.append(HorizontalSpace("10mm"))
    page.append(SmallText("Y/A M/M D/J"))

with page.create(TextBlock(70, 150, 30)):
    page.append(MediumText(bold("$***** 10,000.00")))

page.append(VerticalSpace("100mm"))

doc.generate_pdf("textblock", clean_tex=False)

"""
Asked for 1 installation when I ran the code
"""

"""
rishikesh agrawani@DESKTOP-8AATOO4 MINGW64 /d/projects/Python/PyLaTex (master)
$ cd 17_textblock_example/

rishikesh agrawani@DESKTOP-8AATOO4 MINGW64 /d/projects/Python/PyLaTex/17_textblock_example (master)
$ python textblock_example.py

rishikesh agrawani@DESKTOP-8AATOO4 MINGW64 /d/projects/Python/PyLaTex/17_textblock_example (master)
$ ls
textblock.pdf  textblock.tex  textblock_example.py
title="Curriculum Vitae", author=data['personal']['name'], options=['ngerman']) if args.translate: doc.preamble.append(Command("usepackage", "babel")) doc.preamble.append(Command("usepackage", "csquotes")) doc.preamble.append(Command("MakeOuterQuote", NoEscape("\""))) PROFIL = PROFIL.resolve().as_posix() doc.append(Command("raggedright")) make_personal(doc, image=True) for section in cv_tree['sections']: sheet = section['sheet'] doc.append(VerticalSpace(NoEscape("-6pt"))) with doc.create(Section(sheet_data[sheet]['section'])): doc.append(VerticalSpace(NoEscape("-8pt"))) doc.append(Command('hrule')) for v in section['order']: v = v.replace(" ", "_") subheader = sheet_data[sheet]['subcategories'][v][ "subsection"] # doc.append(NoEscape("\\noindent\\rule{\\textwidth}{0.4pt}")) # doc.append(Command('hrule')) p_y = '' p_m = '' monthit = len( sheet_data[sheet]['subcategories'][v]['data']) p_title = '' p_entry = {}
def gen_pdf(data):
    '''
    This function generates a PDF prayer time schedule using LaTeX.
    It takes a Tawqeetex object (data) that must be initialized before the call.
    '''
    geometry_options = {"tmargin": "3cm", "bmargin": "2cm",
                        "rmargin": "2cm", "lmargin": "2cm"}
    doc = Document(geometry_options=geometry_options)

    # Add packages for arabic text
    doc.preamble.append(NoEscape(r'\usepackage[T2A,LAE,T1]{fontenc}'))
    doc.preamble.append(NoEscape(r'\usepackage[arabic, USenglish]{babel}'))

    # Add document header
    header = PageStyle("header")  # header_thickness=0, footer_thickness=0
    # # Create left header
    # with header.create(Head("L")):
    #     header.append("")
    # # Create center header
    # with header.create(Head("C")):
    #     header.append("")
    # # Create right header
    # with header.create(Head("R")):
    #     header.append("logo goes here")
    # Create center footer
    with header.create(Foot("C")):
        # NOTE: L and R footer are also available
        header.append("Generated with tawqeeTeX")

    # Add Heading
    doc.preamble.append(header)
    doc.change_document_style("header")

    doc.append(NoEscape(r'\begin{center}'))
    # doc.append(VerticalSpace('20pt'))
    doc.append(LargeText(bold(get_title_str(data.lang, data.month, data.year))))
    doc.append(VerticalSpace('20pt'))
    doc.append(LineBreak())
    doc.append(LargeText(italic(data.city)))
    doc.append(VerticalSpace('20pt'))
    doc.append(LineBreak())
    doc.append(NoEscape(r'\rowcolors{2}{green!10}{yellow!10}'))
    doc.append(NoEscape(r'\setlength{\arrayrulewidth}{0.5pt}'))

    # Create table for the prayer time schedule
    with doc.create(Tabular('|lc|cccccc|cr|', pos='c', row_height='1.35',
                            col_space='7.5', width=10)) as table:
        table.add_hline()
        table.add_row(('', get_month_str(data.lang, data.month),
                       'Isha', 'Maghrib', 'Asr', 'Dhuhr', 'Shuruq', 'Fajr',
                       NoEscape(r'\AR{' + data.months['hi'] + '}'), ''),
                      color='lightgray!20')
        table.add_row(('', '', *get_prayers_str(), '', ''),
                      color='lightgray!20')
        table.add_hline()
        c = 0
        for day, weekday in data.date_gr.items():
            # Insert timings
            table.add_row(get_weekday_str(weekday, data.lang), day,
                          data.isha[c], data.maghrib[c], data.asr[c],
                          data.dhuhr[c], data.sunrise[c], data.fajr[c],
                          data.hi_day[c],
                          NoEscape(r'\AR{' + data.hi_weekday[c] + '}'))
            table.add_hline()
            c += 1
    doc.append(NoEscape(r'\end{center}'))

    doc.generate_pdf(data.city + '-' + data.month + '-' + data.year,
                     clean_tex=False, silent=True)
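# A minimal, self-contained sketch of the pattern shared by the snippets above:
# append content to a Document, space blocks with VerticalSpace/LineBreak, render.
# The file name "spacing_demo" and the body text are illustrative assumptions;
# generate_tex() is used so no LaTeX toolchain is needed (use generate_pdf() otherwise).
from pylatex import Document, LineBreak, MiniPage, Section, VerticalSpace
from pylatex.utils import bold

doc = Document("spacing_demo")
with doc.create(Section("Demo", numbering=False)):
    doc.append("First block of text.")
    doc.append(VerticalSpace("10pt"))  # extra vertical gap
    doc.append(LineBreak())            # start a new line
    with doc.create(MiniPage(width=r"0.5\textwidth")):
        doc.append(bold("Second block, inside a half-width minipage."))

doc.generate_tex()  # writes spacing_demo.tex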