def _on_export_items_button_pressed(self, event):
	"""Export export-area entries onto removable media or into a CD/DVD image.

	Asks the user for target media/options, exports the selected items
	(optionally encrypted), and either burns the result to disk or leaves
	it on the mounted medium. Documents the export in a SOAP note.
	"""
	event.Skip()
	items = self.__get_items_to_work_on(_('Exporting entries'))
	if items is None:
		return
	# export dialog
	pat = gmPerson.gmCurrentPatient()
	dlg = cExportAreaExportToMediaDlg(self, -1, patient = pat, item_count = len(items))
	choice = dlg.ShowModal()
	media = dlg._LCTRL_removable_media.get_selected_item_data(only_one = True)
	use_subdir = dlg._CHBOX_use_subdirectory.IsChecked()
	encrypt = dlg._CHBOX_encrypt.IsChecked()
	dlg.DestroyLater()
	if choice == wx.ID_CANCEL:
		return
	# NOTE(review): <media> may be None if nothing was selected -- confirm
	# the dialog disables OK until a medium is chosen
	# export the files
	if media['type'] == 'cd':
		# burn path: stage files in a sandbox that becomes the ISO content
		base_dir = gmTools.mk_sandbox_dir(prefix = 'iso-')
	else:
		base_dir = media['mountpoint']
	if use_subdir:
		dir2save2 = os.path.join(base_dir, pat.subdir_name)
	else:
		dir2save2 = base_dir
	# FIX: coalesce() selects by None-ness, so the boolean <encrypt> was
	# returned verbatim as the progress title; bool2subst() selects by truth
	export_dir = self.__export_as_files (
		gmTools.bool2subst(encrypt, _('Exporting encrypted entries'), _('Exporting entries')),
		base_dir = dir2save2,
		items = items,
		encrypt = encrypt,
		with_metadata = True
	)
	if export_dir is None:
		gmDispatcher.send(signal = 'statustext', msg = _('Cannot export: aborted or error.'))
		return
	if media['type'] == 'cd':
		if not self.__burn_dir_to_disk(base_dir = base_dir):
			return
		gmDispatcher.send(signal = 'statustext', msg = _('Entries successfully burned to disk.'))
		self.__save_soap_note(soap = _('Burned onto CD/DVD:\n - %s') % '\n - '.join([ i['description'] for i in items ]))
	else:
		gmDispatcher.send(signal = 'statustext', msg = _('Exported entries into [%s]') % export_dir)
		self.__save_soap_note(soap = _('Exported onto removable media:\n - %s') % '\n - '.join([ i['description'] for i in items ]))
	self.__browse_patient_data(dir2save2, encrypted = encrypt, archive = False, has_metadata = True)
	# remove_entries ?
	return True
def dump_items_to_disk(self, base_dir=None, items=None):
	"""Write export items as plain files into a directory.

	Defaults to all items of this export area. When no target directory
	is given, a fresh per-patient sandbox directory is created.

	Returns the directory written to, or None when there is nothing to dump.
	"""
	if items is None:
		items = self.items
	if not items:
		return None
	if base_dir is None:
		from Gnumed.business.gmPerson import cPatient
		patient = cPatient(aPK_obj=self.__pk_identity)
		base_dir = gmTools.mk_sandbox_dir(prefix=u'exp-%s-' % patient.dirname)
	_log.debug('dumping export items to: %s', base_dir)
	gmTools.mkdir(base_dir)
	for entry in items:
		entry.save_to_file(directory=base_dir)
	return base_dir
def mirror_url(url: str, base_dir: str = None, verbose: bool = False) -> str:
	"""Create a local, non-recursive mirror of the web *page* at <url>.

	Note: Not for mirroring a *site* (recursively).

	Args:
		url: the URL to mirror
		base_dir: where to store the page and its prerequisites, sandbox dir under tmp_dir if None

	Returns:
		The directory holding the mirror, or None on failure.
	"""
	assert (url is not None), '<url> must not be None'
	_log.debug('mirroring: %s', url)
	if base_dir is None:
		# derive a filesystem-safe sandbox prefix from the URL
		sandbox_prefix = url.split('://')[-1]
		sandbox_prefix = sandbox_prefix.strip(':').strip('/').replace('/', '#')
		sandbox_prefix = gmTools.fname_sanitize(sandbox_prefix)
		base_dir = gmTools.mk_sandbox_dir(prefix = sandbox_prefix + '-')
	_log.debug('base dir: %s', base_dir)
	cmd = [
		'wget',
		'--directory-prefix=%s' % base_dir,
		#'--adjust-extension',
		'--no-remove-listing',
		'--timestamping',
		'--page-requisites',
		'--continue',
		'--convert-links',
		'--user-agent=""',
		'--execute', 'robots=off',
		'--wait=1'
	]
	if verbose:
		cmd.append('--debug')
	cmd.append(url)
	#wget --output-file=logfile
	#'<a href="%s">%s</a>' % (url, url),
	success, ret_code, STDOUT = gmShellAPI.run_process(cmd_line = cmd, timeout = 15, verbose = verbose)
	if not success:
		return None
	return base_dir
def join_files_as_pdf(files: list = None, pdf_name: str = None) -> str:
	"""Convert files to PDF and join them into one final PDF.

	Args:
		files: input files, in the desired page order
		pdf_name: name of the final PDF; a unique name is generated if None

	Returns:
		Name of final PDF or None (conversion or joining failed).
	"""
	# FIX: annotation was "files: [] = None" -- an empty-list *literal* is
	# not a type; use "list"
	assert (files is not None), '<files> must not be None'
	if len(files) == 0:
		return None
	sandbox = gmTools.mk_sandbox_dir()
	pdf_pages = []
	# convert each input into a numbered single-page-source PDF
	for page_idx, fname in enumerate(files, start = 1):
		pdf = convert_file (
			filename = fname,
			target_mime = 'application/pdf',
			target_filename = gmTools.get_unique_filename (
				prefix = '%s-' % page_idx,
				suffix = '.pdf',
				tmp_dir = sandbox
			),
			target_extension = '.pdf',
			verbose = True
		)
		if pdf is None:
			_log.error('cannot convert [%s] to PDF', fname)
			return None
		pdf_pages.append(pdf)
	if pdf_name is None:
		pdf_name = gmTools.get_unique_filename(suffix = '.pdf')
	cmd_line = ['pdfunite']
	cmd_line.extend(pdf_pages)
	cmd_line.append(pdf_name)
	success, returncode, stdout = gmShellAPI.run_process(cmd_line = cmd_line, verbose = True)
	if not success:
		_log.debug('cannot join files into one PDF')
		return None
	return pdf_name
def export(self, base_dir=None, items=None, with_metadata=True):
	"""Export items as files plus descriptive metadata.

	Creates index.html, autorun.inf, cd.inf, README and patient
	demographics (GDT/XML/VCF) alongside the exported documents.

	Returns the directory exported into, or None when there is nothing
	to export.
	"""
	if items is None:
		# FIX: was "items_found = self.items", leaving <items> as None
		# and crashing on len(items) below
		items = self.items
	if len(items) == 0:
		return None
	from Gnumed.business.gmPerson import cPatient
	pat = cPatient(aPK_obj = self.__pk_identity)
	if base_dir is None:
		base_dir = gmTools.mk_sandbox_dir(prefix = u'exp-%s-' % pat.dirname)
	_log.debug('base dir: %s', base_dir)
	doc_dir = os.path.join(base_dir, r'documents')
	gmTools.mkdir(doc_dir)
	mugshot = pat.document_folder.latest_mugshot
	if mugshot is None:
		mugshot_url = u'documents/no-such-file.png'
		mugshot_alt = _('no patient photograph available')
		mugshot_title = u''
	else:
		mugshot_url = mugshot.export_to_file(directory = doc_dir)
		mugshot_alt = _('patient photograph from %s') % gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
		mugshot_title = gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
	# index.html
	idx_fname = os.path.join(base_dir, u'index.html')
	idx_file = io.open(idx_fname, mode = u'wt', encoding = u'utf8')
	# header
	browse_dicomdir = u''
	existing_files = os.listdir(base_dir)
	if u'DICOMDIR' in existing_files:
		browse_dicomdir = u' <li><a href="./DICOMDIR">browse DICOMDIR</a></li>'
	idx_file.write(_html_start % {
		u'html_title_header': _('Patient data for'),
		u'html_title_patient': gmTools.html_escape_string(pat['description_gender'] + u', ' + _(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')),
		u'title': _('Patient data export'),
		u'pat_name': gmTools.html_escape_string(pat['description_gender']),
		u'pat_dob': gmTools.html_escape_string(_(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')),
		u'mugshot_url': mugshot_url,
		u'mugshot_alt': mugshot_alt,
		u'mugshot_title': mugshot_title,
		u'docs_title': _(u'Documents'),
		u'browse_root': _(u'browse storage medium'),
		u'browse_docs': _(u'browse documents area'),
		u'browse_dicomdir': browse_dicomdir
	})
	# middle: one list entry per exported document
	for item in items:
		item_path = item.export_to_file(directory = doc_dir)
		item_fname = os.path.split(item_path)[1]
		idx_file.write(_html_list_item % (item_fname, gmTools.html_escape_string(item['description'])))
	# footer
	_cfg = gmCfg2.gmCfgData()
	# FIX: pass the timestamp explicitly (pydt_strftime was called without
	# a datetime; newer sibling versions pass gmDateTime.pydt_now_here())
	idx_file.write(_html_end % (
		gmTools.html_escape_string(gmDateTime.pydt_strftime(gmDateTime.pydt_now_here(), format = '%Y %B %d', encoding = u'utf8')),
		gmTools.html_escape_string(_cfg.get(option = u'client_version'))
	))
	idx_file.close()
	# autorun.inf
	autorun_fname = os.path.join(base_dir, u'autorun.inf')
	autorun_file = io.open(autorun_fname, mode = u'wt', encoding = u'utf8')
	autorun_file.write(_autorun_inf % (
		(pat['description_gender'] + u', ' + _(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')).strip(),
		_('Browse patient data')
	))
	autorun_file.close()
	# cd.inf
	cd_inf_fname = os.path.join(base_dir, u'cd.inf')
	cd_inf_file = io.open(cd_inf_fname, mode = u'wt', encoding = u'utf8')
	cd_inf_file.write(_cd_inf % (
		pat['lastnames'],
		pat['firstnames'],
		gmTools.coalesce(pat['gender'], u'?'),
		pat.get_formatted_dob('%Y-%m-%d'),
		gmDateTime.pydt_strftime(gmDateTime.pydt_now_here(), format = '%Y-%m-%d', encoding = u'utf8'),
		pat.ID,
		_cfg.get(option = u'client_version'),
		u' / '.join([ u'%s = %s (%s)' % (g['tag'], g['label'], g['l10n_label']) for g in pat.gender_list ])
	))
	cd_inf_file.close()
	# README
	readme_fname = os.path.join(base_dir, u'README')
	readme_file = io.open(readme_fname, mode = u'wt', encoding = u'utf8')
	readme_file.write(_README % (
		pat['description_gender'] + u', ' + _(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')
	))
	readme_file.close()
	# patient demographics as GDT/XML/VCF
	pat.export_as_gdt(filename = os.path.join(base_dir, u'patient.gdt'))
	pat.export_as_xml_linuxmednews(filename = os.path.join(base_dir, u'patient.xml'))
	pat.export_as_vcard(filename = os.path.join(base_dir, u'patient.vcf'))
	return base_dir
def create_encrypted_zip_archive_from_dir(source_dir: str, comment: str = None, overwrite: bool = True, passphrase: str = None, verbose: bool = False) -> bool:
	"""Create encrypted archive of a directory.

	The encrypted archive file will always be named "datawrapper.zip"
	for confidentiality reasons. If callers want another name they will
	have to shutil.move() the zip file themselves. This archive will be
	compressed and AES256 encrypted with the given passphrase. Therefore,
	the result will not decrypt with earlier versions of unzip software.
	On Windows, 7z or WinZip are needed.

	The zip format does not support header encryption thereby allowing
	attackers to gain knowledge of patient details by observing the names
	of files and directories inside the encrypted archive. To reduce that
	attack surface, GNUmed will create _another_ zip archive inside
	"datawrapper.zip", which eventually wraps up the patient data as
	"data.zip". That archive is not compressed and not encrypted, and can
	thus be unpacked with any old unzipper.

	Note that GNUmed does NOT remember the passphrase for you. You will
	have to take care of that yourself, and possibly also safely hand
	over the passphrase to any receivers of the zip archive.

	Args:
		source_dir: the directory to archive and encrypt
		comment: included as a file containing the comment
		overwrite: remove preexisting archive before creation, avoiding *updating* of same, and thereby including unintended data
		passphrase: minimum length of 5 if given

	Returns:
		Name of the outer archive on success, False on failure, None (other error).
	"""
	assert (source_dir is not None), '<source_dir> must not be <None>'
	# FIX: passphrase defaults to None -- len(None) raised TypeError
	# instead of failing gracefully
	if passphrase is None or len(passphrase) < 5:
		_log.error('<passphrase> must be at least 5 characters/signs/digits')
		return None
	gmLog2.add_word2hide(passphrase)
	source_dir = os.path.abspath(source_dir)
	if not os.path.isdir(source_dir):
		_log.error('<source_dir> does not exist or is not a directory: %s', source_dir)
		return False
	for cmd in ['7z', '7z.exe']:
		found, binary = gmShellAPI.detect_external_binary(binary=cmd)
		if found:
			break
	if not found:
		_log.warning('no 7z binary found')
		return None
	sandbox_dir = gmTools.mk_sandbox_dir()
	archive_path_inner = os.path.join(sandbox_dir, 'data')
	if not gmTools.mkdir(archive_path_inner):
		_log.error('cannot create scratch space for inner archive: %s', archive_path_inner)
		# FIX: bail out -- archiving into a nonexistent directory cannot succeed
		return None
	archive_fname_inner = 'data.zip'
	archive_name_inner = os.path.join(archive_path_inner, archive_fname_inner)
	archive_path_outer = gmTools.gmPaths().tmp_dir
	archive_fname_outer = 'datawrapper.zip'
	archive_name_outer = os.path.join(archive_path_outer, archive_fname_outer)
	# remove existing archives so they don't get *updated* rather than newly created
	if overwrite:
		if not gmTools.remove_file(archive_name_inner, force=True):
			_log.error('cannot remove existing archive [%s]', archive_name_inner)
			return False
		if not gmTools.remove_file(archive_name_outer, force=True):
			_log.error('cannot remove existing archive [%s]', archive_name_outer)
			return False
	# 7z does not support ZIP comments so create a text file holding the comment
	if comment is not None:
		tmp, fname = os.path.split(source_dir.rstrip(os.sep))
		comment_filename = os.path.join(sandbox_dir, '000-%s-comment.txt' % fname)
		with open(comment_filename, mode='wt', encoding='utf8', errors='replace') as comment_file:
			comment_file.write(comment)
	# create inner (data) archive: uncompressed, unencrypted, similar to a tar archive
	args = [
		binary,
		'a',				# create archive
		'-sas',				# be smart about archive name extension
		'-bd',				# no progress indicator
		'-mx0',				# no compression (only store files)
		'-mcu=on',			# UTF8 filenames
		'-l',				# store content of links, not links
		'-scsUTF-8',		# console charset
		'-tzip'				# force ZIP format
	]
	if verbose:
		args.append('-bb3')
		args.append('-bt')
	else:
		args.append('-bb1')
	args.append(archive_name_inner)
	args.append(source_dir)
	if comment is not None:
		args.append(comment_filename)
	success, exit_code, stdout = gmShellAPI.run_process(cmd_line=args, encoding='utf8', verbose=verbose)
	if not success:
		_log.error('cannot create inner archive')
		return None
	# create "decompress instructions" file
	instructions_filename = os.path.join(archive_path_inner, '000-on_Windows-open_with-WinZip_or_7z_tools')
	open(instructions_filename, mode='wt').close()
	# create outer (wrapper) archive: compressed, encrypted
	args = [
		binary,
		'a',				# create archive
		'-sas',				# be smart about archive name extension
		'-bd',				# no progress indicator
		'-mx9',				# best available zip compression ratio
		'-mcu=on',			# UTF8 filenames
		'-l',				# store content of links, not links
		'-scsUTF-8',		# console charset
		'-tzip',			# force ZIP format
		'-mem=AES256',		# force useful encryption
		'-p%s' % passphrase	# set passphrase
	]
	if verbose:
		args.append('-bb3')
		args.append('-bt')
	else:
		args.append('-bb1')
	args.append(archive_name_outer)
	args.append(archive_path_inner)
	success, exit_code, stdout = gmShellAPI.run_process(cmd_line=args, encoding='utf8', verbose=verbose)
	if success:
		return archive_name_outer
	_log.error('cannot create outer archive')
	return None
def export(self, base_dir=None, items=None, expand_compressed=False):
	"""Export items as files plus metadata onto a patient media directory.

	Also stages the DWV DICOM viewer into the target directory when a
	DICOMDIR is present among the exported data.

	Returns None when there is nothing to export.
	"""
	if items is None:
		items = self.items
	if len(items) == 0:
		return None
	media_base_dir = base_dir
	from Gnumed.business.gmPerson import cPatient
	pat = cPatient(aPK_obj=self.__pk_identity)
	if media_base_dir is None:
		media_base_dir = gmTools.mk_sandbox_dir(prefix=u'exp-%s-' % pat.dirname)
	_log.debug('patient media base dir: %s', media_base_dir)
	doc_dir = os.path.join(media_base_dir, r'documents')
	if os.path.isdir(doc_dir):
		index_existing_docs = True
	else:
		index_existing_docs = False
		gmTools.mkdir(doc_dir)
	# fields for the index.html header template
	_html_start_data = {
		u'html_title_header': _('Patient data for'),
		u'html_title_patient': gmTools.html_escape_string(pat.get_description_gender(with_nickname=False) + u', ' + _(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')),
		u'title': _('Patient data export'),
		u'pat_name': gmTools.html_escape_string(pat.get_description_gender(with_nickname=False)),
		u'pat_dob': gmTools.html_escape_string(_(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')),
		u'mugshot_url': u'documents/no-such-file.png',
		u'mugshot_alt': _('no patient photograph available'),
		u'mugshot_title': u'',
		u'docs_title': _(u'Documents'),
		u'browse_root': _(u'browse storage medium'),
		u'browse_docs': _(u'browse documents area'),
		u'browse_dicomdir': u'',
		u'run_dicom_viewer': u''
	}
	mugshot = pat.document_folder.latest_mugshot
	if mugshot is not None:
		_html_start_data['mugshot_url'] = mugshot.save_to_file(directory=doc_dir, adjust_extension=True)
		_html_start_data['mugshot_alt'] = _('patient photograph from %s') % gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
		_html_start_data['mugshot_title'] = gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
	if u'DICOMDIR' in os.listdir(media_base_dir):
		_html_start_data[u'browse_dicomdir'] = u'<li><a href="./DICOMDIR">%s</a></li>' % _(u'show DICOMDIR file')
	# copy DWV into target dir
	dwv_target_dir = os.path.join(media_base_dir, u'dwv')
	gmTools.rmdir(dwv_target_dir)
	dwv_src_dir = os.path.join(gmTools.gmPaths().local_base_dir, u'dwv4export')
	if not os.path.isdir(dwv_src_dir):
		dwv_src_dir = os.path.join(gmTools.gmPaths().system_app_data_dir, u'dwv4export')
	try:
		shutil.copytree(dwv_src_dir, dwv_target_dir)
		_html_start_data[u'run_dicom_viewer'] = u'<li><a href="./dwv/viewers/mobile-local/index.html">%s</a></li>' % _(u'run Radiology Images (DICOM) Viewer')
	# FIX: "except shutil.Error, OSError:" only caught shutil.Error and
	# *bound* it to the name OSError -- a real OSError escaped; use a tuple
	except (shutil.Error, OSError):
		_log.exception('cannot include DWV, skipping')
def export(self, base_dir=None, items=None, with_metadata=True, expand_compressed=False):
	"""Export items as files plus descriptive metadata.

	Creates index.html, autorun.inf, cd.inf, README, patient
	demographics (GDT/XML/VCF) and a praxis VCF alongside the
	exported documents, suitable for patient media.

	Returns the directory exported into, or None when there are no
	items to export.
	"""
	# NOTE(review): <with_metadata> and <expand_compressed> are not used
	# in this body -- confirm against callers whether they still matter
	if items is None:
		items = self.items
	if len(items) == 0:
		return None
	from Gnumed.business.gmPerson import cPatient
	pat = cPatient(aPK_obj=self.__pk_identity)
	if base_dir is None:
		# default to a fresh per-patient sandbox directory
		base_dir = gmTools.mk_sandbox_dir(prefix=u'exp-%s-' % pat.dirname)
	_log.debug('base dir: %s', base_dir)
	doc_dir = os.path.join(base_dir, r'documents')
	gmTools.mkdir(doc_dir)
	# fields for the index.html header template
	_html_start_data = {
		u'html_title_header': _('Patient data for'),
		u'html_title_patient': gmTools.html_escape_string(
			pat.get_description_gender(with_nickname=False) + u', ' +
			_(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')),
		u'title': _('Patient data export'),
		u'pat_name': gmTools.html_escape_string(
			pat.get_description_gender(with_nickname=False)),
		u'pat_dob': gmTools.html_escape_string(
			_(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')),
		# placeholders, overridden below when a mugshot exists
		u'mugshot_url': u'documents/no-such-file.png',
		u'mugshot_alt': _('no patient photograph available'),
		u'mugshot_title': u'',
		u'docs_title': _(u'Documents'),
		u'browse_root': _(u'browse storage medium'),
		u'browse_docs': _(u'browse documents area'),
		u'browse_dicomdir': u''
	}
	mugshot = pat.document_folder.latest_mugshot
	if mugshot is not None:
		_html_start_data['mugshot_url'] = mugshot.export_to_file(directory=doc_dir)
		_html_start_data['mugshot_alt'] = _('patient photograph from %s') % gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
		_html_start_data['mugshot_title'] = gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
	# index.html
	idx_fname = os.path.join(base_dir, u'index.html')
	idx_file = io.open(idx_fname, mode=u'wt', encoding=u'utf8')
	# header
	existing_files = os.listdir(base_dir)
	if u'DICOMDIR' in existing_files:
		# link to a preexisting DICOMDIR (e.g. from a PACS export)
		_html_start_data[u'browse_dicomdir'] = u' <li><a href="./DICOMDIR">browse DICOMDIR</a></li>'
	idx_file.write(_html_start % _html_start_data)
	# middle: one list entry per exported document
	for item in items:
		item_path = item.export_to_file(directory=doc_dir)
		item_fname = os.path.split(item_path)[1]
		idx_file.write(
			_html_list_item % (item_fname, gmTools.html_escape_string(item['description'])))
	# footer: praxis branch contact data plus client/version info
	_cfg = gmCfg2.gmCfgData()
	from Gnumed.business.gmPraxis import gmCurrentPraxisBranch
	prax = gmCurrentPraxisBranch()
	lines = []
	adr = prax.branch.org_unit.address
	if adr is not None:
		lines.extend(adr.format())
	for comm in prax.branch.org_unit.comm_channels:
		# confidential channels must not appear on patient media
		if comm['is_confidential'] is True:
			continue
		lines.append(u'%s: %s' % (comm['l10n_comm_type'], comm['url']))
	adr = u''
	if len(lines) > 0:
		adr = gmTools.html_escape_string(u'\n'.join(lines), replace_eol=True, keep_visual_eol=True)
	_html_end_data = {
		'branch': gmTools.html_escape_string(prax['branch']),
		'praxis': gmTools.html_escape_string(prax['praxis']),
		'date': gmTools.html_escape_string(
			gmDateTime.pydt_strftime(gmDateTime.pydt_now_here(), format='%Y %B %d', encoding=u'utf8')),
		'gm_ver': gmTools.html_escape_string(_cfg.get(option=u'client_version')),
		#'gm_ver': 'git HEAD',	# for testing
		'adr': adr
	}
	idx_file.write(_html_end % _html_end_data)
	idx_file.close()
	# autorun.inf (Windows autoplay metadata)
	autorun_fname = os.path.join(base_dir, u'autorun.inf')
	autorun_file = io.open(autorun_fname, mode=u'wt', encoding=u'utf8')
	autorun_file.write(
		_autorun_inf % ((pat.get_description_gender(with_nickname=False) + u', ' +
			_(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')).strip(),
			_('Browse patient data')))
	autorun_file.close()
	# cd.inf (machine-readable patient/media metadata)
	cd_inf_fname = os.path.join(base_dir, u'cd.inf')
	cd_inf_file = io.open(cd_inf_fname, mode=u'wt', encoding=u'utf8')
	cd_inf_file.write(
		_cd_inf % (pat['lastnames'], pat['firstnames'],
			gmTools.coalesce(pat['gender'], u'?'),
			pat.get_formatted_dob('%Y-%m-%d'),
			gmDateTime.pydt_strftime(gmDateTime.pydt_now_here(), format='%Y-%m-%d', encoding=u'utf8'),
			pat.ID,
			_cfg.get(option=u'client_version'),
			u' / '.join([
				u'%s = %s (%s)' % (g['tag'], g['label'], g['l10n_label'])
				for g in pat.gender_list
			])))
	cd_inf_file.close()
	# README
	readme_fname = os.path.join(base_dir, u'README')
	readme_file = io.open(readme_fname, mode=u'wt', encoding=u'utf8')
	readme_file.write(
		_README % (pat.get_description_gender(with_nickname=False) + u', ' +
			_(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')))
	readme_file.close()
	# patient demographics as GDT/XML/VCF
	pat.export_as_gdt(filename=os.path.join(base_dir, u'patient.gdt'))
	pat.export_as_xml_linuxmednews(filename=os.path.join(base_dir, u'patient.xml'))
	pat.export_as_vcard(filename=os.path.join(base_dir, u'patient.vcf'))
	# praxis VCF
	shutil.move(prax.vcf, os.path.join(base_dir, u'praxis.vcf'))
	return base_dir
def _on_save_items_button_pressed(self, event):
	"""Save selected (or all) export area items onto a patient media directory."""
	event.Skip()
	items = self._LCTRL_items.get_selected_item_data(only_one=False)
	if len(items) == 0:
		# nothing selected -> operate on all items
		items = self._LCTRL_items.get_item_data()
	if len(items) == 0:
		return
	pat = gmPerson.gmCurrentPatient()
	dlg = cCreatePatientMediaDlg(self, -1, burn2cd=False, patient=pat, item_count=len(items))
	# FIX: replaced leftover print() debugging statements with proper logging
	_log.debug('calling dlg.ShowModal()')
	choice = dlg.ShowModal()
	_log.debug('after returning from dlg.ShowModal()')
	if choice != wx.ID_SAVE:
		dlg.Destroy()
		return
	use_subdir = dlg._CHBOX_use_subdirectory.IsChecked()
	path = dlg._LBL_directory.Label.strip()
	remove_existing_data = dlg._RBTN_remove_data.Value is True
	generate_metadata = dlg._CHBOX_generate_metadata.IsChecked()
	dlg.Destroy()
	if use_subdir:
		path = gmTools.mk_sandbox_dir(prefix='%s-' % pat.subdir_name, base_dir=path)
	else:
		if remove_existing_data is True:
			if gmTools.rm_dir_content(path) is False:
				gmGuiHelpers.gm_show_error (
					title=_('Creating patient media'),
					error=_('Cannot remove content from\n [%s]') % path
				)
				return False
	exp_area = pat.export_area
	if generate_metadata:
		export_dir = exp_area.export(base_dir=path, items=items)
	else:
		export_dir = exp_area.dump_items_to_disk(base_dir=path, items=items)
	self.save_soap_note(soap=_('Saved to [%s]:\n - %s') % (export_dir, '\n - '.join([i['description'] for i in items])))
	msg = _('Saved documents into directory:\n\n %s') % export_dir
	browse_index = gmGuiHelpers.gm_show_question (
		title=_('Creating patient media'),
		question=msg + '\n\n' + _('Browse patient data pack ?'),
		cancel_button=False
	)
	if browse_index:
		if generate_metadata:
			# with metadata there is an index.html to open in a browser
			gmNetworkTools.open_url_in_browser(url='file://%s' % os.path.join(export_dir, 'index.html'))
		else:
			gmMimeLib.call_viewer_on_file(export_dir, block=False)
	return True
def _on_save_items_button_pressed(self, event):
	"""Save selected (or all) export area documents into a user-chosen directory."""
	event.Skip()
	items = self._LCTRL_items.get_selected_item_data(only_one=False)
	if len(items) == 0:
		items = self._LCTRL_items.get_item_data()
	# FIX: bail out on an empty export area (sibling handler does, too) --
	# previously .export() was invoked with no items at all
	if len(items) == 0:
		return True
	dlg = wx.DirDialog (
		self,
		message=_('Select the directory into which to export the documents.'),
		defaultPath=os.path.join(gmTools.gmPaths().home_dir, 'gnumed')
	)
	choice = dlg.ShowModal()
	path = dlg.GetPath()
	# FIX: the dialog was never destroyed (resource leak)
	dlg.Destroy()
	if choice != wx.ID_OK:
		return True
	if not gmTools.dir_is_empty(path):
		reuse_nonempty_dir = gmGuiHelpers.gm_show_question (
			title=_(u'Saving export area documents'),
			question=_(
				u'The chosen export directory\n'
				u'\n'
				u' [%s]\n'
				u'\n'
				u'already contains files. Do you still want to save the\n'
				u'selected export area documents into that directory ?\n'
				u'\n'
				u'(this is useful for including the external documents\n'
				u' already stored in or below this directory)\n'
				u'\n'
				u'[NO] will create a subdirectory for you and use that.'
			) % path,
			cancel_button=True
		)
		if reuse_nonempty_dir is None:
			# aborted
			return True
		if reuse_nonempty_dir is False:
			path = gmTools.mk_sandbox_dir(prefix=u'export-%s-' % gmPerson.gmCurrentPatient().dirname, base_dir=path)
	include_metadata = gmGuiHelpers.gm_show_question (
		title=_(u'Saving export area documents'),
		question=_(
			u'Create descriptive metadata files\n'
			u'and save them alongside the\n'
			u'selected export area documents ?'
		),
		cancel_button=True
	)
	if include_metadata is None:
		# aborted
		return True
	export_dir = gmPerson.gmCurrentPatient().export_area.export(base_dir=path, items=items, with_metadata=include_metadata)
	self.save_soap_note(soap=_('Saved to [%s]:\n - %s') % (export_dir, u'\n - '.join([i['description'] for i in items])))
	title = _('Saving export area documents')
	msg = _('Saved documents into directory:\n\n %s') % export_dir
	if include_metadata:
		browse_index = gmGuiHelpers.gm_show_question (
			title=title,
			question=msg + u'\n\n' + _('Browse patient data pack ?'),
			cancel_button=False
		)
		if browse_index:
			gmNetworkTools.open_url_in_browser(url=u'file://%s' % os.path.join(export_dir, u'index.html'))
	else:
		gmGuiHelpers.gm_show_info(title=title, info=msg)
	return True
def convert_latex_to_pdf(filename: str = None, verbose: bool = False, is_sandboxed: bool = False) -> str:
	"""Compile LaTeX code to PDF using pdflatex.

	Args:
		filename: the LaTeX source file to compile
		verbose: log more detail
		is_sandboxed: whether <filename> already lives in a sandbox; if not, a sandbox is created and the file copied there first

	Returns:
		Name of resulting PDF, or None on failure.
	"""
	global __LaTeX_version_checked
	global __pdflatex_executable
	if not __LaTeX_version_checked:
		found, __pdflatex_executable = gmShellAPI.detect_external_binary(binary='pdflatex')
		if not found:
			_log.error('pdflatex not found')
			return None
		cmd_line = [__pdflatex_executable, '-version']
		success, ret_code, stdout = gmShellAPI.run_process(cmd_line=cmd_line, encoding='utf8', verbose=True)
		if not success:
			_log.error('[%s] failed, LaTeX not usable', cmd_line)
			return None
		# FIX: cache only a *successful* probe -- the flag used to be set
		# before probing, so a failed first attempt cached a None/broken
		# executable and subsequent calls crashed instead of re-probing
		__LaTeX_version_checked = True
	if is_sandboxed:
		sandbox_dir = os.path.split(filename)[0]
	else:
		# compile inside a private sandbox so intermediate files do not
		# clutter the caller's directory
		sandbox_dir = gmTools.mk_sandbox_dir(prefix=gmTools.fname_stem(filename) + '_')
		shutil.copy(filename, sandbox_dir)
		filename = os.path.join(sandbox_dir, os.path.split(filename)[1])
	_log.debug('LaTeX sandbox directory: [%s]', sandbox_dir)
	cmd_final = [
		__pdflatex_executable,
		'-recorder',
		'-interaction=nonstopmode',
		"-output-directory=%s" % sandbox_dir
	]
	cmd_draft = cmd_final + ['-draftmode']
	# LaTeX can need up to three runs to get cross references et al right
	for cmd2run in [cmd_draft, cmd_draft, cmd_final]:
		success, ret_code, stdout = gmShellAPI.run_process (
			cmd_line=cmd2run + [filename],
			acceptable_return_codes=[0],
			encoding='utf8',
			verbose=True	#_cfg.get(option = 'debug')
		)
		if not success:
			_log.error('problem running pdflatex, cannot generate form output, trying diagnostics')
			found, binary = gmShellAPI.find_first_binary(binaries=['lacheck', 'miktex-lacheck.exe'])
			if not found:
				_log.debug('lacheck not found')
			else:
				cmd_line = [binary, filename]
				success, ret_code, stdout = gmShellAPI.run_process(cmd_line=cmd_line, encoding='utf8', verbose=True)
			found, binary = gmShellAPI.find_first_binary(binaries=['chktex', 'ChkTeX.exe'])
			if not found:
				# FIX: log message typo ('chcktex')
				_log.debug('chktex not found')
			else:
				cmd_line = [binary, '--verbosity=2', '--headererr', filename]
				success, ret_code, stdout = gmShellAPI.run_process(cmd_line=cmd_line, encoding='utf8', verbose=True)
			return None
	return '%s.pdf' % os.path.splitext(filename)[0]
def create_encrypted_zip_archive_from_dir(source_dir, comment=None, overwrite=True, passphrase=None, verbose=False):
	"""Use 7z to create an encrypted ZIP archive of a directory.

	<source_dir>	will be included into the archive
	<comment>		included as a file containing the comment
	<overwrite>		remove existing archive before creation,
					avoiding *updating* of those, and thereby
					including unintended data
	<passphrase>	minimum length of 5

	The resulting zip archive will always be named
	"datawrapper.zip" for confidentiality reasons. If callers
	want another name they will have to shutil.move() the zip
	file themselves. This archive will be compressed and
	AES256 encrypted with the given passphrase. Therefore, the
	result will not decrypt with earlier versions of unzip
	software. On Windows, 7z or WinZip are needed.

	The zip format does not support header encryption thereby
	allowing attackers to gain knowledge of patient details by
	observing the names of files and directories inside the
	encrypted archive. To reduce that attack surface, GNUmed
	will create _another_ zip archive inside "datawrapper.zip",
	which eventually wraps up the patient data as "data.zip".
	That archive is not compressed and not encrypted, and can
	thus be unpacked with any old unzipper.

	Note that GNUmed does NOT remember the passphrase for
	you. You will have to take care of that yourself, and
	possibly also safely hand over the passphrase to any
	receivers of the zip archive.

	Returns the name of the outer archive on success, False on
	failure, None on other errors.
	"""
	# FIX: passphrase defaults to None -- len(None) raised TypeError
	if passphrase is None or len(passphrase) < 5:
		_log.error('<passphrase> must be at least 5 characters/signs/digits')
		return None
	gmLog2.add_word2hide(passphrase)
	source_dir = os.path.abspath(source_dir)
	if not os.path.isdir(source_dir):
		_log.error('<source_dir> does not exist or is not a directory: %s', source_dir)
		return False
	for cmd in ['7z', '7z.exe']:
		found, binary = gmShellAPI.detect_external_binary(binary = cmd)
		if found:
			break
	if not found:
		_log.warning('no 7z binary found')
		return None
	sandbox_dir = gmTools.mk_sandbox_dir()
	archive_path_inner = os.path.join(sandbox_dir, 'data')
	if not gmTools.mkdir(archive_path_inner):
		_log.error('cannot create scratch space for inner archive: %s', archive_path_inner)
		# FIX: bail out -- archiving into a nonexistent directory cannot succeed
		return None
	archive_fname_inner = 'data.zip'
	archive_name_inner = os.path.join(archive_path_inner, archive_fname_inner)
	archive_path_outer = gmTools.gmPaths().tmp_dir
	archive_fname_outer = 'datawrapper.zip'
	archive_name_outer = os.path.join(archive_path_outer, archive_fname_outer)
	# remove existing archives so they don't get *updated* rather than newly created
	if overwrite:
		if not gmTools.remove_file(archive_name_inner, force = True):
			_log.error('cannot remove existing archive [%s]', archive_name_inner)
			return False
		if not gmTools.remove_file(archive_name_outer, force = True):
			_log.error('cannot remove existing archive [%s]', archive_name_outer)
			return False
	# 7z does not support ZIP comments so create a text file holding the comment
	if comment is not None:
		tmp, fname = os.path.split(source_dir.rstrip(os.sep))
		comment_filename = os.path.join(sandbox_dir, '000-%s-comment.txt' % fname)
		with open(comment_filename, mode = 'wt', encoding = 'utf8', errors = 'replace') as comment_file:
			comment_file.write(comment)
	# create inner (data) archive: uncompressed, unencrypted, similar to a tar archive
	args = [
		binary,
		'a',				# create archive
		'-sas',				# be smart about archive name extension
		'-bd',				# no progress indicator
		'-mx0',				# no compression (only store files)
		'-mcu=on',			# UTF8 filenames
		'-l',				# store content of links, not links
		'-scsUTF-8',		# console charset
		'-tzip'				# force ZIP format
	]
	if verbose:
		args.append('-bb3')
		args.append('-bt')
	else:
		args.append('-bb1')
	args.append(archive_name_inner)
	args.append(source_dir)
	if comment is not None:
		args.append(comment_filename)
	success, exit_code, stdout = gmShellAPI.run_process(cmd_line = args, encoding = 'utf8', verbose = verbose)
	if not success:
		_log.error('cannot create inner archive')
		return None
	# create "decompress instructions" file
	instructions_filename = os.path.join(archive_path_inner, '000-on_Windows-open_with-WinZip_or_7z_tools')
	open(instructions_filename, mode = 'wt').close()
	# create outer (wrapper) archive: compressed, encrypted
	args = [
		binary,
		'a',				# create archive
		'-sas',				# be smart about archive name extension
		'-bd',				# no progress indicator
		'-mx9',				# best available zip compression ratio
		'-mcu=on',			# UTF8 filenames
		'-l',				# store content of links, not links
		'-scsUTF-8',		# console charset
		'-tzip',			# force ZIP format
		'-mem=AES256',		# force useful encryption
		'-p%s' % passphrase	# set passphrase
	]
	if verbose:
		args.append('-bb3')
		args.append('-bt')
	else:
		args.append('-bb1')
	args.append(archive_name_outer)
	args.append(archive_path_inner)
	success, exit_code, stdout = gmShellAPI.run_process(cmd_line = args, encoding = 'utf8', verbose = verbose)
	if success:
		return archive_name_outer
	_log.error('cannot create outer archive')
	return None
def export(self, base_dir=None, items=None, expand_compressed=False):
    """Export items onto a patient media directory (say, for burning to CD).

    Populates a per-patient media directory with the export items plus
    browsing/support files:

    - documents/              the exported items and any preexisting documents
    - index.html / start.html HTML index of the exported material
    - dwv/                    DWV DICOM viewer, when installed
    - autorun.inf / cd.inf    CD/DVD autorun metadata
    - README
    - patient.gdt/.xml/.vcf   patient demographics
    - praxis.vcf              praxis contact data

    Args:
        base_dir: target directory; when None, a fresh sandbox
            directory is created
        items: export items to dump; defaults to self.items
        expand_compressed: accepted for interface compatibility;
            NOTE(review): not acted upon anywhere in this implementation
            -- confirm against callers

    Returns:
        The media base directory path, or None when there are no items.
    """
    if items is None:
        items = self.items
    if len(items) == 0:
        return None

    media_base_dir = base_dir
    from Gnumed.business.gmPerson import cPatient
    pat = cPatient(aPK_obj=self.__pk_identity)
    if media_base_dir is None:
        media_base_dir = gmTools.mk_sandbox_dir(prefix='exp-%s-' % pat.subdir_name)
    _log.debug('patient media base dir: %s', media_base_dir)

    # documents subdirectory, created only if not already present
    doc_dir = os.path.join(media_base_dir, r'documents')
    if not os.path.isdir(doc_dir):
        gmTools.mkdir(doc_dir)

    # data for the index.html header template
    _html_start_data = {
        'html_title_header': _('Patient data for'),
        'html_title_patient': gmTools.html_escape_string(pat.get_description_gender(with_nickname=False) + ', ' + _('born') + ' ' + pat.get_formatted_dob('%Y %B %d')),
        'title': _('Patient data export'),
        'pat_name': gmTools.html_escape_string(pat.get_description_gender(with_nickname=False)),
        'pat_dob': gmTools.html_escape_string(_('born') + ' ' + pat.get_formatted_dob('%Y %B %d')),
        'mugshot_url': 'documents/no-such-file.png',
        'mugshot_alt': _('no patient photograph available'),
        'mugshot_title': '',
        'docs_title': _('Documents'),
        'browse_root': _('browse storage medium'),
        'browse_docs': _('browse documents area'),
        'browse_dicomdir': '',
        'run_dicom_viewer': ''
    }

    # patient photograph, if any
    mugshot = pat.document_folder.latest_mugshot
    if mugshot is not None:
        # NOTE(review): save_to_file() returns a filesystem path which is
        # used directly as the URL in the HTML template -- confirm the
        # template/browser handles this as intended
        _html_start_data['mugshot_url'] = mugshot.save_to_file(directory=doc_dir, adjust_extension=True)
        _html_start_data['mugshot_alt'] = _('patient photograph from %s') % gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
        _html_start_data['mugshot_title'] = gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')

    # link to DICOMDIR if one was already placed on the medium
    if 'DICOMDIR' in os.listdir(media_base_dir):
        _html_start_data['browse_dicomdir'] = '<li><a href="./DICOMDIR">%s</a></li>' % _('show DICOMDIR file')

    # copy DWV (DICOM web viewer) into target dir, best effort
    dwv_src_dir = os.path.join(gmTools.gmPaths().local_base_dir, 'dwv4export')
    if not os.path.isdir(dwv_src_dir):
        dwv_src_dir = os.path.join(gmTools.gmPaths().system_app_data_dir, 'dwv4export')
    if os.path.isdir(dwv_src_dir):
        dwv_target_dir = os.path.join(media_base_dir, 'dwv')
        gmTools.rmdir(dwv_target_dir)
        try:
            shutil.copytree(dwv_src_dir, dwv_target_dir)
            _html_start_data['run_dicom_viewer'] = '<li><a href="./dwv/viewers/mobile-local/index.html">%s</a></li>' % _('run Radiology Images (DICOM) Viewer')
        except (shutil.Error, OSError):
            _log.exception('cannot include DWV, skipping')

    # index.html -- "with" guarantees the file is closed even when an
    # item export fails midway (the original left the handle open)
    idx_fname = os.path.join(media_base_dir, 'index.html')
    with open(idx_fname, mode='wt', encoding='utf8') as idx_file:
        # - header
        idx_file.write(_html_start % _html_start_data)
        # - snapshot doc_dir *now*, or else the to-be-exported items
        #   would be indexed a second time as "preexisting"
        existing_docs = os.listdir(doc_dir)
        # - export items (side effect: writes files into doc_dir)
        for item in items:
            item_path = item.save_to_file(directory=doc_dir)
            item_fname = os.path.split(item_path)[1]
            idx_file.write(_html_list_item % (item_fname, gmTools.html_escape_string(item['description'])))
        # - preexisting documents
        for doc_fname in existing_docs:
            idx_file.write(_html_list_item % (doc_fname, gmTools.html_escape_string(_('other: %s') % doc_fname)))
        # - footer with praxis contact data
        _cfg = gmCfg2.gmCfgData()
        from Gnumed.business.gmPraxis import gmCurrentPraxisBranch
        prax = gmCurrentPraxisBranch()
        lines = []
        adr = prax.branch.org_unit.address
        if adr is not None:
            lines.extend(adr.format())
        for comm in prax.branch.org_unit.comm_channels:
            if comm['is_confidential'] is True:
                continue
            lines.append('%s: %s' % (comm['l10n_comm_type'], comm['url']))
        adr = ''
        if len(lines) > 0:
            adr = gmTools.html_escape_string('\n'.join(lines), replace_eol=True, keep_visual_eol=True)
        _html_end_data = {
            'branch': gmTools.html_escape_string(prax['branch']),
            'praxis': gmTools.html_escape_string(prax['praxis']),
            'date': gmTools.html_escape_string(gmDateTime.pydt_strftime(gmDateTime.pydt_now_here(), format='%Y %B %d')),
            'gm_ver': gmTools.html_escape_string(_cfg.get(option='client_version')),
            'adr': adr
        }
        idx_file.write(_html_end % _html_end_data)

    # start.html (just a copy of index.html, really ;-)
    start_fname = os.path.join(media_base_dir, 'start.html')
    try:
        shutil.copy2(idx_fname, start_fname)
    except Exception:
        _log.exception('cannot copy %s to %s', idx_fname, start_fname)

    # autorun.inf
    autorun_dict = {}
    autorun_dict['label'] = self._compute_autorun_inf_label(pat)
    autorun_dict['action'] = _('Browse patient data')
    autorun_dict['icon'] = ''
    # retrieve the media icon from keyword expansions, best effort
    media_icon_kwd = '$$gnumed_patient_media_export_icon'
    media_icon_kwd_exp = gmKeywordExpansion.get_expansion(keyword=media_icon_kwd, textual_only=False, binary_only=True)
    icon_tmp_file = media_icon_kwd_exp.save_to_file(target_mime='image/x-icon', target_extension='.ico', ignore_conversion_problems=True)
    if icon_tmp_file is None:
        _log.debug('cannot retrieve <%s>', media_icon_kwd)
    else:
        media_icon_fname = os.path.join(media_base_dir, 'gnumed.ico')
        try:
            shutil.move(icon_tmp_file, media_icon_fname)
            autorun_dict['icon'] = 'icon=gnumed.ico'
        except Exception:
            _log.exception('cannot move %s to %s', icon_tmp_file, media_icon_fname)
    autorun_fname = os.path.join(media_base_dir, 'autorun.inf')
    # cp1252: autorun.inf is read by Windows, not UTF8-aware
    with open(autorun_fname, mode='wt', encoding='cp1252', errors='replace') as autorun_file:
        autorun_file.write(_autorun_inf % autorun_dict)

    # cd.inf
    cd_inf_fname = os.path.join(media_base_dir, 'cd.inf')
    with open(cd_inf_fname, mode='wt', encoding='utf8') as cd_inf_file:
        cd_inf_file.write(_cd_inf % (
            pat['lastnames'],
            pat['firstnames'],
            gmTools.coalesce(pat['gender'], '?'),
            pat.get_formatted_dob('%Y-%m-%d'),
            gmDateTime.pydt_strftime(gmDateTime.pydt_now_here(), format='%Y-%m-%d'),
            pat.ID,
            _cfg.get(option='client_version'),
            ' / '.join(['%s = %s (%s)' % (g['tag'], g['label'], g['l10n_label']) for g in pat.gender_list])
        ))

    # README
    readme_fname = os.path.join(media_base_dir, 'README')
    with open(readme_fname, mode='wt', encoding='utf8') as readme_file:
        readme_file.write(_README % (pat.get_description_gender(with_nickname=False) + ', ' + _('born') + ' ' + pat.get_formatted_dob('%Y %B %d')))

    # patient demographics as GDT/XML/VCF
    pat.export_as_gdt(filename=os.path.join(media_base_dir, 'patient.gdt'))
    pat.export_as_xml_linuxmednews(filename=os.path.join(media_base_dir, 'patient.xml'))
    pat.export_as_vcard(filename=os.path.join(media_base_dir, 'patient.vcf'))

    # praxis VCF
    shutil.move(prax.vcf, os.path.join(media_base_dir, 'praxis.vcf'))

    return media_base_dir