def test_url_socket(url=None):
    ''' shortcut to test if socket is opened from a URL

    :param url: URL to test; defaults to the `store_url` setting.
    :return: True if a TCP connection to the URL's host/port succeeds,
             False when the URL cannot be parsed. '''
    # resolve the effective URL first so the error log shows what was
    # actually parsed (previously logged `None` when url was omitted)
    target_url = url or SETTINGS.get('store_url')
    try:
        u = urllib.parse.urlparse(target_url)
        # resolve the port inside the try: `.port` raises ValueError
        # on a malformed port and this function promises False instead
        port = u.port or 80
    except Exception as exp:
        logger.error("Unable to parse URL `{}`".format(target_url))
        logger.exception(exp)
        return False
    # check if address and port are reachable (default to HTTP port 80)
    return test_socket(u.hostname, port)
def unarchive(self, collect_id):
    ''' ask the remote anam-receiver to unarchive a collect, then refresh

    Triggered by Home's table button. Failures are logged and swallowed
    (best-effort action). '''
    endpoint = '/collects/{id}/unarchive'.format(id=collect_id)
    try:
        do_post(endpoint)
        self.reset()
    except Exception as exc:
        logger.error("Failed to unarchive #{}".format(collect_id))
        logger.exception(exc)
    else:
        logger.info("Unarchived #{}".format(collect_id))
def do_request(path, method, kwargs=None, or_none=False,
               server_url=None, server_token=None):
    ''' performs a GET or POST on `path`

        URL is computed from `server_url`, /api and `path`
        Authorization header sent with `server_token`
        Excepts `anam-receiver` formatted JSON response.
        Raises on non-success status response.

        :param path: API path, appended to `<server_url>/api`
        :param method: HTTP verb, matched against a `requests` function
        :param kwargs: extra keyword arguments forwarded to `requests`
        :param or_none: return None instead of raising on failure
        returns response as JSON '''
    # None-default instead of a shared mutable `{}` default argument
    if kwargs is None:
        kwargs = {}
    server_url = server_url or SETTINGS.get('store_url', '')
    server_token = server_token or SETTINGS.get('store_token')
    url = ''.join((server_url, '/api', path))
    # fail early if the host/port is not even reachable
    if not test_url_socket(url):
        logger.info("{} requests to {} failed. No socket.".format(method,
                                                                  url))
        if or_none:
            return None
        else:
            raise IOError("Unable to connect to {}. Network Error"
                          .format(url))
    req = None
    try:
        req = getattr(requests, method.lower())(
            url,
            headers=get_auth_headers(server_token),
            timeout=30,
            **kwargs)
        assert req.status_code in (200, 201)
        resp = req.json()
        assert resp['status'] == 'success'
        return resp
    except Exception as exp:
        # a `requests.Response` is falsy on 4xx/5xx, so `if req:` would
        # skip logging exactly when the server errored; compare to None.
        if req is not None:
            # lazy %-style args: the previous call passed the status code
            # as the log message, which breaks logging's formatting
            logger.error("%s %s", req.status_code, req.text)
        logger.exception(exp)
        # silented error
        if or_none:
            return None
        raise
def worker(self):
    ''' copy all expected images from USB folder to samba share

    - loop on targets ; for each
        - recompute original folder name
        - compute list of expected attachment images (for whole household)
        - ensure original files are present (or add to errors list)
        - retrieve DB IDs from mapping
        - compute new files and folders names for share
    - ensure samba share is writable
    - copy files to samba share (failures to errors list)
    - if errors not empty, write a summary in a log file
    - mark collect images copied on anam-receiver
    - display feedback '''
    # ident -> oracle IDs mapping, stored on the dataset at import time
    mappings = self.dataset.get("targets", {})
    # accumulates (source_path, dest_path, ERROR_TYPE) for the report
    error_list = []

    # first prepare a full list of files to copy
    # keyed by target ident: {'label': display name, 'files': [(src, dst)]}
    copy_list = {}
    for index, target in enumerate(self.get_targets()):
        # retrieve basic information from target
        try:
            ident = target.get("ident")
            logger.debug(ident)
            mapping = mappings.get(ident)
            dos_id = mapping.get('dossier')
            indigent_perso_id = mapping.get('indigent')
            first_name = target.get("enquete/prenoms")
            last_name = target.get("enquete/nom")
            name = "{last} {firsts}".format(last=last_name.upper(),
                                            firsts=first_name.title())
            folder = get_folder_name(ident, last_name, first_name)
            assert dos_id
            assert indigent_perso_id
            assert name
            assert folder
        except Exception as exp:
            # should NEVER happen: any missing field or mapping entry
            # means the imported dataset itself is inconsistent
            logger.error("Missing indigent data in dataset")
            logger.exception(exp)
            self.status_bar.set_error(
                "Données manquante sur les indigents.\n"
                "L'import est peut-être corrompu ?")
            return

        copy_list[ident] = {
            'label': "{n}: {d}".format(n=name, d=dos_id),
            'files': []
        }

        seen_ids = []
        for attachment in target.get("_attachments"):
            attach_slug = attachment['labels']['slug']

            # skip other attachments
            if attach_slug not in VALID_ATTACHMENTS:
                continue

            # skip if already copied (_hamed as duplicates)
            if attachment['id'] in seen_ids:
                continue
            else:
                seen_ids.append(attachment['id'])

            # origin file, expected under <source_dir>/<folder>/
            fname = attachment['export_fname']
            fpath = os.path.join(self.source_dir, folder, fname)

            # find its perso_id
            # spouse/child file names presumably embed the mapping key
            # between the first and second underscore — TODO confirm
            if 'epouse' in fname or 'enfant' in fname:
                perso_id = mapping.get(fname.split("_", 2)[1])
                if 'epouse' in fname:
                    perso_type = 'conjoint'
                elif 'enfant' in fname:
                    perso_type = 'enfant'
            else:
                perso_id = indigent_perso_id
                perso_type = 'assure'

            # build destination file name and path
            nfname = "{pid}_{ptype}_{dtype}.jpg".format(pid=perso_id,
                                                        dtype=attach_slug,
                                                        ptype=perso_type)
            # share layout: <last-4-of-dossier>/<dossier>/<file>
            nfpath = "/".join([dos_id[-4:], dos_id, nfname])

            # update status bar to show we're working on it
            self.status_bar.setText("{name}: {fpath}".format(name=name,
                                                             fpath=nfpath))

            # skip if the origin file is not present
            if not os.path.exists(fpath):
                error_list.append((fpath, nfpath, MISSING))
                continue

            # make sure we're not adding duplicates
            if not (fpath, nfpath) in copy_list[ident]['files']:
                copy_list[ident]['files'].append((fpath, nfpath))

    # check error list to fail early if the source is compromise
    if len(error_list) >= self.nb_images:
        self.status_bar.set_error(
            "Aucune image accessible depuis la source.\n"
            "Vérifiez la source et recommencez.")
        return

    self.status_bar.setText("Connexion au partage…")

    # ensure destination is ready to fail early if not
    if not test_connection():
        self.status_bar.set_error(
            "Impossible d'écrire sur le partage.\n"
            "Vérifiez les paramètres et recommencez.")
        return

    self.status_bar.setText("Connecté au partage.")

    # start file copies from copy list
    for index, copy_data in enumerate(copy_list.values()):
        # update status and progress bar (copies are bundled by dossier)
        self.progress_bar.setValue(index + 1)
        copy_progress = "{label}... copie de {nb} fichiers".format(
            label=copy_data['label'], nb=len(copy_data['files']))
        self.status_bar.setText(copy_progress)

        success = False
        failures = []
        try:
            success, failures = copy_files(copy_data['files'])
        except smb.base.SMBTimeout as exp:
            # share went away mid-copy: abort the whole operation
            logger.exception(exp)
            self.status_bar.set_error(
                "Perte de connexion avec le partage.\n"
                "Vérifiez les paramètres et le réseau et recommencez.")
            return
        except Exception as exp:
            # other errors are reflected by success/failures below
            logger.exception(exp)

        # keep track of failed copies
        if not success:
            error_list += [(l, d, COPYFAILED) for l, d in failures]

    # copy is over
    self.progress_bar.setValue(self.progress_bar.maximum())
    self.status_bar.setText("Finalisation…")

    # return error and don't upload if only errors
    nb_errors = len(error_list)
    if nb_errors == self.nb_images:
        self.status_bar.set_error(
            "Aucune image n'a pu être copiée.\n"
            "Vérifiez la source et les paramètres et recommencez.")
        return

    # upload results (best-effort: failure only degrades the feedback)
    try:
        payload = {
            'images_nb_total': self.nb_images,
            'images_nb_error': nb_errors,
        }
        do_post("/collects/{cid}/mark_images_copied".format(
            cid=self.collect_id), payload=payload)
        upload_success = True
    except Exception as exp:
        logger.exception(exp)
        upload_success = False

    # display feedback
    if nb_errors == 0 and upload_success:
        self.status_bar.set_success(
            "Copie des images terminée avec succès.")
    else:
        msg = "Copie partielle des images terminée.\n" \
              "{nbe} erreurs sur {nbt} images.".format(
                  nbe=nb_errors, nbt=self.nb_images)
        if not upload_success:
            msg += "\nImpossible de transmettre les résultats de la copie."
        self.status_bar.set_warning(msg)

    # prepare a log file if partial copy
    if nb_errors:
        with open(self.error_log_fname, 'w') as f:
            # initial statistics
            f.write("Copie partielle des images de la collecte {id}.{cr}"
                    "Date de la copie: {date}.{cr}"
                    "Nb. images: {nbt}.{cr}"
                    "Nb. erreurs: {nbe}.{cr}{cr}".format(
                        cr=os.linesep,
                        id=self.ona_form_id,
                        date=datetime.datetime.now().isoformat(),
                        nbt=self.nb_images,
                        nbe=nb_errors))
            # list of [REASON] source -> destination lines
            for fpath, nfpath, error in error_list:
                f.write("[{e}] {s} ---> {d}{cr}".format(
                    cr=os.linesep,
                    e=COPY_ERROR_TYPES.get(error),
                    s=fpath,
                    d=nfpath))
        # make status bar clickable (opens log in reader)
        self.status_bar.on_click = self.open_user_log
def worker(self):
    ''' imports collect data into anam oracle DB

    - connect to database
    - loop on all targets ; for each
        - create a DOSSIER for the indigent/household
        - create a IM_PERSONNES_MOBILE for the indigent
        - create many IM_PERSO_PJ_MOBILE for the indigent
        - create zero-plus IM_PERSONNES_MOBILE for the indigent spouses
        - create many IM_PERSO_PJ_MOBILE for the indigent spouses
        - create zero-plus IM_PERSONNES_MOBILE for the indigent children
        - create many IM_PERSO_PJ_MOBILE for the indigent children
    - POST to anam-receiver to mark collect imported

    rollback if any of this failed '''
    try:
        assert ora_test()
        conn = ora_connect()
    except Exception as exp:
        logger.exception(exp)
        self.status_bar.set_error("Connexion impossible à la base Oracle. "
                                  "Vérifiez les paramètres.")
        return

    # will hold reference to both json IDs and oracle DB IDs
    mapping = {}
    # running count of rows believed committed, used in error messages
    nb_imported = 0

    self.progress_bar.setValue(self.progress_bar.maximum() // 2)

    for index, target in enumerate(self.get_indigents()):
        # retrieve name to update progress bar
        first_name = target.get("enquete/prenoms")
        last_name = target.get("enquete/nom")
        name = "{last} {firsts}".format(last=last_name.upper(),
                                        firsts=first_name.title())
        self.status_bar.setText(name)
        # self.progress_bar.setValue(index + 1)

        try:
            # import_target returns a DOS_ID: dict() of all mappings
            mapping.update(import_target(conn, target))
        except Exception as exp:
            logger.error("DB import error on #{}: {}".format(index, name))
            logger.exception(exp)
            self.status_bar.set_error(
                "Impossible d'importer les données (ORACLE).\n"
                "Les données n'ont pas été importées.\n{exp}".format(
                    exp=exp))
            # abandon everything not yet committed
            conn.rollback()
            ora_disconnect(conn)
            return

        # commit in batches
        # NOTE(review): the original comment said "every 1,000 indigents"
        # but the code batches every 50; it also fires at index 0 and
        # adds 50 to nb_imported regardless of actual batch size — so
        # nb_imported is an estimate, only used for error messages.
        # Confirm intended batch size before changing.
        if index % 50 == 0:
            try:
                conn.commit()
                nb_imported += 50
            except Exception as exp:
                logger.error("DB Commit error on #{}: {}".format(
                    index, name))
                logger.exception(exp)
                self.status_bar.set_error(
                    "Impossible d'importer certaines données "
                    "(ORACLE/COMMIT/THOUSANDS).\n"
                    "ATTENTION: {nb} indigents ont été importés dans "
                    "la base de données Oracle !!\n{exp}".format(
                        nb=nb_imported, exp=exp))
                conn.rollback()
                ora_disconnect(conn)
                return

    # commit remaining batch of statements
    try:
        conn.commit()
        nb_imported = self.nb_targets
    except Exception as exp:
        logger.error("DB Commit error on last batch: {}".format(name))
        logger.exception(exp)
        self.status_bar.set_error(
            "Impossible d'importer les données (ORACLE/COMMIT/END).\n"
            "ATTENTION: {nb} indigents ont été importés dans "
            "la base de données Oracle !!\n{exp}".format(nb=nb_imported,
                                                         exp=exp))
        conn.rollback()
        return
    else:
        self.status_bar.set_success("Données Oracle importées.")
    finally:
        # disconnect on both the success and final-commit-failure paths
        ora_disconnect(conn)

    # update progress UI as we're done
    self.progress_bar.setValue(self.progress_bar.maximum())
    self.status_bar.setText("Finalisation…")

    # mark collect imported on anam-receiver and submits the mappings
    try:
        do_post(
            "/collects/{cid}/mark_imported".format(cid=self.collect_id),
            mapping)
    except Exception as exp:
        # Oracle data is already committed at this point: warn loudly
        logger.exception(exp)
        self.status_bar.set_error(
            "Impossible mettre à jour le service web ANAM.\n"
            "ATTENTION: {nb} indigents ont été importés dans la base "
            "de données Oracle !!\n{exp}".format(nb=nb_imported, exp=exp))
        return
    else:
        self.status_bar.set_success("Import terminé avec success.")
def copy_files(files, service_name=None, conn=None):
    ''' copy a list of files to `service_name`

    :param files: list of (source_path, destination_path) tuples
    :param service_name: samba share name; defaults to the
                         `picserv_share` setting
    :param conn: existing SMB connection; a new one is opened if omitted
    :return: (success, [list, of, (source, dest) failures]) '''
    conn = conn or smb_connect()
    service_name = service_name or SETTINGS.get('picserv_share')

    def _create_folder(service_name, path):
        # ensure `path` exists on the share and is a directory
        try:
            shared_file = conn.getAttributes(service_name=service_name,
                                             path=path)
        except OperationFailure:
            # does not exist, create folder
            create_folder(path, service_name, conn)
            return
        # explicit check instead of `assert`/except-AssertionError so the
        # non-directory case is still handled when running under `-O`
        if not shared_file.isDirectory:
            # is not a directory. remove and recreate
            delete_file(path, service_name, conn)
            create_folder(path, service_name, conn)
        # else: path already exists and is a directory. moving on.

    def _create_folder_tree(service_name, dest_filename):
        # create recursing folders on destination
        walked_folders = []
        for folder in p(dest_filename).splitall()[:-1]:
            if not folder:
                continue
            walked_folders.append(folder)
            path = os.path.join(*walked_folders)
            _create_folder(service_name, path)

    failures = []
    for local_filename, dest_filename in files:
        logger.debug("Copying `{}` to `{}`"
                     .format(local_filename, dest_filename))

        # create all folders up to dest_filename on samba share
        try:
            _create_folder_tree(service_name, dest_filename)
        except Exception as exp:
            logger.debug("Unable to create folder tree for `{}`"
                         .format(dest_filename))
            logger.exception(exp)
            failures.append((local_filename, dest_filename))
            continue

        try:
            # write file on destination (overwrites if exists)
            with open(local_filename, 'rb') as local_file:
                nb_written = conn.storeFile(
                    service_name, dest_filename, local_file)
            # explicit raise instead of `assert` so a zero-byte write is
            # still recorded as a failure when running under `-O`
            if nb_written <= 0:
                raise IOError(
                    "0 bytes written for {}".format(dest_filename))
        except Exception as exp:
            logger.error("Unable to write {s} onto {d} on SMB {sh}"
                         .format(s=local_filename, d=dest_filename,
                                 sh=service_name))
            logger.exception(exp)
            failures.append((local_filename, dest_filename))

    return not failures, failures