# Module-level imports the methods below rely on (omitted in the original
# excerpt; the methods appear to belong to different Odoo models):
import os
import shutil
import threading
import time

from paramiko import SSHClient

from odoo import api
from odoo.modules.registry import Registry


def cron_folder_auto_classification(
    self, path=False, processing_path=False, limit=False
):
    """Scan the configured folder and process each file for auto classification."""
    if not path:
        path = (
            self.env["ir.config_parameter"]
            .sudo()
            .get_param(
                "document_quick_access_auto_classification.path", default=False
            )
        )
    if not path:
        return False
    if not processing_path and not self.env.context.get("ignore_process_path"):
        processing_path = (
            self.env["ir.config_parameter"]
            .sudo()
            .get_param(
                "document_quick_access_auto_classification.process_path",
                default=False,
            )
        )
    # Only plain files directly inside ``path`` are considered.
    elements = [
        os.path.join(path, f)
        for f in os.listdir(path)
        if os.path.isfile(os.path.join(path, f))
    ]
    if limit:
        elements = elements[:limit]
    for element in elements:
        obj = self
        new_element = element
        if processing_path:
            # Dedicated cursor so every document is committed independently.
            new_cr = Registry(self.env.cr.dbname).cursor()
        try:
            if processing_path:
                # Copy the file into the processing folder and enqueue it.
                new_element = os.path.join(
                    processing_path, os.path.basename(element)
                )
                shutil.copy(element, new_element)
                obj = (
                    api.Environment(new_cr, self.env.uid, self.env.context)[
                        self._name
                    ]
                    .browse()
                    .with_delay(**self._delay_vals())
                )
            obj._process_document(new_element)
            if processing_path:
                new_cr.commit()
        except Exception:
            if processing_path:
                # Drop the copied file and undo the partial transaction.
                os.unlink(new_element)
                new_cr.rollback()
            raise
        finally:
            if processing_path:
                new_cr.close()
        if processing_path:
            # Remove the original only after its copy has been processed.
            os.unlink(element)
    return True
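# --- Usage sketch (not part of the original module) --------------------------
# A minimal example of seeding the configuration parameters read by
# ``cron_folder_auto_classification`` and triggering it, e.g. from an Odoo
# shell or a test. The model name "document.quick.access.rule" and the two
# filesystem paths are assumptions made for illustration only.
def _run_auto_classification_sketch(env):
    icp = env["ir.config_parameter"].sudo()
    icp.set_param("document_quick_access_auto_classification.path", "/tmp/inbox")
    icp.set_param(
        "document_quick_access_auto_classification.process_path", "/tmp/processing"
    )
    # Process at most ten pending files in this run.
    env["document.quick.access.rule"].cron_folder_auto_classification(limit=10)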
def cron_move_documents(self, limit=False, path=False):
    """Move files from the preprocess folder to the scanner folder and process them."""
    if not path:
        path = (
            self.env["ir.config_parameter"]
            .sudo()
            .get_param(
                "hash_search_document_scanner_queue.preprocess_path",
                default=False,
            )
        )
    dest_path = (
        self.env["ir.config_parameter"]
        .sudo()
        .get_param("hash_search_document_scanner.path", default=False)
    )
    if not path or not dest_path:
        return False
    elements = [
        os.path.join(path, f)
        for f in os.listdir(path)
        if os.path.isfile(os.path.join(path, f))
    ]
    if limit:
        elements = elements[:limit]
    # Skip files modified less than a minute ago: they may still be being written.
    min_time = int(time.time()) - 60
    single_commit = self.env.context.get("scanner_single_commit", False)
    for element in elements:
        if os.path.getmtime(element) > min_time and not self.env.context.get(
            "scanner_ignore_time", False
        ):
            continue
        filename = os.path.basename(element)
        new_element = os.path.join(dest_path, filename)
        shutil.copy(element, new_element)
        if not single_commit:
            # Dedicated cursor so every document is committed independently.
            new_cr = Registry(self.env.cr.dbname).cursor()
        try:
            if not single_commit:
                obj = (
                    api.Environment(new_cr, self.env.uid, self.env.context)[
                        self._name
                    ]
                    .browse()
                    .with_delay()
                )
            else:
                obj = self.env[self._name].browse()
            obj.process_document(new_element)
            if not single_commit:
                new_cr.commit()
        except Exception:
            os.unlink(new_element)
            if not single_commit:
                new_cr.rollback()  # error, rollback everything atomically
            raise
        finally:
            if not single_commit:
                new_cr.close()
        # Remove the original only after its copy has been processed.
        os.unlink(element)
    return True
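# --- Usage sketch (not part of the original module) --------------------------
# How the two context flags used above could be combined in a test: process
# files synchronously in the current transaction and ignore the one-minute
# "still being written" guard. The model name "document.scanner.queue" is an
# assumption made for illustration only.
def _run_move_documents_sketch(env):
    env["document.scanner.queue"].with_context(
        scanner_single_commit=True,  # no extra cursor, no queue job
        scanner_ignore_time=True,  # do not skip freshly modified files
    ).cron_move_documents()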
def _next(self):
    # In tests, during module installation, or when the sequence is not marked
    # as safe, keep the standard behaviour; otherwise draw the next value
    # through a separate cursor so the increment is committed independently of
    # the calling transaction.
    if (
        getattr(threading.currentThread(), "testing", False)
        or self.env.context.get("install_mode")
        or self.env.context.get("ignore_safe", not self.safe)
    ):
        return super()._next()
    new_cr = Registry(self.env.cr.dbname).cursor()
    try:
        env = api.Environment(new_cr, self.env.uid, self.env.context)
        res = env[self._name].browse(self.id)
        result = res.with_context(ignore_safe=True)._next()
        new_cr.commit()
    except Exception:
        new_cr.rollback()  # error, rollback everything atomically
        raise
    finally:
        new_cr.close()
    return result
def cron_ssh_move_documents(
    self,
    host=False,
    port=False,
    user=False,
    password=False,
    ssh_path=False,
):
    """Fetch files from a remote SFTP folder into the scanner folder and process them."""
    dest_path = (
        self.env["ir.config_parameter"]
        .sudo()
        .get_param("hash_search_document_scanner.path", default=False)
    )
    connection = SSHClient()
    connection.load_system_host_keys()
    if not dest_path:
        return False
    if not host:
        host = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.host", default=False
        )
    if not port:
        port = int(
            self.env["ir.config_parameter"].get_param(
                "hash_search_document_scanner_queue_ssh.port", default="0"
            )
        )
    if not user:
        user = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.user", default=False
        )
    if not password:
        password = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.password", default=False
        )
    if not ssh_path:
        ssh_path = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.ssh_path", default=False
        )
    connection.connect(hostname=host, port=port, username=user, password=password)
    sftp = connection.open_sftp()
    if ssh_path:
        sftp.chdir(ssh_path)
    elements = sftp.listdir_attr(".")
    # Skip files whose access time is within the last minute: they may still be uploading.
    min_time = int(time.time()) - 60
    single_commit = self.env.context.get("scanner_single_commit", False)
    for element in elements:
        if element.st_atime > min_time and not self.env.context.get(
            "scanner_ignore_time", False
        ):
            continue
        filename = element.filename
        new_element = os.path.join(dest_path, filename)
        if not single_commit:
            # Dedicated cursor so every document is committed independently.
            new_cr = Registry(self.env.cr.dbname).cursor()
        try:
            sftp.get(filename, new_element)
            if single_commit:
                obj = self.env[self._name].browse()
            else:
                obj = (
                    api.Environment(new_cr, self.env.uid, self.env.context)[
                        self._name
                    ]
                    .browse()
                    .with_delay()
                )
            obj.process_document(new_element)
            if not single_commit:
                new_cr.commit()
        except Exception:
            if os.path.exists(new_element):
                os.unlink(new_element)
            if not single_commit:
                new_cr.rollback()  # error, rollback everything atomically
            raise
        finally:
            if not single_commit:
                new_cr.close()
        # Remove the remote file only after its local copy has been processed.
        sftp.remove(element.filename)
    sftp.close()
    connection.close()
    return True
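# --- Usage sketch (not part of the original module) --------------------------
# The connection details can also be passed explicitly instead of being read
# from ir.config_parameter, as the method signature above allows. The model
# name "document.scanner.queue" and all connection values are placeholders
# used for illustration only.
def _run_ssh_move_documents_sketch(env):
    env["document.scanner.queue"].cron_ssh_move_documents(
        host="scanner.example.com",
        port=22,
        user="scanner",
        password="secret",
        ssh_path="/outbox",
    )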