def _gc_file_store(self):
    """ Perform the garbage collection of the filestore.

    Walks the 'checklist' spool directory (populated by ``_mark_for_gc``)
    and unlinks every file that no longer backs an ``ir_attachment`` row.
    Only relevant when attachments are stored on disk (storage 'file').
    """
    if self._storage() != 'file':
        return

    # Continue in a new transaction. The LOCK statement below must be the
    # first one in the current transaction, otherwise the database snapshot
    # used by it may not contain the most recent changes made to the table
    # ir_attachment! Indeed, if concurrent transactions create attachments,
    # the LOCK statement will wait until those concurrent transactions end.
    # But this transaction will not see the new attachments if it has done
    # other requests before the LOCK (like the method _storage() above).
    cr = self._cr
    cr.commit()

    # prevent all concurrent updates on ir_attachment while collecting,
    # but only attempt to grab the lock for a little bit, otherwise it'd
    # start blocking other transactions. (will be retried later anyway)
    cr.execute("SET LOCAL lock_timeout TO '10s'")
    cr.execute("LOCK ir_attachment IN SHARE MODE")

    # retrieve the file names from the checklist
    # keys are 'dirname/filename' store names, values are absolute paths
    checklist = {}
    for dirpath, _, filenames in os.walk(self._full_path('checklist')):
        dirname = os.path.basename(dirpath)
        for filename in filenames:
            fname = "%s/%s" % (dirname, filename)
            checklist[fname] = os.path.join(dirpath, filename)

    # Clean up the checklist. The checklist is split in chunks and files are garbage-collected
    # for each chunk.
    removed = 0
    for names in cr.split_for_in_conditions(checklist):
        # determine which files to keep among the checklist
        cr.execute("SELECT store_fname FROM ir_attachment WHERE store_fname IN %s", [names])
        whitelist = set(row[0] for row in cr.fetchall())

        # remove garbage files, and clean up checklist
        for fname in names:
            filepath = checklist[fname]
            if fname not in whitelist:
                try:
                    os.unlink(self._full_path(fname))
                    _logger.debug("_file_gc unlinked %s", self._full_path(fname))
                    removed += 1
                except (OSError, IOError):
                    # best-effort: the file may already be gone or locked
                    _logger.info("_file_gc could not unlink %s", self._full_path(fname), exc_info=True)
            # always drop the checklist marker, whether or not the file was removed
            with tools.ignore(OSError):
                os.unlink(filepath)

    # commit to release the lock
    cr.commit()
    _logger.info("filestore gc %d checked, %d removed", len(checklist), removed)
def _mark_for_gc(self, fname):
    """ Add ``fname`` in a checklist for the filestore garbage collection. """
    # spooldir pattern: an empty marker file in the 'checklist' subdirectory
    # flags the stored file for the next garbage-collection pass
    marker = os.path.join(self._full_path('checklist'), fname)
    if os.path.exists(marker):
        return
    parent = os.path.dirname(marker)
    if not os.path.isdir(parent):
        # another worker may create the directory concurrently; ignore that race
        with tools.ignore(OSError):
            os.makedirs(parent)
    open(marker, 'ab').close()
def _validate_numerical_box(self, answer):
    """Validate a numerical answer.

    The answer must parse as a float and, when validation is required,
    fall within the configured min/max bounds. Returns a dict mapping
    ``self.id`` to an error message, or an empty dict when valid.
    """
    try:
        value = float(answer)
    except ValueError:
        return {self.id: _('This is not a number')}

    if self.validation_required:
        # reject out-of-range answers; any failure while comparing
        # (e.g. unset bounds) is deliberately swallowed
        with tools.ignore(Exception):
            in_range = self.validation_min_float_value <= value <= self.validation_max_float_value
            if not in_range:
                return {self.id: self.validation_error_msg}
    return {}
def validate_numerical_box(self, post, answer_tag):
    """Validate the posted value of a numerical-box survey question.

    :param post: mapping of posted form values
    :param answer_tag: key of this question's answer in ``post``
    :return: dict mapping ``answer_tag`` to an error message for each
             failed check (empty dict when the answer is valid)
    """
    self.ensure_one()
    errors = {}
    answer = post[answer_tag].strip()
    # Empty answer to mandatory question
    if self.constr_mandatory and not answer:
        errors.update({answer_tag: self.constr_error_msg})
    # Checks if user input is a number; keep the parsed value so the
    # range check below does not have to re-parse (the original code
    # re-parsed inside ignore(Exception) and relied on the swallowed
    # ValueError to skip validation for non-numeric input)
    floatanswer = None
    if answer:
        try:
            floatanswer = float(answer)
        except ValueError:
            errors.update({answer_tag: _('This is not a number')})
    # Answer validation (if properly defined) — only meaningful when the
    # answer actually parsed as a number
    if floatanswer is not None and self.validation_required:
        # Answer is not in the right range; comparison failures
        # (e.g. unset bounds) are best-effort ignored
        with tools.ignore(Exception):
            if not (self.validation_min_float_value <= floatanswer <= self.validation_max_float_value):
                errors.update({answer_tag: self.validation_error_msg})
    return errors
def field_triggers(self):
    """Build the trigger tree used to invalidate/recompute fields.

    The returned structure is a nested dict tree: each level is keyed by a
    field (a dependency-path label, walked in reverse), and the special key
    ``None`` holds the set of fields to trigger at that point.
    """
    # determine field dependencies
    dependencies = {}
    for Model in self.models.values():
        if Model._abstract:
            continue
        for field in Model._fields.values():
            # dependencies of custom fields may not exist; ignore that case
            exceptions = (Exception,) if field.base_field.manual else ()
            with ignore(*exceptions):
                dependencies[field] = set(field.resolve_depends(self))

    # determine transitive dependencies
    # NOTE: 'seen' is an immutable tuple (never a mutable default argument);
    # it guards against cycles in the dependency graph
    def transitive_dependencies(field, seen=()):
        if field in seen:
            return
        for seq1 in dependencies.get(field, ()):
            yield seq1
            for seq2 in transitive_dependencies(seq1[-1], seen + (field,)):
                yield concat(seq1[:-1], seq2)

    def concat(seq1, seq2):
        # collapse a one2many followed by its inverse many2one, since
        # traversing both is a no-op on the record set
        if seq1 and seq2:
            f1, f2 = seq1[-1], seq2[0]
            if f1.type == 'one2many' and f2.type == 'many2one' and \
                    f1.model_name == f2.comodel_name and f1.inverse_name == f2.name:
                return concat(seq1[:-1], seq2[1:])
        return seq1 + seq2

    # determine triggers based on transitive dependencies
    triggers = {}
    for field in dependencies:
        for path in transitive_dependencies(field):
            if path:
                tree = triggers
                for label in reversed(path):
                    tree = tree.setdefault(label, {})
                tree.setdefault(None, set()).add(field)
    return triggers