def commit(self):
    """
    Commit the current transaction.

    Runs the before-commit hook chain (session-level hooks, then the
    per-row ``_on_before_commit`` / ``_on_commit`` / ``_on_after_commit``
    handlers on each tracked row).  Any exception from a hook rolls back
    the M transaction and is re-raised.  If all hooks succeed and a
    transaction is open, it is committed and the after-commit session
    hooks run.

    Fixes vs. original: the Python-2-only ``except Exception, e:`` form
    (with an unused binding) is replaced by ``except Exception:``, and
    the redundant outer ``try`` that had no visible handler is removed.
    NOTE(review): unlike abort(), this does not clear self.in_transaction
    or self.tracking afterwards - confirm whether that is intentional.
    """
    try:
        for fn in self.on_before_commit:
            fn()  # session-level hooks
        for dbrow in self.tracking:
            row_handler = getattr(dbrow, "_on_before_commit", None)
            if row_handler:
                row_handler()
        for dbrow in self.tracking:
            dbrow._on_commit()
        for dbrow in self.tracking:
            row_handler = getattr(dbrow, "_on_after_commit", None)
            if row_handler:
                row_handler()
    except Exception:
        # A failing hook aborts the whole M transaction before propagating.
        M.trollback()
        raise
    if self.in_transaction:
        M.tcommit()
    for fn in self.on_after_commit:
        fn()  # session-level hooks
def fm_validate_insert(self, value):
    """
    This validator checks the field using the Fileman logic. Since it
    expects value to be in Fileman External format and we are using
    Internal Format, it is of limited use. Also, I don't know how it
    will work on a sub-file.

    Raises FilemanError when CHK^DIE rejects the value, or when the
    call leaves an ERR global behind (a programming error).
    """
    # Clear any stale error state before calling Fileman.
    M.Globals["ERR"].kill()
    # Validates single field against the data dictionary
    s0, = M.proc("CHK^DIE", self.fileid, self.fieldid, "H", value,
                 M.INOUT(""), "ERR")
    err = M.Globals["ERR"]
    # s0 should contain ^ for error, internal value for valid data
    if s0 == "^":
        error_code = err['DIERR'][1].value
        error_msg = '\n'.join(
            [v for k, v in err['DIERR'][1]['TEXT'].items()])
        help_msg = [v for k, v in err['DIHELP'].items()]
        raise FilemanError(
            """DBSDD.fm_validate_insert(): fileid = [%s], fieldid = [%s], value = [%s], error_code = [%s], error_msg = [%s], help = [%s]"""
            % (self.fileid, self.fieldid, value, error_code, error_msg,
               help_msg))
    # If err exists, then some form of programming error
    if err.exists():
        raise FilemanError(
            """DBSDD.fm_validate_insert(): fileid = [%s], fieldid = [%s], value = [%s], err = [%s]"""
            % (self.fileid, self.fieldid, value, '\n'.join(err)))
def abort(self):
    """
    Abort (roll back) the current transaction.

    Runs the abort hook chain: session before-abort hooks, then each
    tracked row's ``_on_before_abort`` and ``_on_abort`` handlers; rolls
    back the M transaction if one is open; then runs the per-row
    ``_on_after_abort`` handlers and the session after-abort hooks.
    The tracking list is cleared even if a hook raises.
    """
    try:
        for fn in self.on_before_abort:
            fn()  # hooks
        for dbrow in self.tracking:
            row_handler = getattr(dbrow, "_on_before_abort", None)
            if row_handler:
                row_handler()
        for dbrow in self.tracking:
            row_handler = getattr(dbrow, "_on_abort", None)
            if row_handler:
                row_handler()
        if self.in_transaction:
            # Undo everything written since tstart().
            M.trollback()
            self.in_transaction = False
        for dbrow in self.tracking:
            row_handler = getattr(dbrow, "_on_after_abort", None)
            if row_handler:
                row_handler()
        for fn in self.on_after_abort:
            fn()  # hooks
    finally:
        # Always drop the tracked rows, even on hook failure.
        self.tracking = []
def unlock(self):
    """Release the M-level lock held on this record's global node."""
    # Locking is done via an M level routine on the record global;
    # "LOCK -path" decrements the lock that lock() acquired.
    command = "LOCK -%s" % self._dd.m_closed_form(self._rowid)
    M.mexec(str(command))  # TODO: mexec to take unicode
def update(self, values=None): """ Write changed data back to the database. TODO: dbsdd validation for fields """ # Create an FDA format array for fileman fdaid = self._create_fda(values) if fdaid: M.Globals["ERR"].kill() # Flags: # E - use external formats # K - lock the record # S - do not clear the row global # T - verify the data #TODO: I want to do validation, but use internal format throughout if self._internal: flags = "" else: flags = "ET" M.proc("FILE^DIE", flags, fdaid, "ERR") # Check for error err = M.Globals["ERR"] if err.exists(): raise FilemanErrorNumber(dierr=err) # If there are subfiles, these will have to be processed. subfiles = set([x[0] for x in values.keys() if type(x) == tuple]) if len(subfiles) > 0: for subfile in subfiles: self._update_subfile(subfile, [(x, values[x]) for x in values.keys() if type(x) == tuple])
def fileid(self):
    """
    Look up the ^DIC array and find the file number for the specified file,
    e.g. FILE = 1 - result is a string.

    The result is cached in self._fileid after the first lookup.
    """
    if self._fileid is None:
        # ^DIC("B",name,0): the "B" index truncates names to 30 characters.
        rv = M.mexec('''set s1=$order(^DIC("B",s0,0))''',
                     str(self.filename[:30]), M.INOUT(""))[0]
        if rv != '':
            self._fileid = rv
    return self._fileid
def lock(self, timeout=5):
    """
    Lock a record.

    timeout: seconds to wait (passed to Fileman via DILOCKTM).
    Raises FilemanLockFailed if the lock is not obtained.
    """
    g_path = self._dd.m_closed_form(self._rowid)
    # Set the timeout
    M.Globals["DILOCKTM"].value = timeout
    # use DILF^LOCK function to perform the lock
    M.proc("LOCK^DILF", g_path)
    # result is returned in $T (1 = lock acquired)
    rv, = M.mexec("set l0=$T", M.INOUT(0))
    if rv != 1:
        raise FilemanLockFailed(filename=self._dd.filename,
                                row=self._rowid, timeout=timeout)
def fm_validate_insert(self, value):
    """
    This validator checks the field using the Fileman logic. Since it
    expects value to be in Fileman External format and we are using
    Internal Format, it is of limited use. Also, I don't know how it
    will work on a sub-file.

    Raises FilemanError when CHK^DIE rejects the value, or when the
    call leaves an ERR global behind (a programming error).
    """
    # Clear any stale error state before calling Fileman.
    M.Globals["ERR"].kill()
    # Validates single field against the data dictionary
    s0, = M.proc("CHK^DIE", self.fileid, self.fieldid, "H", value,
                 M.INOUT(""), "ERR")
    err = M.Globals["ERR"]
    # s0 should contain ^ for error, internal value for valid data
    if s0 == "^":
        error_code = err["DIERR"][1].value
        error_msg = "\n".join([v for k, v in err["DIERR"][1]["TEXT"].items()])
        help_msg = [v for k, v in err["DIHELP"].items()]
        raise FilemanError(
            """DBSDD.fm_validate_insert(): fileid = [%s], fieldid = [%s], value = [%s], error_code = [%s], error_msg = [%s], help = [%s]"""
            % (self.fileid, self.fieldid, value, error_code, error_msg,
               help_msg)
        )
    # If err exists, then some form of programming error
    if err.exists():
        raise FilemanError(
            """DBSDD.fm_validate_insert(): fileid = [%s], fieldid = [%s], value = [%s], err = [%s]"""
            % (self.fileid, self.fieldid, value, "\n".join(err))
        )
def delete(self):
    """
    I see no clear mechanism for doing deletes in the DBS API.

    There seems to be a call in the "classic" API:

        ^DIK
        DIK = "The file global - open format"
        DA = "The entry number in the file"

    Raises FilemanError unless internal format is in use, or if ^DIK
    flags failure via Y = -1.

    TODO: Validate permissions
    """
    if not self._internal:
        raise FilemanError("You must use internal format to modify a file")
    if self._rowid is not None:
        # Y is the classic-API result flag; clear it before the call.
        M.Globals["Y"].kill()
        if type(self._rowid) == str and self._rowid.endswith(","):
            # Generate a DA structure for a multiple.
            # see manual, P 2-58
            parts = [x for x in self._rowid.split(",") if x]
            rowid = parts[-1]
            # Parent ien(s) come from the open-form global path, reversed.
            cf = self._dd.m_open_form()
            cf = [x for x in cf.split("(",1)[1].split(",") if x]
            cf.reverse()
            parts = parts + cf
            M.Globals["DA"].value = parts[0]
            for i, part in enumerate(parts[1:]):
                M.Globals["DA"][i+1].value = part
            # the classic api paths are different that the DBS api paths
            M.Globals["DIK"].value = self._dd.m_open_form() + "%s," % rowid
        else:
            # Simple (top-level) record delete.
            M.Globals["DIK"].value = self._dd.m_open_form()
            M.Globals["DA"].value = str(self._rowid)
        M.proc("^DIK")
        if M.Globals["Y"] == "-1":
            # I don't know where to look for the error message - Classic API
            # Sets the flag, but no variables set
            # This may just mean that the record does not exist
            raise FilemanError("""DBSRow.delete() : FILEMAN Error : file [%s], fileid = [%s], rowid = [%s]"""
                % (self._dd.filename, self._dd.fileid, self._rowid))
def _init_fields(self):
    """
    Return information about the dd fields

    Walks ^DD(fileid,...) via M.ddwalk, creating a field instance (from
    FIELD_TYPES) per fieldid, keyed in self._fields; label -> fieldid
    goes into self.fieldnames. Result is cached after the first call.
    """
    if self._fields is None:
        M.mset('U', "^")  # DBS Calls Require this
        f = self._fields = {}
        attrs = self.fieldnames = {}
        fieldid = "0"
        while 1:
            # Subscript 0 is field description, .1 is the title, 3 is help
            fieldid, info, title, fieldhelp = M.ddwalk(
                self._fileid, fieldid)
            #fieldid, info, title, fieldhelp = M.mexec(
            #    """set s0=$order(^DD(s2,s0)) Q:s0'=+s0 s s1=$G(^DD(s2,s0,0)),s3=$G(^DD(s2,s0,.1)),s4=$G(^DD(s2,s0,3))""",
            #    M.INOUT(str(fieldid)), M.INOUT(""), str(self._fileid), M.INOUT(""), M.INOUT(""))
            # Non-numeric subscript means we have walked past the fields.
            if fieldid == "" or fieldid[0] not in "0123456789.":
                break
            info = info.split("^", 4)
            label = self._clean_label(info[0])
            try:
                ftype = info[1]
            except:
                # info had no type part; treat as untyped.
                ftype = None
            if ftype:
                finst = None
                # First matching field class wins.
                for klass in FIELD_TYPES:
                    if klass.isa(ftype):
                        finst = f[fieldid] = klass(fieldid, label, info)
                        finst.fileid = self.fileid
                        finst.ownerdd = self
                        attrs[label] = fieldid
                        break
                if finst is None:
                    print finst, "FIELD [%s], spec [%s] was not identified" % (
                        label, ftype)
                    continue
                finst.title = title
                finst.fieldhelp = fieldhelp
            else:
                # NOTE(review): finst here refers to the previous iteration's
                # value and is unbound if the first field has no type - confirm.
                assert finst, "FIELD [%s] %s has no fieldspec" % (label, info)
    return self._fields
def insert(self, values=None):
    """
    Create a new record. Values is a dictionary containing the values.

    Returns the new rowid (int) and stores it in self._rowid.
    Raises FilemanErrorNumber if UPDATE^DIE reports an error.

    TODO: Inserts to the state file are prohibited. How is this implemented.
    """
    M.Globals["ERR"].kill()
    # Per-instance scratch global that receives the new ien.
    ienid = "ien%s" % id(self)
    M.Globals[ienid].kill()
    # Create an FDA format array for fileman
    fdaid = self._create_fda(values, include_nulls=False)
    # Flags:
    # E - use external formats
    # S - do not clear the row global
    # TODO: I want the external format for validation,
    # but the internal format for usablility
    if self._internal:
        flags = ""
    else:
        flags = "E"
    M.proc("UPDATE^DIE", flags , fdaid, ienid, "ERR")
    # Check for error
    err = M.Globals["ERR"]
    if err.exists():
        # TODO: Work out the error codes.
        # ERR.DIERR.6.PARAM.0 = "3"
        # ERR.DIERR.6.PARAM.FIELD = "1901"
        # ERR.DIERR.6.PARAM.FILE = "2"
        # ERR.DIERR.6.PARAM.IENS = "+1,"
        # ERR.DIERR.6.TEXT.1 = "The new record '+1,' lacks some required identifiers."
        raise FilemanErrorNumber(dierr=err)
    # What is the id of the new record?
    self._rowid = int(M.Globals[ienid]['1'].value)
    # Invalidate any cached row data; it must be re-fetched.
    self._stored_data = None
    return self._rowid
def fileid(self):
    """
    Look up the ^DIC array and find the file number for the specified
    file, e.g. FILE = 1 - result is a string.  Cached after the first
    successful lookup.
    """
    if self._fileid is not None:
        return self._fileid
    # The ^DIC("B") name index truncates file names to 30 characters.
    lookup = M.mexec("""set s1=$order(^DIC("B",s0,0))""",
                     str(self.filename[:30]), M.INOUT(""))
    result = lookup[0]
    if result != "":
        self._fileid = result
    return self._fileid
def _init_fields(self):
    """
    Return information about the dd fields

    Walks ^DD(fileid,...) via M.ddwalk, creating a field instance (from
    FIELD_TYPES) per fieldid, keyed in self._fields; label -> fieldid
    goes into self.fieldnames. Result is cached after the first call.
    """
    if self._fields is None:
        M.mset("U", "^")  # DBS Calls Require this
        f = self._fields = {}
        attrs = self.fieldnames = {}
        fieldid = "0"
        while 1:
            # Subscript 0 is field description, .1 is the title, 3 is help
            fieldid, info, title, fieldhelp = M.ddwalk(self._fileid, fieldid)
            # fieldid, info, title, fieldhelp = M.mexec(
            #     """set s0=$order(^DD(s2,s0)) Q:s0'=+s0 s s1=$G(^DD(s2,s0,0)),s3=$G(^DD(s2,s0,.1)),s4=$G(^DD(s2,s0,3))""",
            #     M.INOUT(str(fieldid)), M.INOUT(""), str(self._fileid), M.INOUT(""), M.INOUT(""))
            # Non-numeric subscript means we have walked past the fields.
            if fieldid == "" or fieldid[0] not in "0123456789.":
                break
            info = info.split("^", 4)
            label = self._clean_label(info[0])
            try:
                ftype = info[1]
            except:
                # info had no type part; treat as untyped.
                ftype = None
            if ftype:
                finst = None
                # First matching field class wins.
                for klass in FIELD_TYPES:
                    if klass.isa(ftype):
                        finst = f[fieldid] = klass(fieldid, label, info)
                        finst.fileid = self.fileid
                        finst.ownerdd = self
                        attrs[label] = fieldid
                        break
                if finst is None:
                    print finst, "FIELD [%s], spec [%s] was not identified" % (label, ftype)
                    continue
                finst.title = title
                finst.fieldhelp = fieldhelp
            else:
                # NOTE(review): finst here refers to the previous iteration's
                # value and is unbound if the first field has no type - confirm.
                assert finst, "FIELD [%s] %s has no fieldspec" % (label, info)
    return self._fields
def retrieve(self):
    """
    Retrieve values
    Internal or External

    Builds the fieldid list (subfile fields get a trailing "*"), calls
    GETS^DIQ into the per-row scratch global, then snapshots it via
    _save_tmp_global(). Raises FilemanErrorNumber on error.
    """
    M.Globals["ERR"].kill()
    flags = 'N'  # no nulls
    if self._internal:
        flags = flags + "I"
    f = []
    for (k,v) in self._fields.items():
        if type(k) == tuple:
            # Tuple key identifies a subfile field: request the whole multiple.
            if k[0]+"*" not in f:
                f.append(k[0]+"*")
        else:
            if v.fmql_type in [FT_SUBFILE]:
                f.append(k+"*")
            else:
                f.append(k)
    fieldids = ";".join(f)
    fileid = self._dd.fileid
    iens = self._iens
    M.proc("GETS^DIQ",
        fileid,             # numeric file id
        iens,               # IENS
        fieldids,           # Fields to return TODO
        flags,              # Flags N=no nulls, R=return field names
        self._row_tmpid, "ERR")
    # Check for error
    err = M.Globals["ERR"]
    if err.exists():
        print "error retrieving %s, file %s" % (iens, fileid)
        #import pdb; pdb.set_trace()
        raise FilemanErrorNumber(dierr=err)
    self._save_tmp_global()
def begin(self, label="python"):
    """Start an M transaction; calling it explicitly is optional."""
    if self.in_transaction:
        # A transaction is already open - begin() is implicit elsewhere.
        return
    for hook in self.on_before_begin:
        hook()
    assert len(self.tracking) == 0, "There have been some changes before the begin call"
    self.transaction_id = M.tstart(label)
    self.in_transaction = True
    for hook in self.on_after_begin:
        hook()
    # A fresh transaction starts with an empty cache.
    self.cache = {}
def list_files(self):
    """
    Oddly, I cannot see how to list the files using the DBS API.
    This is required for debugging etc.

    Returns a list of (name, fileid) tuples by walking ^DIC directly,
    keeping only entries $$VFILE^DILFD confirms are real files.
    Delegates to the remote connection when one is configured.
    """
    if self.remote:
        return self.remote.list_files()
    # Fileman expects DUZ/U (and DUZ(0) for programmer access) to be set.
    M.mset('DUZ',self.DUZ)
    M.mset('U', "^")
    if self.isProgrammer:
        M.mset('DUZ(0)', "@")
    rv = []
    s0 = "0"
    while s0 != "":
        s0, name = M.mexec(
            '''set s0=$order(^DIC(s0)) Q:s0'=+s0 I $D(^DIC(s0,0))&$D(^DIC(s0,0,"GL"))&$$VFILE^DILFD(s0) S s1=$P(^DIC(s0,0),U,1)''',
            M.INOUT(s0), M.INOUT(""))
        if name:
            rv.append((name, s0))
    return rv
def begin(self, label="python"):
    """
    It is not necessary to call this - begin() is invoked implicitly.

    Starts an M transaction (tstart), runs the begin hooks, and resets
    the cache. No-op when a transaction is already open.
    """
    if self.in_transaction:
        return  # called implicitly
    for fn in self.on_before_begin:
        fn()  # hooks
    assert len(self.tracking
               ) == 0, "There have been some changes before the begin call"
    self.transaction_id = M.tstart(label)
    self.in_transaction = True
    for fn in self.on_after_begin:
        fn()  # hooks
    # A fresh transaction starts with an empty cache.
    self.cache = {}
def next(self):
    """
    Advance the index-order cursor and return the next record
    (or (key, rowid) when self.raw). Raises StopIteration when the
    index range is exhausted.
    """
    lastkey = self.lastkey
    lastrowid = self.lastrowid
    if self.ascending:
        asc = 1
    else:
        asc = -1
    # TODO: Fileman seems to structure indices with keys in the global path
    #       or in the value - need to investigate further
    # There is a mad collation approach in M, where numbers sort before non-numbers.
    # this really messes up the keys.
    # How should I search?
    # There is an inefficiency here it takes three searches to find the next record.
    while 1:
        if lastrowid is None:
            # locate the next matching index value
            lastkey, = M.mexec("""set s0=$order(%ss0),%s)""" % (self.gl, asc),
                               M.INOUT(str(lastkey)))
            if lastkey == "":
                break
            if self.ascending:
                # Enforce the lower/upper bound rules on the key.
                if self.from_value is not None:
                    if self.from_rule == ">" and lastkey <= self.from_value:
                        continue
                    if self.from_rule == ">=" and lastkey < self.from_value:
                        assert 0
                if self.to_value is not None:
                    if self.to_rule == "<=" and lastkey > self.to_value:
                        break
                    if self.to_rule == "=" and lastkey != self.to_value:
                        break
                    if self.to_rule == "<" and lastkey >= self.to_value:
                        break
                self.lastkey = lastkey
                lastrowid = "0"
            else:  # descending
                if self.from_value is not None:
                    if self.from_rule == "<" and lastkey >= self.from_value:
                        continue
                    if self.from_rule == "<=" and lastkey > self.from_value:
                        assert 0
                if self.to_value is not None:
                    if self.to_rule == ">=" and lastkey < self.to_value:
                        break
                    if self.to_rule == "=" and lastkey != self.to_value:
                        break
                    if self.to_rule == ">" and lastkey <= self.to_value:
                        break
                self.lastkey = lastkey
                lastrowid = ""
        # Have the key, get the first matching rowid
        lastrowid, = M.mexec("""set s0=$order(%s"%s",s1),%d)""" % (self.gl, self.lastkey, asc),
                             M.INOUT(str(lastkey)), lastrowid)
        if lastrowid == "":
            # No match - move on to the next index key.
            lastrowid = None
            continue
        if self.filters:
            # Are filters to be applied?
            if not self.filters(lastrowid):
                continue
        if self.skip_rows > 0:
            # Consuming the OFFSET portion of the query.
            self.skip_rows -= 1
            continue
        self.lastrowid = lastrowid
        if self.raw:
            return self.lastkey, self.lastrowid
        return self.getter(self.lastrowid)
    raise StopIteration
def next(self):
    """
    Advance the file-order cursor and return the next record
    (or the rowid when self.raw). Raises StopIteration when the
    range, the limit, or the file is exhausted.
    """
    # Have we exceeded limit
    if self.limit and self.results_returned >= self.limit:
        raise StopIteration
    lastrowid = self.lastrowid  # This value should be a string throughout.
    if self.ascending:
        asc = 1
    else:
        asc = -1
    while not self.results_complete:
        # If this is the first pass, we may have the id of a record, which needs to
        # be verified
        found = False
        if self.first_pass:
            self.first_pass = False
            if lastrowid is None and asc == -1:
                # Descending with no start point: begin at the last record.
                lastrowid, = M.mexec("""set s0=$order(%ss0),-1)""" % self.gl,
                                     M.INOUT('%'))
                if valid_rowid(lastrowid):
                    found = True
            elif lastrowid and float(lastrowid) > 0:
                # Check the starting record actually exists.
                row_exists, = M.mexec("""set s0=$data(%ss0))""" % (self.gl),
                                      M.INOUT(lastrowid))
                if valid_rowid(row_exists):
                    found = True
        if not found:
            lastrowid, = M.mexec("""set s0=$order(%ss0),%d)""" % (self.gl, asc),
                                 M.INOUT(lastrowid))
            if not valid_rowid(lastrowid):
                break
        # Check boundary values
        f_lastrowid = float(lastrowid)
        if self.ascending:
            if self.from_rowid is not None:
                if f_lastrowid == self.from_rowid and self.from_rule == ">":
                    continue
            if self.to_rowid is not None:
                if f_lastrowid >= self.to_rowid and self.to_rule == "<":
                    break
                if f_lastrowid > self.to_rowid and self.to_rule == "<=":
                    break
        else:  # descending:
            if f_lastrowid == 0:
                break  # header record
            if self.from_rowid is not None:
                if f_lastrowid == self.from_rowid and self.from_rule == "<":
                    continue
            if self.to_rowid is not None:
                if f_lastrowid <= self.to_rowid and self.to_rule == ">":
                    break
                if f_lastrowid < self.to_rowid and self.to_rule == ">=":
                    break
        if self.filters:
            # Are filters to be applied?
            if not self.filters(lastrowid):
                continue
        if self.skip_rows > 0:
            # Consuming the OFFSET portion of the query.
            self.skip_rows -= 1
            continue
        self.lastrowid = lastrowid
        self.results_returned += 1
        if self.raw:
            return self.lastrowid
        return self.getter(self.lastrowid)
    self.results_complete = True
    raise StopIteration
def next(self):
    """
    Advance the index-order cursor and return the next record
    (or (key, rowid) when self.raw). Raises StopIteration when the
    index range is exhausted.
    """
    lastkey = self.lastkey
    lastrowid = self.lastrowid
    if self.ascending:
        asc = 1
    else:
        asc = -1
    # TODO: Fileman seems to structure indices with keys in the global path
    #       or in the value - need to investigate further
    # There is a mad collation approach in M, where numbers sort before non-numbers.
    # this really messes up the keys.
    # How should I search?
    # There is an inefficiency here it takes three searches to find the next record.
    while 1:
        if lastrowid is None:
            # locate the next matching index value
            lastkey, = M.mexec(
                """set s0=$order(%ss0),%s)""" % (self.gl, asc),
                M.INOUT(str(lastkey)))
            if lastkey == "":
                break
            if self.ascending:
                # Enforce the lower/upper bound rules on the key.
                if self.from_value is not None:
                    if self.from_rule == ">" and lastkey <= self.from_value:
                        continue
                    if self.from_rule == ">=" and lastkey < self.from_value:
                        assert 0
                if self.to_value is not None:
                    if self.to_rule == "<=" and lastkey > self.to_value:
                        break
                    if self.to_rule == "=" and lastkey != self.to_value:
                        break
                    if self.to_rule == "<" and lastkey >= self.to_value:
                        break
                self.lastkey = lastkey
                lastrowid = "0"
            else:  # descending
                if self.from_value is not None:
                    if self.from_rule == "<" and lastkey >= self.from_value:
                        continue
                    if self.from_rule == "<=" and lastkey > self.from_value:
                        assert 0
                if self.to_value is not None:
                    if self.to_rule == ">=" and lastkey < self.to_value:
                        break
                    if self.to_rule == "=" and lastkey != self.to_value:
                        break
                    if self.to_rule == ">" and lastkey <= self.to_value:
                        break
                self.lastkey = lastkey
                lastrowid = ""
        # Have the key, get the first matching rowid
        lastrowid, = M.mexec(
            """set s0=$order(%s"%s",s1),%d)""" % (self.gl, self.lastkey, asc),
            M.INOUT(str(lastkey)), lastrowid)
        if lastrowid == "":
            # No match - move on to the next index key.
            lastrowid = None
            continue
        if self.filters:
            # Are filters to be applied?
            if not self.filters(lastrowid):
                continue
        if self.skip_rows > 0:
            # Consuming the OFFSET portion of the query.
            self.skip_rows -= 1
            continue
        self.lastrowid = lastrowid
        if self.raw:
            return self.lastkey, self.lastrowid
        return self.getter(self.lastrowid)
    raise StopIteration
def next(self):
    """
    Advance the file-order cursor and return the next record
    (or the rowid when self.raw). Raises StopIteration when the
    range, the limit, or the file is exhausted.
    """
    # Have we exceeded limit
    if self.limit and self.results_returned >= self.limit:
        raise StopIteration
    lastrowid = self.lastrowid  # This value should be a string throughout.
    if self.ascending:
        asc = 1
    else:
        asc = -1
    while not self.results_complete:
        # If this is the first pass, we may have the id of a record, which needs to
        # be verified
        found = False
        if self.first_pass:
            self.first_pass = False
            if lastrowid is None and asc == -1:
                # Descending with no start point: begin at the last record.
                lastrowid, = M.mexec(
                    """set s0=$order(%ss0),-1)""" % self.gl,
                    M.INOUT('%'))
                if valid_rowid(lastrowid):
                    found = True
            elif lastrowid and float(lastrowid) > 0:
                # Check the starting record actually exists.
                row_exists, = M.mexec(
                    """set s0=$data(%ss0))""" % (self.gl),
                    M.INOUT(lastrowid))
                if valid_rowid(row_exists):
                    found = True
        if not found:
            lastrowid, = M.mexec(
                """set s0=$order(%ss0),%d)""" % (self.gl, asc),
                M.INOUT(lastrowid))
            if not valid_rowid(lastrowid):
                break
        # Check boundary values
        f_lastrowid = float(lastrowid)
        if self.ascending:
            if self.from_rowid is not None:
                if f_lastrowid == self.from_rowid and self.from_rule == ">":
                    continue
            if self.to_rowid is not None:
                if f_lastrowid >= self.to_rowid and self.to_rule == "<":
                    break
                if f_lastrowid > self.to_rowid and self.to_rule == "<=":
                    break
        else:  # descending:
            if f_lastrowid == 0:
                break  # header record
            if self.from_rowid is not None:
                if f_lastrowid == self.from_rowid and self.from_rule == "<":
                    continue
            if self.to_rowid is not None:
                if f_lastrowid <= self.to_rowid and self.to_rule == ">":
                    break
                if f_lastrowid < self.to_rowid and self.to_rule == ">=":
                    break
        if self.filters:
            # Are filters to be applied?
            if not self.filters(lastrowid):
                continue
        if self.skip_rows > 0:
            # Consuming the OFFSET portion of the query.
            self.skip_rows -= 1
            continue
        self.lastrowid = lastrowid
        self.results_returned += 1
        if self.raw:
            return self.lastrowid
        return self.getter(self.lastrowid)
    self.results_complete = True
    raise StopIteration
def c_isa(cls, flags):
    """
    Return True when *flags* (after stripping leading characters)
    starts with a number that Fileman recognises as a valid file id.
    """
    candidate = leading_number(strip_leading_chars(flags))
    if not candidate:
        return False
    # $$VFILE^DILFD returns "0" when the number is not a valid file.
    verdict, = M.func("$$VFILE^DILFD", candidate)
    return verdict != "0"
def indices(self):
    """
    Return a list of the indices

    To be valid, the index must be both in the IX

    ^DD(200,0,"IX","AASWB",200,654)=""

    Indices are read from ^DD(fileid,0,"IX",...), then filtered against
    the cross-references recorded on each column. Cached in
    self._indices after the first call.
    """
    if self._indices is None:
        i = []
        # TODO: this is not right for multi-column keys
        # TODO: new style indexes
        global_name = '^DD(%s,0,"IX","0")' % self.fileid
        prefix = '^DD(%s,0,"IX",' % self.fileid
        while 1:
            # $query walks the "IX" subtree node by node.
            global_name = M.mexec('set s0=$query(%s)' % global_name,
                                  M.INOUT(""))[0]
            if not global_name or not global_name.startswith(prefix):
                break
            suffix = global_name[len(prefix):-1]
            parts = suffix.split(",")
            idx_name = parts[0][1:-1]
            idx_table = parts[1]
            idx_columns = parts[2:]
            index = Index(idx_name, idx_table, idx_columns)
            i.append(index)
        # A second list, gives indices for a field
        columns = {}
        for idx in i:
            for c in idx.columns:
                columns[c] = 1
        # Now trawl the listed columns in the data dictionary, and load their
        # cross references.
        cr_names = {}
        for c in columns.keys():
            idx_root = M.Globals["^DD"][self.fileid][c][1]
            if not idx_root[0].exists():
                continue
            for cr_id, val in idx_root.keys_with_decendants():
                if float(cr_id) > 0:
                    cr_header = idx_root[cr_id][0].value
                    parts = cr_header.split("^")
                    if len(parts) == 2 and parts[
                            1]:  # if more than 2 parts, assume MUMPs trigger
                        f = cr_names.get(parts[1], list())
                        f.append(c)
                        cr_names[parts[1]] = f
        # Now, just delete items from the index list if they are not in cr_names
        self._indices = []
        for index in i:
            cr = cr_names.get(index.name)
            if cr:
                # verify columns - lots of errors in real systems
                if len(cr) == len(index.columns):
                    invalid = False
                    for c in cr:
                        if c not in index.columns:
                            invalid = True
                            continue
                    if not invalid:
                        self._indices.append(index)
    return self._indices
def index_order_traversal(gl_prefix, index, ranges=None, ascending=True, sf_path=[], explain=False):
    """
    A generator which will traverse an index.
    The iterator should yield rowids.

        Indices are stored:
        GLOBAL,INDEXID,VALUE,ROWID=""

        ^DIZ(999900,"B","hello there from unit test2",183)=""
        ^DIZ(999900,"B","hello there from unit test2",184)=""
        ^DIZ(999900,"B","hello there from unit test2",185)=""
        ^DIZ(999900,"B","record 1",1)=""

    Yields (rowid, closed-form global path, sf_path + [rowid]) tuples;
    with explain=True yields a single description string instead.
    NOTE(review): sf_path=[] is a mutable default - it is only read
    here, never mutated, so it is benign; confirm before changing.
    """
    gl = gl_prefix + '"%s",' % index
    if ranges:
        # Only the first range is honoured.
        r = ranges[0]
        from_value = r["from_value"]
        to_value = r["to_value"]
        from_rule = r["from_rule"]
        to_rule = r["to_rule"]
    else:
        from_value, to_value, from_rule, to_rule = None, None, None, None
    if explain:
        yield "index_order_traversal, ascending=%s, gl=%s, index=%s, X %s '%s' AND X %s '%s'" % (
            ascending, gl, index, from_rule, from_value, to_rule, to_value,
        )
        return
    if from_value != None and to_value != None:
        if ascending:
            assert from_value <= to_value
        else:
            assert to_value <= from_value
    if from_value is None:
        # No lower bound: start before the first possible key.
        if ascending:
            lastkey = " "
        else:
            lastkey = "ZZZZZZZZZZZZZZ"
        lastrowid = ""
    else:
        lastkey = from_value
        if from_rule == ">":
            lastrowid = None  # looks for the next key after lastkey
        else:
            lastrowid = ""  # looks for the lastkey
    if ascending:
        asc = 1
    else:
        asc = -1
    # TODO: Fileman seems to structure indices with keys in the global path
    #       or in the value - need to investigate further
    # There is a mad collation approach in M, where numbers sort before non-numbers.
    # this really messes up the keys.
    # How should I search?
    # There is an inefficiency here it takes three searches to find the next record.
    while 1:
        if lastrowid is None:
            # locate the next matching index value
            lastkey, = M.mexec("""set s0=$order(%ss0),%s)""" % (gl, asc),
                               M.INOUT(str(lastkey)))
            if lastkey == "":
                break
            if ascending:
                # Enforce the lower/upper bound rules on the key.
                if from_value is not None:
                    if from_rule == ">" and lastkey <= from_value:
                        continue
                    if from_rule == ">=" and lastkey < from_value:
                        assert 0
                if to_value is not None:
                    if to_rule == "<=" and lastkey > to_value:
                        break
                    if to_rule == "=" and lastkey != to_value:
                        break
                    if to_rule == "<" and lastkey >= to_value:
                        break
                lastkey = lastkey
                lastrowid = "0"
            else:  # descending
                if from_value is not None:
                    if from_rule == "<" and lastkey >= from_value:
                        continue
                    if from_rule == "<=" and lastkey > from_value:
                        assert 0
                if to_value is not None:
                    if to_rule == ">=" and lastkey < to_value:
                        break
                    if to_rule == "=" and lastkey != to_value:
                        break
                    if to_rule == ">" and lastkey <= to_value:
                        break
                lastkey = lastkey
                lastrowid = ""
        # Have the key, get the first matching rowid
        lastrowid, = M.mexec("""set s0=$order(%s"%s",s1),%d)""" % (gl, lastkey, asc),
                             M.INOUT(str(lastkey)), lastrowid)
        if lastrowid == "":
            # No match - advance to the next index key.
            lastrowid = None
            continue
        yield (lastrowid, "%s%s)" % (gl_prefix, lastrowid), sf_path + [lastrowid])
def c_isa(cls, flags):
    """
    Return True when *flags* begins with a number that Fileman
    recognises as a valid file id.

    Fix: the original returned ``s0 == "0"`` - True exactly when
    $$VFILE^DILFD reports the number is NOT a valid file.  The sibling
    implementation of c_isa in this file returns ``s0 != "0"``; this
    version is made consistent with it.
    """
    n = leading_number(flags)
    if len(n) > 0:
        # $$VFILE^DILFD returns "0" for an invalid file number.
        s0, = M.func("$$VFILE^DILFD", n)
        return s0 != "0"
    return False
def index_order_traversal(gl_prefix, index, ranges=None, ascending=True, sf_path=[], explain=False):
    """
    A generator which will traverse an index.
    The iterator should yield rowids.

        Indices are stored:
        GLOBAL,INDEXID,VALUE,ROWID=""

        ^DIZ(999900,"B","hello there from unit test2",183)=""
        ^DIZ(999900,"B","hello there from unit test2",184)=""
        ^DIZ(999900,"B","hello there from unit test2",185)=""
        ^DIZ(999900,"B","record 1",1)=""

    Yields (rowid, closed-form global path, sf_path + [rowid]) tuples;
    with explain=True yields a single description string instead.
    NOTE(review): sf_path=[] is a mutable default - it is only read
    here, never mutated, so it is benign; confirm before changing.
    """
    gl = gl_prefix + '"%s",' % index
    if ranges:
        # Only the first range is honoured.
        r = ranges[0]
        from_value = r['from_value']
        to_value = r['to_value']
        from_rule = r['from_rule']
        to_rule = r['to_rule']
    else:
        from_value, to_value, from_rule, to_rule = None, None, None, None
    if explain:
        yield "index_order_traversal, ascending=%s, gl=%s, index=%s, X %s '%s' AND X %s '%s'" % (
            ascending, gl, index, from_rule, from_value, to_rule, to_value)
        return
    if from_value != None and to_value != None:
        if ascending:
            assert (from_value <= to_value)
        else:
            assert (to_value <= from_value)
    if from_value is None:
        # No lower bound: start before the first possible key.
        if ascending:
            lastkey = " "
        else:
            lastkey = "ZZZZZZZZZZZZZZ"
        lastrowid = ""
    else:
        lastkey = from_value
        if from_rule == '>':
            lastrowid = None  # looks for the next key after lastkey
        else:
            lastrowid = ''  # looks for the lastkey
    if ascending:
        asc = 1
    else:
        asc = -1
    # TODO: Fileman seems to structure indices with keys in the global path
    #       or in the value - need to investigate further
    # There is a mad collation approach in M, where numbers sort before non-numbers.
    # this really messes up the keys.
    # How should I search?
    # There is an inefficiency here it takes three searches to find the next record.
    while 1:
        if lastrowid is None:
            # locate the next matching index value
            lastkey, = M.mexec("""set s0=$order(%ss0),%s)""" % (gl, asc),
                               M.INOUT(str(lastkey)))
            if lastkey == "":
                break
            if ascending:
                # Enforce the lower/upper bound rules on the key.
                if from_value is not None:
                    if from_rule == ">" and lastkey <= from_value:
                        continue
                    if from_rule == ">=" and lastkey < from_value:
                        assert 0
                if to_value is not None:
                    if to_rule == "<=" and lastkey > to_value:
                        break
                    if to_rule == "=" and lastkey != to_value:
                        break
                    if to_rule == "<" and lastkey >= to_value:
                        break
                lastkey = lastkey
                lastrowid = "0"
            else:  # descending
                if from_value is not None:
                    if from_rule == "<" and lastkey >= from_value:
                        continue
                    if from_rule == "<=" and lastkey > from_value:
                        assert 0
                if to_value is not None:
                    if to_rule == ">=" and lastkey < to_value:
                        break
                    if to_rule == "=" and lastkey != to_value:
                        break
                    if to_rule == ">" and lastkey <= to_value:
                        break
                lastkey = lastkey
                lastrowid = ""
        # Have the key, get the first matching rowid
        lastrowid, = M.mexec(
            """set s0=$order(%s"%s",s1),%d)""" % (gl, lastkey, asc),
            M.INOUT(str(lastkey)), lastrowid)
        if lastrowid == "":
            # No match - advance to the next index key.
            lastrowid = None
            continue
        yield (lastrowid, "%s%s)" % (gl_prefix, lastrowid), sf_path + [lastrowid])
def indices(self):
    """
    Return a list of the indices

    To be valid, the index must be both in the IX

    ^DD(200,0,"IX","AASWB",200,654)=""

    Indices are read from ^DD(fileid,0,"IX",...), then filtered against
    the cross-references recorded on each column. Cached in
    self._indices after the first call.
    """
    if self._indices is None:
        i = []
        # TODO: this is not right for multi-column keys
        # TODO: new style indexes
        global_name = '^DD(%s,0,"IX","0")' % self.fileid
        prefix = '^DD(%s,0,"IX",' % self.fileid
        while 1:
            # $query walks the "IX" subtree node by node.
            global_name = M.mexec("set s0=$query(%s)" % global_name,
                                  M.INOUT(""))[0]
            if not global_name or not global_name.startswith(prefix):
                break
            suffix = global_name[len(prefix) : -1]
            parts = suffix.split(",")
            idx_name = parts[0][1:-1]
            idx_table = parts[1]
            idx_columns = parts[2:]
            index = Index(idx_name, idx_table, idx_columns)
            i.append(index)
        # A second list, gives indices for a field
        columns = {}
        for idx in i:
            for c in idx.columns:
                columns[c] = 1
        # Now trawl the listed columns in the data dictionary, and load their
        # cross references.
        cr_names = {}
        for c in columns.keys():
            idx_root = M.Globals["^DD"][self.fileid][c][1]
            if not idx_root[0].exists():
                continue
            for cr_id, val in idx_root.keys_with_decendants():
                if float(cr_id) > 0:
                    cr_header = idx_root[cr_id][0].value
                    parts = cr_header.split("^")
                    if len(parts) == 2 and parts[1]:  # if more than 2 parts, assume MUMPs trigger
                        f = cr_names.get(parts[1], list())
                        f.append(c)
                        cr_names[parts[1]] = f
        # Now, just delete items from the index list if they are not in cr_names
        self._indices = []
        for index in i:
            cr = cr_names.get(index.name)
            if cr:
                # verify columns - lots of errors in real systems
                if len(cr) == len(index.columns):
                    invalid = False
                    for c in cr:
                        if c not in index.columns:
                            invalid = True
                            continue
                    if not invalid:
                        self._indices.append(index)
    return self._indices
def _update_subfile(self, fieldid, sf_new_values):
    """
    Updating a subfile. This may involve inserting / deleting or
    just updating the data.

    fieldid: the parent-file field holding the multiple.
    sf_new_values: list of ((parent_fieldid, subfile_fieldid), values)
    pairs, one values-list per subfile column.

    Three passes: FILE^DIE for changed rows, UPDATE^DIE for extra new
    rows, classic ^DIK for rows to delete.
    Raises FilemanErrorNumber / FilemanError on Fileman errors.
    """
    # Step 1 - retrieve the existing data and compare.
    subfile_dd = self._dd.fields[fieldid].dd
    M.Globals["ERR"].kill()
    flags = ''
    if self._internal:
        flags = flags + "I"
    fileid = self._dd.fileid
    iens = self._iens
    fieldids = str(fieldid) + "*"
    tmpid = self._row_tmpid + str(fieldid)
    M.proc("GETS^DIQ",
        fileid,      # numeric file id
        iens,        # IENS
        fieldids,    # Fields to return
        flags,       # Flags N=no nulls, R=return field names
        tmpid, "ERR")
    # Check for error
    err = M.Globals["ERR"]
    if err.exists():
        raise FilemanErrorNumber(dierr=err)
    # Extract the result and store in rows.
    subfile_data = M.Globals[tmpid][subfile_dd._fileid]
    sf_live_data = []
    for iens in [r[0] for r in subfile_data.keys_with_decendants()]:
        row = subfile_data[iens]
        if self._internal:
            # Internal format nests each value under an 'I' subscript.
            row_fieldids = [x[0] for x in row.keys_with_decendants()]
            row_data = dict([(x, row[x]['I'].value) for x in row_fieldids])
        else:
            row_fieldids = row.keys()
            row_data = dict([(x, row[x].value) for x in row_fieldids])
        sf_live_data.append((iens, row_data))
    # Convert the new data to the same format
    sf_new_data=[]
    # First column creates the per-row dicts; remaining columns merge in.
    for (f_fieldid, sf_fieldid), values in sf_new_values[:1]:
        for i, value in enumerate(values):
            iens = '%d,%s' % (i+1, self._rowid)
            d = (iens, {sf_fieldid: value})
            sf_new_data.append(d)
    for (f_fieldid, sf_fieldid), values in sf_new_values[1:]:
        for i, value in enumerate(values):
            sf_new_data[i][1][sf_fieldid] = value
    # Now we have the rows on the database. For each row, we are going
    # to update, or delete it.
    fdaid = "fda%s" % id(self)
    fda = M.Globals[fdaid]
    fda.kill()
    sf_fda = fda[subfile_dd._fileid]
    # Pass 1 - updates
    do_update = False
    for i in range(len(sf_live_data)):
        if i < len(sf_new_data):
            sf_iens, row_data = sf_live_data[i]
            n_iens, n_row_data = sf_new_data[i]
            if row_data != n_row_data:
                # need to do an update
                for f,v in n_row_data.items():
                    sf_fda[sf_iens][f].value = v
                do_update = True
    if do_update:
        M.Globals["ERR"].kill()
        # Flags:
        # E - use external formats
        # K - lock the record
        # S - do not clear the row global
        # T - verify the data
        #TODO: I want to do validation, but use internal format throughout
        if self._internal:
            flags = ""
        else:
            flags = "ET"
        M.proc("FILE^DIE", flags, fdaid, "ERR")
        # Check for error
        err = M.Globals["ERR"]
        if err.exists():
            raise FilemanErrorNumber(dierr=err)
    # Pass 2 - inserts
    if len(sf_new_data) > len(sf_live_data):
        inserts = sf_new_data[len(sf_live_data):]
        fda.kill()
        sf_fda = fda[subfile_dd._fileid]
        for i, (iens, row) in enumerate(inserts):
            # "+1," IENS requests record creation under this parent row.
            sf_iens = '+1,%s,' % self._rowid
            for f,v in row.items():
                sf_fda[sf_iens][f].value = v
        M.Globals["ERR"].kill()
        # Flags:
        # E - use external formats
        # S - do not clear the row global
        if self._internal:
            flags = ""
        else:
            flags = "E"
        ienid = "ien%s" % id(self)
        M.proc("UPDATE^DIE", flags , fdaid, ienid, "ERR")
        # Check for error
        err = M.Globals["ERR"]
        if err.exists():
            raise FilemanErrorNumber(dierr=err)
    # pass 3 - deletes
    elif len(sf_new_data) < len(sf_live_data):
        deletes = sf_live_data[len(sf_new_data):]
        M.Globals["Y"].kill()
        for iens, data in deletes:
            # Generate a DA structure for a multiple.
            # see manual, P 2-58
            parts = [x for x in iens.split(",") if x]
            M.Globals["DA"].value = parts[0]
            for i, part in enumerate(parts[1:]):
                M.Globals["DA"][i+1].value = part
            M.Globals["DIK"].value = self._dd.m_open_form() + "%s,%s," % (self._rowid, parts[-1])
            # NOTE(review): 'i' is the last loop index above and is unbound
            # when parts has a single element - confirm intended.
            M.Globals["DA"][i+2].value = self._rowid
            M.proc("^DIK")
            if M.Globals["Y"] == "-1":
                # I don't know where to look for the error message - Classic API
                # Sets the flag, but no variables set
                # This may just mean that the record does not exist
                raise FilemanError("""DBSRow._update_subfile() : file [%s], fileid = [%s], rowid = [%s]"""
                    % (self._dd.filename, self._dd.fileid, self._rowid))
def file_order_traversal(gl, ranges=None, ascending=True, sf_path=None, explain=False):
    """
    Yield records by traversing the file global in $ORDER (file) order,
    i.e. without consulting an index.

    gl        -- closed-root global reference, e.g. "^DIZ(999000," ; the
                 rowid subscript is appended to it.
    ranges    -- optional list of range dicts; only the first entry is used.
                 Keys: from_value / to_value (boundary rowids or None) and
                 from_rule / to_rule (one of ">", ">=", "<", "<=").
    ascending -- traversal direction.
    sf_path   -- subscript path prefix for subfiles; each yielded record
                 extends it with the rowid.  None means top-level file.
                 (Was a mutable default [] - fixed to the None idiom.)
    explain   -- if true, yield a single human-readable description of the
                 planned traversal instead of any records, then stop.

    Yields (rowid, closed global reference, subscript path) tuples.
    """
    if sf_path is None:
        # Avoid the shared-mutable-default-argument pitfall.
        sf_path = []

    if ranges:
        r = ranges[0]
        from_rowid = r["from_value"]
        to_rowid = r["to_value"]
        from_rule = r["from_rule"]
        to_rule = r["to_rule"]
    else:
        from_rowid, to_rowid, from_rule, to_rule = None, None, None, None

    if explain:
        yield "file_order_traversal, ascending=%s, gl=%s, X %s %s AND X %s %s" % (
            ascending, gl, from_rule, from_rowid, to_rule, to_rowid)
        return

    # the new person file has non-integer user ids
    if from_rowid is not None:
        from_rowid = float(from_rowid)
    if to_rowid is not None:
        to_rowid = float(to_rowid)

    if from_rowid is not None and to_rowid is not None:
        if ascending:
            assert from_rowid <= to_rowid
        else:
            assert to_rowid <= from_rowid

    if from_rowid is None:
        # "0" is the file header record; $ORDER from it gives the first
        # data row when ascending.  Descending starts from the end (None).
        lastrowid = "0" if ascending else None
    else:
        # TODO: I have this in code in shared
        # Render the float as the canonical MUMPS numeric subscript string:
        # drop trailing fractional zeros, a bare trailing ".", and a
        # leading zero ("0.5" -> ".5").  After lstrip("0") the string can
        # neither start with "0" nor end with ".0", so the two follow-up
        # cleanups the original code did here were dead and are removed.
        lastrowid = ("%f" % from_rowid).rstrip("0").rstrip(".").lstrip("0")

    first_pass = True
    asc = 1 if ascending else -1

    while True:
        found = False
        if first_pass:
            # The starting value may itself name a record - verify it with
            # $DATA before falling back to $ORDER.
            first_pass = False
            if lastrowid is None and asc == -1:
                # No upper bound supplied: seed from the last record.
                lastrowid, = M.mexec("""set s0=$order(%ss0),-1)""" % gl, M.INOUT("%"))
                if valid_rowid(lastrowid):
                    found = True
            elif lastrowid and float(lastrowid) > 0:
                row_exists, = M.mexec("""set s0=$data(%ss0))""" % (gl), M.INOUT(lastrowid))
                if valid_rowid(row_exists):
                    found = True
        if not found:
            lastrowid, = M.mexec("""set s0=$order(%ss0),%d)""" % (gl, asc), M.INOUT(lastrowid))
            if not valid_rowid(lastrowid):
                break

        # Apply the range boundary rules.
        f_lastrowid = float(lastrowid)
        if ascending:
            if from_rowid is not None:
                if f_lastrowid == from_rowid and from_rule == ">":
                    continue  # exclusive lower bound - skip the boundary row
            if to_rowid is not None:
                if f_lastrowid >= to_rowid and to_rule == "<":
                    break
                if f_lastrowid > to_rowid and to_rule == "<=":
                    break
        else:  # descending
            if f_lastrowid == 0:
                break  # reached the header record
            if from_rowid is not None:
                if f_lastrowid == from_rowid and from_rule == "<":
                    continue  # exclusive upper bound - skip the boundary row
            if to_rowid is not None:
                if f_lastrowid <= to_rowid and to_rule == ">":
                    break
                if f_lastrowid < to_rowid and to_rule == ">=":
                    break

        # If this is a subfile, return the full subscript path.
        yield (lastrowid, "%s%s)" % (gl, lastrowid), sf_path + [lastrowid])
def file_order_traversal(gl, ranges=None, ascending=True, sf_path=[], explain=False):
    """
    Generate records by walking the file global in its natural ($ORDER)
    sequence - no index is consulted.

    NOTE(review): this is a verbatim duplicate of an earlier definition of
    the same function in this file; the later one shadows the earlier at
    import time.  One of the two should be removed.

    Yields (rowid, closed global reference, subscript path) tuples, or a
    single descriptive string when explain is true.
    """
    if ranges:
        bounds = ranges[0]
        from_rowid = bounds['from_value']
        to_rowid = bounds['to_value']
        from_rule = bounds['from_rule']
        to_rule = bounds['to_rule']
    else:
        from_rowid = to_rowid = from_rule = to_rule = None

    if explain:
        yield "file_order_traversal, ascending=%s, gl=%s, X %s %s AND X %s %s" % (
            ascending, gl, from_rule, from_rowid, to_rule, to_rowid)
        return

    # the new person file has non-integer user ids
    if from_rowid != None:
        from_rowid = float(from_rowid)
    if to_rowid != None:
        to_rowid = float(to_rowid)

    if from_rowid != None and to_rowid != None:
        assert (from_rowid <= to_rowid) if ascending else (to_rowid <= from_rowid)

    if from_rowid is None:
        cursor = "0" if ascending else None
    else:
        # TODO: I have this in code in shared
        cursor = ('%f' % from_rowid).rstrip('0').rstrip('.').lstrip('0')
        if from_rowid > 0 and cursor[0] == "0":
            cursor = cursor[1:]
        if cursor.endswith(".0"):
            cursor = cursor[:-2]

    step = 1 if ascending else -1
    seeded = False

    while True:
        # On the very first iteration the cursor may already name a record;
        # probe it with $DATA before advancing with $ORDER.
        hit = False
        if not seeded:
            seeded = True
            if cursor is None and step == -1:
                cursor, = M.mexec("""set s0=$order(%ss0),-1)""" % gl, M.INOUT('%'))
                if valid_rowid(cursor):
                    hit = True
            elif cursor and float(cursor) > 0:
                probe, = M.mexec("""set s0=$data(%ss0))""" % (gl), M.INOUT(cursor))
                if valid_rowid(probe):
                    hit = True
        if not hit:
            cursor, = M.mexec("""set s0=$order(%ss0),%d)""" % (gl, step), M.INOUT(cursor))
            if not valid_rowid(cursor):
                break

        # Enforce the requested range boundaries.
        numeric = float(cursor)
        if ascending:
            if from_rowid is not None and numeric == from_rowid and from_rule == ">":
                continue
            if to_rowid is not None:
                if (numeric >= to_rowid and to_rule == "<") or \
                   (numeric > to_rowid and to_rule == "<="):
                    break
        else:  # descending
            if numeric == 0:
                break  # header record
            if from_rowid is not None and numeric == from_rowid and from_rule == "<":
                continue
            if to_rowid is not None:
                if (numeric <= to_rowid and to_rule == ">") or \
                   (numeric < to_rowid and to_rule == ">="):
                    break

        # If this is a subfile, I need to return the full path.
        yield (cursor, "%s%s)" % (gl, cursor), sf_path + [cursor])