def save_message(self, sender, recipient, msgid, header, body, origcharset, recvfrom, processed=0, bulkload=False):
    """Import one message as part of the current transaction.

    sender/recipient are (domainname, address) pairs.  msgid must be unique
    (checked against the messages table, FTNDupMSGID otherwise).  recvfrom
    is the FTN address the message was received from (falsy -> None id).
    processed is stored as-is; bulkload=True skips the subscription /
    access verification but still resolves the destination address.

    Raises FTNExcessiveMessageSize, FTNNotSubscribed, FTNDupMSGID.
    """
    # reject oversized messages before touching the database
    if (len(body) + len(repr(header))) > MSGSIZELIMIT:
        raise FTNExcessiveMessageSize(len(body) + len(repr(header)), MSGSIZELIMIT)

    origdomname, origaddr = sender
    destdomname, destaddr = recipient
    destdom = self.domains[destdomname]

    origid = self.check_addr(origdomname, origaddr)  # allow autocreate for source
    # no autocreate for destination: only on request and if it exists in uplink
    # lists; the database must have a trigger to check the address to be created
    # for nodelist (nodes) and for echolist/uplinks (areas)

    if recvfrom:
        recvfrom_id = get_addr_id(self.db, self.FIDOADDR, recvfrom)
    else:
        recvfrom_id = None

    if not bulkload:
        # check domain's "verifysubscription" and if true refuse if not subscribed
        r = self.db.prepare("select verifysubscriptions from domains where id=$1")(destdom)
        if len(r) == 0:
            raise Exception("invalid domain after checkaddress ??? %d" % destdom)
        verify_subscription = r[0][0]
        if verify_subscription:
            # move to ftnaccess:
            # check if message received from link subscribed to the destination's address
            maypost, do_create = ftnaccess.may_post(self.db, recvfrom_id, (destdomname, destaddr))
            if maypost:
                print("posting allowed for %d (%d/%s) to %d/%s" % (recvfrom_id, self.FIDOADDR, recvfrom, destdom, destaddr))
                destid = self.check_addr(destdomname, destaddr)  # check node in nodelist and area exists
                if do_create:
                    print("subscribe %s to %s %s as uplink" % (recvfrom, destdomname, destaddr))
                    self.add_subscription(True, destdomname, destaddr, recvfrom)
            else:
                raise FTNNotSubscribed("%d/%s (id=%d)" % (self.FIDOADDR, recvfrom, recvfrom_id), "%d/%s" % (destdom, destaddr))
        else:
            # flaw: allows flooding with messages to non-existent addresses; should be
            # checked with a trigger in the database (allow create node/point only if
            # nodelisted, area only if listed on bone/uplinks)
            destid = self.check_addr(destdomname, destaddr)  # check node in nodelist and area exists
    else:
        # BUGFIX: previously destid was only assigned inside the "not bulkload"
        # branch, so every bulkload=True call crashed with NameError below.
        # Bulk loads skip the subscription check but must still resolve the
        # destination address id.
        destid = self.check_addr(destdomname, destaddr)

    # refuse duplicate MSGIDs
    if len(self.db.prepare("select id from messages where msgid=$1")(msgid)):
        raise FTNDupMSGID(msgid)

    # lazily prepared statements, cached on the session object
    if not self.Q_msginsert:
        self.Q_msginsert = self.db.prepare("insert into messages (source, destination, msgid, header, body, "
                                           "origcharset, processed, receivedfrom, receivedtimestamp, numberfordestination) "
                                           "values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id")
    if not self.Q_update_addr_msg:
        self.Q_update_addr_msg = self.db.prepare("update addresses set last=$2 where id=$1")
    if not self.Q_get_max_numfordest:
        self.Q_get_max_numfordest = self.db.prepare("select last from addresses where id=$1")

    with postgresql.alock.ExclusiveLock(self.db, IMPORTLOCK):  # lock per whole table messages
        # begin insert-lock: guarantees that for greater id there is a greater timestamp
        timestamp = datetime.datetime.now(datetime.timezone.utc)
        print("Transaction state", self.x.state, str(timestamp))
        if destdomname == "echo":
            # per-destination sequence number, only maintained for echomail
            numfordest = (self.Q_get_max_numfordest.first(destid) or 0) + 1
        else:
            numfordest = None

        new_msg = self.Q_msginsert(origid, destid, msgid, header, body, origcharset, processed, recvfrom_id, timestamp, numfordest)[0][0]
        print("insert msg #", new_msg, "to address", destid, "number", numfordest)
        self.Q_update_addr_msg(destid, new_msg)
        # end insert-lock

    self.last_message_for_address[destid] = new_msg
    self.poller.add_one(destid)
def import_tic(db, fullname, expect_addr=None, import_utime=None, ticdata=None, ignore_pw=False, skip_access_check=False):
    """Import one TIC file (and the data file it describes) into the database.

    Specify an older import_utime value to give the imported file archive status.
    expect_addr is used as the source when the TIC has no FROM; pass ticdata to
    skip re-reading the TIC from disk.  Raises WrongTic, NoFile, BadTic, DupPost.
    """
    # if "TO" is present
    #   get from links with matching from and to addresses and verify password
    # if "TO" is absent
    #   get to and password from links by from. if two rows are fetched - refuse tic
    #
    # in both cases refuse tic if no row fetched - tics are allowed for password links only

    if ticdata is None:
        filepath, filename = os.path.split(fullname)
        ticdata = read_tic(fullname)
    else:
        filepath = os.path.dirname(fullname)

    tic_src = get_optional(ticdata, "FROM")
    print("from", tic_src)
    if tic_src is None:
        tic_src = expect_addr

    tic_dest = get_optional(ticdata, "TO")
    print("to", tic_dest)

    if tic_src is None and tic_dest is None and skip_access_check:
        # local (non-FTN) posting: no link lookup, no password check
        print("Importing non-FTN file")
        src_id = None
        dest_id = None
    else:
        # build the link query dynamically from whichever addresses are known
        q = "select l.address, l.my, l.authentication from links l"
        q_args = []
        if tic_src:
            src_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"], tic_src)
            q += (" and" if q_args else " where") + " address=$%d" % (len(q_args) + 1)
            q_args.append(src_id)
        else:
            src_id = None
        if tic_dest:
            dest_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"], tic_dest)
            q += (" and" if q_args else " where") + " my=$%d" % (len(q_args) + 1)
            q_args.append(dest_id)
        else:
            dest_id = None
        print(q)
        print(q_args)
        possible_links = db.prepare(q)(*q_args)
        if len(possible_links) > 1:
            raise WrongTic("ambiguos link %s->%s" % (str(tic_src), str(tic_dest)))
        if len(possible_links) == 0:
            raise WrongTic("no matching link %s->%s" % (str(tic_src), str(tic_dest)))
        src_id, dest_id, authinfo = possible_links[0]
        pw = authinfo.find("RobotsPassword").text
        print(src_id, dest_id, pw)

        if not ignore_pw:
            tic_passw = get_single(ticdata, "PW")
            if not ftnaccess.check_pw(pw, tic_passw):
                raise WrongTic("invalid password [%s] for %s" % (tic_passw, tic_src))

    # source and destination verified, now try to find file
    # but before we should check if link can post to specified area
    area = get_single(ticdata, "AREA").upper()  # FTN areas must be uppercase
    print(area)
    if not skip_access_check:
        maypost = ftnaccess.may_post(db, src_id, ("fileecho", area))
        if not maypost:
            raise WrongTic("%s may not post to %s" % (tic_src, area))

    fname = os.path.split(get_single(ticdata, "FILE"))[1]
    try:
        fsize = get_single(ticdata, "SIZE", int)
    except BadTic:
        fsize = None
    fcrc = get_single(ticdata, "CRC", remove=False)
    print(fname, fsize, fcrc)

    # locate the announced file next to the TIC and verify size and CRC32
    ffullname = find_file(fname, filepath)
    if not os.path.exists(ffullname):
        raise NoFile("file %s does not exists" % ffullname)
    if fsize is not None and os.path.getsize(ffullname) != fsize:
        raise NoFile("file %s size != %d" % (ffullname, fsize))
    fsize, checksum = sz_crc32(ffullname)
    if checksum != fcrc.upper():
        raise NoFile("file %s crc32 %s != %s" % (ffullname, checksum, fcrc))
    print("file matches")

    # >>> LOCK FILEECHOES POSTINGS
    if db.FECHOIMPORTLOCK is None:
        db.FECHOIMPORTLOCK = db.prepare("select oid from pg_class where relname='file_post'").first()
    with postgresql.alock.ExclusiveLock(db, db.FECHOIMPORTLOCK, 0):
        # calculate hash
        # verify if it exists in database
        # if not, post as new (new blob, new name, new destination)
        # if yes, register new name (if differ) and destination for file

        # check if it is not duplicate tic:
        # select posting of same origin, area, filename, origin_record;
        # if any has same filesize and hash - compare content and drop duplicate
        tic_origin = get_optional(ticdata, "ORIGIN")
        if tic_origin:
            with ftnimport.session(db) as sess:
                tic_origin_id = sess.check_addr("node", tic_origin)
        else:
            tic_origin_id = None

        area_id = ftnconfig.get_addr_id(db, db.FTN_domains["fileecho"], area)
        try:
            tic_originrec = get_first(ticdata, "PATH")
        except BadTic as e:
            print("PATH is missing, no dupe checking")
            print(e)
            tic_originrec = None

        if tic_originrec:
            print("check if tic is first %s %d %s %s" % ((tic_origin, area_id, fname, tic_originrec)))
            for prev_f, prev_l, prev_h, prev_p in db.prepare(
                    "select f.id, f.length, f.sha512, p.id from files f inner join file_post p ON p.filedata=f.id "
                    "where p.origin=$1 and p.destination=$2 and p.filename=$3 and p.origin_record=$4")(
                        tic_origin_id, area_id, fname, tic_originrec):
                # tic with the same first record of PATH - the same posting
                os.rename(ffullname, ffullname + ".dup")
                if not fullname.endswith(".faketic"):
                    os.rename(fullname, fullname + ".dup")
                raise DupPost("similar posting %d, abandom" % prev_p, ffullname)

        # BUGFIX: all three file reads below now use "with" so the handles are
        # closed even when an exception is raised mid-read (the old bare
        # open()/close() leaked the descriptor on the error paths)
        sha512 = hashlib.new("sha512")
        with open(ffullname, "rb") as f:
            while True:
                z = f.read(262144)
                if not z:
                    break
                sha512.update(z)
        print(sha512.hexdigest())

        oldf_id = db.prepare("select id from files where sha512=$1").first(sha512.digest())
        if oldf_id is None:
            print("new file content")
            if fsize <= 262144:
                # small files are stored inline as bytea
                print("save as bytea")
                with open(ffullname, "rb") as f:
                    filedata = f.read()
                newf_id = db.prepare("insert into files (length, sha512, content) values ($1, $2, $3) returning id").first(
                    fsize, sha512.digest(), filedata)
            else:
                # big files go into a PostgreSQL large object, written in chunks
                print("save as large object")
                with ftnimport.session(db) as sess:
                    lo = sess.db.prepare("select lo_create(0)").first()
                    print("created lo", lo, end='')
                    lo_handle = sess.db.prepare("select lo_open($1, 131072)").first(lo)
                    with open(ffullname, "rb") as f:
                        while True:
                            z = f.read(262144)
                            if not z:
                                break
                            print(".", end='', flush=True)
                            if sess.db.prepare("select lowrite($1, $2)").first(lo_handle, z) != len(z):
                                raise Exception("error writing file data to database")
                    if sess.db.prepare("select lo_close($1)").first(lo_handle) != 0:
                        raise Exception("error closing large object")
                    newf_id = db.prepare("insert into files (length, sha512, lo) values ($1, $2, $3) returning id").first(
                        fsize, sha512.digest(), lo)
            f_id = newf_id
        else:
            print("use old", oldf_id)
            f_id = oldf_id

        # add name for filedata (names is an array column; append if missing)
        is_with_name = db.prepare("select id from files where $1 = ANY(names) and id=$2").first(fname, f_id)
        if not is_with_name:
            fnameslen = int(db.prepare("select array_upper(names, 1) from files where id=$1").first(f_id) or 0)
            db.prepare("update files set names[$1]=$2 where id=$3")(fnameslen + 1, fname, f_id)

        if import_utime is None:
            utime = int(time.mktime(time.gmtime()))  # convert post_time to float and use fractions if you have rate more than one file per some seconds
        else:
            utime = int(import_utime)
        print("post_time=", utime)
        db.prepare("insert into file_post (filedata, origin, destination, recv_from, recv_as, recv_timestamp, origin_record, filename, other, post_time) "
                   "values ($1, $2, $3, $4, $5, $6, $7, $8, $9, free_posttime($10))")\
            (f_id, tic_origin_id, area_id, src_id, dest_id, datetime.datetime.now(datetime.timezone.utc), tic_originrec, fname, json.dumps(ticdata), utime)
        print("inserted successfully")

        # posting committed: remove the imported file and (real) TIC from inbound
        print("unlink", ffullname)
        os.unlink(ffullname)
        if not fullname.endswith(".faketic"):
            print("unlink", fullname)
            os.unlink(fullname)
def import_tic(db, fullname, expect_addr=None, import_utime=None, ticdata=None, ignore_pw=False, skip_access_check=False):
    " specify older import_utime value to make imported file the status of aarchive "
    # NOTE(review): this is a second definition of import_tic in the same module;
    # at import time it shadows the earlier copy (which uses find_file instead of
    # find_matching_file and has different log texts) — confirm the duplicate is
    # intentional.
    #
    # if "TO" is present
    #   get from links with matching from and to addresses and verify password
    # if "TO" is absent
    #   get to and password from links by from. if two rows are fetched - refuse tic
    #
    # in both cases refuse tic if no row fetched - tics are allowed for password links only
    if ticdata is None:
        # given only a path: split it and parse the TIC file from disk
        filepath, filename = os.path.split(fullname)
        ticdata = read_tic(fullname)
    else:
        # caller already parsed the TIC; only the directory is needed
        filepath = os.path.dirname(fullname)

    tic_src = get_optional(ticdata, "FROM")
    print("TIC from:", tic_src)
    if tic_src is None:
        # no FROM in the TIC: fall back to the address the caller expected
        tic_src = expect_addr
    tic_dest = get_optional(ticdata, "TO")
    print("TIC to", tic_dest)

    if tic_src is None and tic_dest is None and skip_access_check:
        # local (non-FTN) posting: no link lookup, no password verification
        print("Importing non-FTN file")
        src_id = None
        dest_id = None
    else:
        # build the link-selection query dynamically from whichever of
        # src/dest addresses are known; $n placeholders are numbered as added
        q = "select l.address, l.my, l.authentication from links l"
        q_args = []
        if tic_src:
            src_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"], tic_src)
            q += (" and" if q_args else " where") + " address=$%d" % (len(q_args) + 1)
            q_args.append(src_id)
        else:
            src_id = None
        if tic_dest:
            dest_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"], tic_dest)
            q += (" and" if q_args else " where") + " my=$%d" % (len(q_args) + 1)
            q_args.append(dest_id)
        else:
            dest_id = None
        #print (q)
        #print (q_args)
        possible_links = db.prepare(q)(*q_args)
        # exactly one matching password-protected link is required
        if len(possible_links) > 1:
            raise WrongTic("ambiguos link %s->%s" % (str(tic_src), str(tic_dest)))
        if len(possible_links) == 0:
            raise WrongTic("no matching link %s->%s" % (str(tic_src), str(tic_dest)))
        src_id, dest_id, authinfo = possible_links[0]
        # authinfo appears to be an XML-ish element tree; RobotsPassword holds
        # the expected TIC password for this link
        pw = authinfo.find("RobotsPassword").text
        print("TIC src_id, dst_id, pw:", src_id, dest_id, pw)

        if not ignore_pw:
            tic_passw = get_single(ticdata, "PW")
            if not ftnaccess.check_pw(pw, tic_passw):
                raise WrongTic("invalid password [%s] for %s" % (tic_passw, tic_src))

    # source and destination verified, now try to find file
    # but before we should check if link can post to specified area
    area = get_single(ticdata, "AREA").upper()  # FTN areas must be uppercase
    print("TIC area:", area)
    if not skip_access_check:
        maypost = ftnaccess.may_post(db, src_id, ("fileecho", area))
        if not maypost:
            raise WrongTic("%s may not post to %s" % (tic_src, area))

    # announced file name (basename only), size and CRC from the TIC
    fname = os.path.split(get_single(ticdata, "FILE"))[1]
    try:
        fsize = get_single(ticdata, "SIZE", int)
    except BadTic:
        # SIZE is optional; skip the size pre-check below when absent
        fsize = None
    fcrc = get_single(ticdata, "CRC", remove=False)
    print("TIC name, size, crc:", fname, fsize, fcrc)
    # locate the data file near the TIC and verify size and CRC32
    ffullname = find_matching_file(filepath, fname, fsize, fcrc)
    if not os.path.exists(ffullname):
        raise NoFile("file %s does not exists" % ffullname)
    if fsize is not None and os.path.getsize(ffullname) != fsize:
        raise NoFile("file %s size != %d" % (ffullname, fsize))
    fsize, checksum = sz_crc32(ffullname)
    if checksum != fcrc.upper():
        raise NoFile("file %s crc32 %s != %s" % (ffullname, checksum, fcrc))
    print("file matches")

    # >>> LOCK FILEECHOES POSTINGS
    # advisory lock keyed on the file_post table's oid serializes all
    # fileecho imports
    if db.FECHOIMPORTLOCK is None:
        db.FECHOIMPORTLOCK = db.prepare(
            "select oid from pg_class where relname='file_post'").first()
    with postgresql.alock.ExclusiveLock(db, db.FECHOIMPORTLOCK, 0):
        # calculate hash
        # verify if it exists in database
        # if not, post as new (new blob, new name, new destination)
        # if yes, register new name (if differ) and destination for file

        # check if it is not duplicate tic
        # select posting of same origin, area, filename, origin_record
        # if any has same filesize and hash - compare content and drop duplicate
        tic_origin = get_optional(ticdata, "ORIGIN")
        if tic_origin:
            with ftnimport.session(db) as sess:
                tic_origin_id = sess.check_addr("node", tic_origin)
        else:
            tic_origin_id = None

        area_id = ftnconfig.get_addr_id(db, db.FTN_domains["fileecho"], area)
        try:
            # first PATH record identifies the original posting for dupe checks
            tic_originrec = get_first(ticdata, "PATH")
        except BadTic as e:
            print("PATH is missing, no dupe checking")
            print(e)
            tic_originrec = None

        if tic_originrec:
            print("check if tic is first %s %d %s %s" % ((tic_origin, area_id, fname, tic_originrec)))
            for prev_f, prev_l, prev_h, prev_p in db.prepare(
                    "select f.id, f.length, f.sha512, p.id from files f inner join file_post p ON p.filedata=f.id "
                    "where p.origin=$1 and p.destination=$2 and p.filename=$3 and p.origin_record=$4"
            )(tic_origin_id, area_id, fname, tic_originrec):
                # tic with the same first record of PATH - the same posting:
                # keep the duplicates aside with a .dup suffix and abort
                os.rename(ffullname, ffullname + ".dup")
                if not fullname.endswith(".faketic"):
                    os.rename(fullname, fullname + ".dup")
                raise DupPost("similar posting %d, abandom" % prev_p, ffullname)

        # hash the file contents in 256 KiB chunks
        sha512 = hashlib.new("sha512")
        f = open(ffullname, "rb")
        while (True):
            z = f.read(262144)
            if not z:
                break
            sha512.update(z)
        f.close()
        print(sha512.hexdigest())

        # deduplicate file contents by sha512
        oldf_id = db.prepare("select id from files where sha512=$1").first(
            sha512.digest())
        if oldf_id is None:
            print("new file content")
            if fsize <= 262144:
                # small file: store inline as bytea
                print("save as bytea")
                newf_id = db.prepare(
                    "insert into files (length, sha512, content) values ($1, $2, $3) returning id"
                ).first(fsize, sha512.digest(), open(ffullname, "rb").read())
            else:
                # big file: stream into a PostgreSQL large object
                print("save as large object")
                with ftnimport.session(db) as sess:
                    lo = sess.db.prepare("select lo_create(0)").first()
                    print("created lo", lo, end='')
                    # 131072 = INV_WRITE open mode for lo_open
                    lo_handle = sess.db.prepare(
                        "select lo_open($1, 131072)").first(lo)
                    f = open(ffullname, "rb")
                    while (True):
                        z = f.read(262144)
                        if not z:
                            break
                        print(".", end='', flush=True)
                        if sess.db.prepare("select lowrite($1, $2)").first(
                                lo_handle, z) != len(z):
                            raise Exception(
                                "error writing file data to database")
                    f.close()
                    if sess.db.prepare("select lo_close($1)").first(
                            lo_handle) != 0:
                        raise Exception("error closing large object")
                    newf_id = db.prepare(
                        "insert into files (length, sha512, lo) values ($1, $2, $3) returning id"
                    ).first(fsize, sha512.digest(), lo)
            f_id = newf_id
        else:
            print("use old", oldf_id)
            f_id = oldf_id

        # add name for filedata: append fname to files.names array if missing
        is_with_name = db.prepare(
            "select id from files where $1 = ANY(names) and id=$2").first(
                fname, f_id)
        if not is_with_name:
            fnameslen = int(
                db.prepare(
                    "select array_upper(names, 1) from files where id=$1").
                first(f_id) or 0)
            db.prepare("update files set names[$1]=$2 where id=$3")(
                fnameslen + 1, fname, f_id)

        if import_utime is None:
            utime = int(
                time.mktime(time.gmtime())
            )  # convert_post time to float and use fractions if you have rate more than one file per some seconds
        else:
            utime = int(import_utime)
        print("post_time=", utime)
        # record the posting; free_posttime() resolves post_time collisions
        db.prepare("insert into file_post (filedata, origin, destination, recv_from, recv_as, recv_timestamp, origin_record, filename, other, post_time) "
                   "values ($1, $2, $3, $4, $5, $6, $7, $8, $9, free_posttime($10))")\
            (f_id, tic_origin_id, area_id, src_id, dest_id, datetime.datetime.now(datetime.timezone.utc), tic_originrec, fname, json.dumps(ticdata), utime)
        print("inserted successfully")

        # posting stored: remove the data file and (real, non-fake) TIC
        print("unlink", ffullname)
        os.unlink(ffullname)
        if not fullname.endswith(".faketic"):
            print("unlink", fullname)
            os.unlink(fullname)