Example #1
def oblive(db, address):
  "remove an area address: unsubscribe all linked nodes via their robots, announce the removal in FLUID.LOCAL and delete the address (no-op if messages still reference it)"
  robot = ftnconfig.robotnames[address[0]]

  addr_id = ftnconfig.get_taddr_id(db, address)
  if addr_id is None:
    raise Exception("not found")

  msg_from = db.prepare("select count (*) from messages where source=$1").first(addr_id)
  msg_to = db.prepare("select count (*) from messages where destination=$1").first(addr_id)
  print (address, addr_id, "from:", msg_from, "to:", msg_to)

  if msg_from!=0 or msg_to!=0:
    print("messages exist")
    return
#  assert( input("enter 'yes' to confirm: ")=="yes" )

  with ftnimport.session(db) as sess:
    for (link_addr,subs_id) in db.prepare("select a.text, s.id from addresses a, subscriptions s where a.id=s.subscriber and s.target=$1")(addr_id):
      print ("unsubscribing",link_addr)
      link_id = ftnconfig.find_link(db, link_addr)
      my_id, pw=ftnaccess.link_password(db, link_id, forrobots=True)
      sess.send_message(ftnconfig.get_taddr(db, my_id), ftnconfig.SYSOP, ("node", link_addr), robot, None, pw, "-"+address[1], sendmode="direct")
      db.prepare("delete from subscriptions where id=$1")(subs_id)
    sess.send_message(("node", ftnconfig.ADDRESS), ftnconfig.SYSOP, ("echo", "FLUID.LOCAL"), "All", None, "removal", address[0]+" "+address[1]+" removed from node")
    db.prepare("delete from deletedvitalsubscriptionwatermarks where target=$1")(addr_id)
    db.prepare("delete from addresses where id=$1")(addr_id)
Example #2
def submit(file, sendmode=None):
    "parse a plain-text message: 'Header: value' lines (values are Python literals), a blank line, then the body; post it via an ftnimport session"
    doheader = True
    for l in file.splitlines():
        l = l.rstrip()
        if doheader:
            if l == "":
                doheader = False
                body = []
                continue
            if l.startswith("From: "):
                fromname = ast.literal_eval(l[6:])
            elif l.startswith("To: "):
                toname = ast.literal_eval(l[4:])
            elif l.startswith("Subject: "):
                subj = ast.literal_eval(l[9:])
            elif l.startswith("ReplyTo: "):
                msgid = ast.literal_eval(l[9:])
            elif l.startswith("Destination: "):
                dest = ast.literal_eval(l[13:])
            elif l.startswith("Attr: "):
                flags = ast.literal_eval(l[6:])
            else:
                if l[0] != "#":
                    raise Exception("invalid header %s" % repr(l))
        else:
            body.append(l)

    with ftnimport.session(db) as sess:
        sess.send_message(("node", ftnconfig.ADDRESS), fromname, dest, toname,
                          msgid, subj, "\n".join(body), flags, sendmode)
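An input sketch for submit() is shown below; the header names, the '#' comment rule, the blank-line separator and the ast.literal_eval parsing come from the code above, while the concrete values and the sendmode are illustrative assumptions.

# Hypothetical message text for submit(); every header value is a Python literal.
msg_text = (
    'From: "Sysop"\n'
    'To: "All"\n'
    'Subject: "test message"\n'
    'ReplyTo: None\n'
    'Destination: ("echo", "FLUID.LOCAL")\n'
    'Attr: None\n'
    '# header lines starting with "#" are skipped\n'
    '\n'
    'Message body goes here.\n'
)
submit(msg_text, sendmode="direct")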
Example #3
def submit(file, sendmode=None):
  doheader = True
  for l in file.splitlines():
    l=l.rstrip()
    if doheader:
      if l=="":
        doheader = False
        body = []
        continue
      if l.startswith("From: "):
        fromname = ast.literal_eval(l[6:])
      elif l.startswith("To: "):
        toname = ast.literal_eval(l[4:])
      elif l.startswith("Subject: "):
        subj = ast.literal_eval(l[9:])
      elif l.startswith("ReplyTo: "):
        msgid = ast.literal_eval(l[9:])
      elif l.startswith("Destination: "):
        dest = ast.literal_eval(l[13:])
      elif l.startswith("Attr: "):
        flags = ast.literal_eval(l[6:])
      else:
        if l[0]!="#":
          raise Exception("invalid header %s"%repr(l))
    else:
      body.append(l)

  with ftnimport.session(db) as sess:
    sess.send_message(("node", ftnconfig.ADDRESS), fromname, dest, toname, msgid, subj, "\n".join(body), flags, sendmode)
Example #4
def group_points():
  "set the group of each point address to its parent node address, creating node addresses for points that have none"

  with db.xact():
    for n_id, n_text, p_id, p_text, p_group in db.prepare(
        "select n.id, n.text, p.id, p.text, p.group from addresses n, addresses p "
        "where n.domain=$1 and p.domain=$1 and p.text LIKE n.text || '.%'")(db.FTN_domains["node"]):
      if p_group!=n_id:
        print("update point:", n_text, "<-", p_text, p_group==n_id)
        update_group(n_id, p_id)

  with ftnimport.session(db) as sess:
    for p_id, p_text in db.prepare("""select id, text from addresses where "group" is NULL and text LIKE '%:%/%.%' and domain=$1""")(db.FTN_domains["node"]):
      newnode=p_text[:p_text.rfind(".")]
      print("point without node:", p_id, p_text, p_text[:p_text.rfind(".")])
      newid=sess.check_addr("node", newnode)
      print("addind new node and grouping to", newnode)
      update_group(newid, p_id)
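A concrete illustration of the grouping rule may help; the addresses below are invented, only the LIKE pattern and the check_addr() fallback come from the code above.

# Hypothetical address rows and what group_points() does with them:
node = "2:5020/1042"       # n.text
point = "2:5020/1042.7"    # p.text matches n.text || '.%'  -> update_group(n_id, p_id)
orphan = "2:5034/10.1"     # no matching node row -> sess.check_addr("node", "2:5034/10")
                           # creates the node, then update_group(newid, p_id)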
Example #5
def oblive(db, address):
    robot = ftnconfig.robotnames[address[0]]

    addr_id = ftnconfig.get_taddr_id(db, address)
    if addr_id is None:
        raise Exception("not found")

    msg_from = db.prepare(
        "select count (*) from messages where source=$1").first(addr_id)
    msg_to = db.prepare(
        "select count (*) from messages where destination=$1").first(addr_id)
    print(address, addr_id, "from:", msg_from, "to:", msg_to)

    if msg_from != 0 or msg_to != 0:
        print("messages exist")
        return
    #  assert( input("enter 'yes' to confirm: ")=="yes" )

    with ftnimport.session(db) as sess:
        for (link_addr, subs_id) in db.prepare(
                "select a.text, s.id from addresses a, subscriptions s where a.id=s.subscriber and s.target=$1"
        )(addr_id):
            print("unsubscribing", link_addr)
            link_id = ftnconfig.find_link(db, link_addr)
            my_id, pw = ftnaccess.link_password(db, link_id, forrobots=True)
            sess.send_message(ftnconfig.get_taddr(db, my_id),
                              ftnconfig.SYSOP, ("node", link_addr),
                              robot,
                              None,
                              pw,
                              "-" + address[1],
                              sendmode="direct")
            db.prepare("delete from subscriptions where id=$1")(subs_id)
        sess.send_message(("node", ftnconfig.ADDRESS), ftnconfig.SYSOP,
                          ("echo", "FLUID.LOCAL"), "All", None, "removal",
                          address[0] + " " + address[1] + " removed from node")
        db.prepare(
            "delete from deletedvitalsubscriptionwatermarks where target=$1")(
                addr_id)
        db.prepare("delete from addresses where id=$1")(addr_id)
Example #6
def group_points():

    with db.xact():
        for n_id, n_text, p_id, p_text, p_group in db.prepare(
                "select n.id, n.text, p.id, p.text, p.group from addresses n, addresses p "
                "where n.domain=$1 and p.domain=$1 and p.text LIKE n.text || '.%'"
        )(db.FTN_domains["node"]):
            if p_group != n_id:
                print("update point:", n_text, "<-", p_text, p_group == n_id)
                update_group(n_id, p_id)

    with ftnimport.session(db) as sess:
        for p_id, p_text in db.prepare(
                """select id, text from addresses where "group" is NULL and text LIKE '%:%/%.%' and domain=$1"""
        )(db.FTN_domains["node"]):
            newnode = p_text[:p_text.rfind(".")]
            print("point without node:", p_id, p_text,
                  p_text[:p_text.rfind(".")])
            newid = sess.check_addr("node", newnode)
            print("addind new node and grouping to", newnode)
            update_group(newid, p_id)
Example #7
  def commit(self):
    "send queued audit-tracking replies, then mark the queued messages as processed"
    try:
      with ftnimport.session(self.db) as sess:
        for addr, name, deliverto, msg, charset in self.msgarqlist:
          print("send audit request to", addr)
          sess.send_message(("node", ADDRESS), "Audit tracker", addr, name, None, "Audit tracking response", """
This reply confirms that your message has been successfully delivered
to node %s

*******************************************************************************
%s
*******************************************************************************
""" % (deliverto, msg.as_str(shorten=True).decode(charset)))

    except:
      print("error sending ARq reply")
      traceback.print_exc()

    for msg in self.msglist:
      self.db.prepare("update messages set processed=$2 where id=$1")(msg,self.newstatus)
      print("commit msg #%d"%msg)

    self.msglist=set()
    self.msgarqlist=[]
Example #8
      continue
    except:
      pass

    l1=l.decode("utf-8", "replace")
    outp.append("Error on line %d"% l_pos)
    outp.append(l1.strip())
#    print(repr(l1))
    error_count +=1

    l_pos+=1


  u.close()

  outp.append("ASCII lines: %d"%ascii_count)
  outp.append("UTF-8 lines: %d"%utf8_count)
  outp.append("Error lines: %d"%error_count)


db = ftnconfig.connectdb()

with ftnimport.session(db) as sess:
  sess.send_message(("node", ftnconfig.ADDRESS), "Sergey Dorofeev", ("echo", "FLUID.REPORTS"), "All", None, "UTF-8 nodelist stats",
"""Привет All

%s

Вот так
"""%("\n".join(outp)))
Example #9
    # 1. verify that there is nothing there
    if domain == "fileecho":
        print(area, os.listdir("/tank/home/fido/fareas/" + area.lower()))
    elif domain == "echo":
        count = db.prepare(
            "select count(*) from messages where destination=$1").first(aid)
        print(area, count)
        if count:
            continue

    else:
        1 / 0  # deliberate crash: unexpected domain
    # 2. send message to subscribers
    # 3. remove subscriptions and address
    with ftnimport.session(db) as sess:
        for node in [
                ftnconfig.get_addr(db, x[0])[1]
                for x in ftnexport.get_subscribers(db, aid)
        ]:
            print(node)

        input("enter to purge, Ctrl-C to abort")
        for node in [
                ftnconfig.get_addr(db, x[0])[1]
                for x in ftnexport.get_subscribers(db, aid)
        ]:
            print(node)
            sess.send_message(
                ftnconfig.SYSOP, ("node", node), "Sysop", None,
                "Area Expunging notification", """Dear Sysop,
Example #10
def import_tic(db, fullname, expect_addr=None, import_utime=None, ticdata=None, ignore_pw=False, skip_access_check=False):
  " specify older import_utime value to make imported file the status of aarchive "
  # if "TO" is present
  #   get from links with matching from and to addresses and verify password
  # if "TO" is absent
  #   get to and password from links by from. if two rows are fetched - refuse tic
  #
  # in both cases refuse tic if no row fetched - tics are allowed for password links only
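  # Illustrative sketch of the two cases (the addresses are hypothetical):
  #   FROM=1:2/3 and TO=1:2/1  -> match the links row with address=1:2/3 and my=1:2/1
  #   FROM=1:2/3, no TO        -> match by address only; if two rows match, refuse (WrongTic)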
  if ticdata is None:
    filepath, filename = os.path.split(fullname)
    ticdata = read_tic(fullname)
  else:
    filepath = os.path.dirname(fullname)

  tic_src = get_optional(ticdata, "FROM")
  print ("from", tic_src)
  if tic_src is None:
    tic_src=expect_addr

  tic_dest = get_optional(ticdata, "TO")
  print ("to", tic_dest)

  if tic_src is None and tic_dest is None and skip_access_check:
    print ("Importing non-FTN file")
    src_id = None
    dest_id = None

  else:

    q="select l.address, l.my, l.authentication from links l"
    q_args=[]
    if tic_src:
      src_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"], tic_src)
      q += (" and" if q_args else " where") + " address=$%d"%(len(q_args)+1)
      q_args.append(src_id)
    else:
      src_id = None

    if tic_dest:
      dest_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"], tic_dest)
      q += (" and" if q_args else " where") + " my=$%d"%(len(q_args)+1)
      q_args.append(dest_id)
    else:
      dest_id = None

    print (q)
    print (q_args)

    possible_links = db.prepare(q)(*q_args)
    if len(possible_links) > 1:
      raise WrongTic("ambiguos link %s->%s"%(str(tic_src),str(tic_dest)))

    if len(possible_links) == 0:
      raise WrongTic("no matching link %s->%s"%(str(tic_src),str(tic_dest)))

    src_id, dest_id, authinfo = possible_links[0]
    pw = authinfo.find("RobotsPassword").text

    print (src_id, dest_id, pw)

    if not ignore_pw:
      tic_passw = get_single(ticdata, "PW")
      if not ftnaccess.check_pw(pw, tic_passw):
        raise WrongTic("invalid password [%s] for %s"%(tic_passw,tic_src))

  # source and destination verified, now try to find file
  # but before we should check if link can post to specified area
  area = get_single(ticdata, "AREA").upper() # FTN areas must be uppercase
  print (area)
  if not skip_access_check:
    maypost = ftnaccess.may_post(db, src_id, ("fileecho", area))
    if not maypost:
      raise WrongTic("%s may not post to %s"%(tic_src, area))

  fname = os.path.split(get_single(ticdata, "FILE"))[1]

  try:
    fsize = get_single(ticdata, "SIZE", int)
  except BadTic:
    fsize = None

  fcrc = get_single(ticdata, "CRC", remove=False)

  print (fname, fsize, fcrc)
  ffullname=find_file(fname, filepath)
  if not os.path.exists(ffullname):
    raise NoFile("file %s does not exists"%ffullname)

  if fsize is not None and os.path.getsize(ffullname)!=fsize:
    raise NoFile("file %s size != %d"%(ffullname, fsize))

  fsize, checksum = sz_crc32(ffullname)

  if checksum != fcrc.upper():
    raise NoFile("file %s crc32 %s != %s"%(ffullname, checksum, fcrc))

  print ("file matches")
  # >>> LOCK FILEECHOES POSTINGS
  if db.FECHOIMPORTLOCK is None:
    db.FECHOIMPORTLOCK=db.prepare("select oid from pg_class where relname='file_post'").first()
  with postgresql.alock.ExclusiveLock(db, db.FECHOIMPORTLOCK, 0):
    # calculate hash
    # verify if it exists in database
    # if not, post as new (new blob, new name, new destination)
    # if yes, register new name (if differ) and destination for file

    # check if it is not duplicate tic
    # select posting of same origin, area, filename, origin_record
    # if any has same filesize and hash - compare content and drop duplicate

    tic_origin = get_optional(ticdata, "ORIGIN")
    if tic_origin:
      with ftnimport.session(db) as sess:
        tic_origin_id = sess.check_addr("node", tic_origin)
    else:
      tic_origin_id = None

    area_id = ftnconfig.get_addr_id(db, db.FTN_domains["fileecho"], area)

    try:
      tic_originrec = get_first(ticdata, "PATH")
    except BadTic as e:
      print ("PATH is missing, no dupe checking")
      print (e)
      tic_originrec = None

    if tic_originrec:
      print("check if tic is first %s %d %s %s"%((tic_origin, area_id, fname, tic_originrec)))

      for prev_f, prev_l, prev_h, prev_p in db.prepare("select f.id, f.length, f.sha512, p.id from files f inner join file_post p ON p.filedata=f.id "
          "where p.origin=$1 and p.destination=$2 and p.filename=$3 and p.origin_record=$4")(tic_origin_id, area_id, fname, tic_originrec):
        os.rename(ffullname, ffullname+".dup")
        if not fullname.endswith(".faketic"):
          os.rename(fullname, fullname+".dup")
        raise DupPost("similar posting %d, abandon"%prev_p, ffullname)
        # tic with the same first record of PATH - the same posting

    sha512 = hashlib.new("sha512")
    f=open(ffullname, "rb")
    while(True):
      z=f.read(262144)
      if not z:
        break
      sha512.update(z)
    f.close()
    print(sha512.hexdigest())

    oldf_id = db.prepare("select id from files where sha512=$1").first(sha512.digest())
    if oldf_id is None:
      print("new file content")
      if fsize<=262144:
        print ("save as bytea")
        newf_id = db.prepare("insert into files (length, sha512, content) values ($1, $2, $3) returning id").first(fsize, sha512.digest(), open(ffullname, "rb").read())
      else:
        print ("save as large object")
        with ftnimport.session(db) as sess:
          lo=sess.db.prepare("select lo_create(0)").first()
          print("created lo", lo,end='')
          lo_handle=sess.db.prepare("select lo_open($1, 131072)").first(lo)
          f=open(ffullname, "rb")
          while(True):
            z=f.read(262144)
            if not z:
              break
            print(".", end='', flush=True)
            if sess.db.prepare("select lowrite($1, $2)").first(lo_handle, z) != len(z):
              raise Exception("error writing file data to database")
          f.close()
          if sess.db.prepare("select lo_close($1)").first(lo_handle) != 0:
            raise Exception("error closing large object")

          newf_id = db.prepare("insert into files (length, sha512, lo) values ($1, $2, $3) returning id").first(fsize, sha512.digest(), lo)

      f_id = newf_id
    else:
      print("use old", oldf_id)
      f_id = oldf_id

    # add name for filedata
    is_with_name = db.prepare("select id from files where $1 = ANY(names) and id=$2").first(fname, f_id)
    if not is_with_name:
      fnameslen = int(db.prepare("select array_upper(names, 1) from files where id=$1").first(f_id) or 0)
      db.prepare("update files set names[$1]=$2 where id=$3")(fnameslen+1, fname, f_id)

    if import_utime is None:
      utime = int(time.mktime(time.gmtime())) # convert post_time to float and use fractions if files can arrive faster than one per second
    else:
      utime = int(import_utime)

    print ("post_time=", utime)

    db.prepare("insert into file_post (filedata, origin, destination, recv_from, recv_as, recv_timestamp, origin_record, filename, other, post_time) "
                    "values ($1, $2, $3, $4, $5, $6, $7, $8, $9, free_posttime($10))")\
      (f_id, tic_origin_id, area_id, src_id, dest_id, datetime.datetime.now(datetime.timezone.utc), tic_originrec, fname, json.dumps(ticdata), utime)
    print ("inserted successfully")
    print ("unlink", ffullname)
    os.unlink(ffullname)
    if not fullname.endswith(".faketic"):
      print ("unlink", fullname)
      os.unlink(fullname)
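A hedged invocation sketch for import_tic() follows; the inbound path and the expected link address are made up, while the exception types are the ones raised above.

# Hypothetical call; the path and link address are illustrative only.
try:
  import_tic(db, "/path/to/inbound/example.tic", expect_addr="2:5020/9999")
except WrongTic as e:
  print("tic refused:", e)
except NoFile as e:
  print("file missing or does not match:", e)
except DupPost as e:
  print("duplicate posting:", e)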
Example #11
for k, v in dr2.items():
  dr.setdefault(k, set()).update(v)

alls = set()
for peer, targets in dr.items():
  for t in targets:
      #print(peer, "receives for", t)
      alls.add(("node", t, peer))


#for x, y, z in alls:
#  print ("%s %-24s via %-24s"%(x,y,z))
#exit()

with session(db) as sess:
  # fetch old subscriptions
  for sid, target, subscriber in db.prepare("select s.id, t.text, sr.text from subscriptions s, addresses t, addresses sr "
            "where s.target=t.id and t.domain=$1 and s.subscriber=sr.id")(db.FTN_domains["node"]):
    if ('node', target, subscriber) in alls:
      alls.remove(('node', target, subscriber))
    else:
      print ("remove subscription to", target, "for", subscriber)
      db.prepare("delete from subscriptions where id = $1")(sid)
    
  for domain, target, subscriber in alls:
    print (target,"via",subscriber)
    try:
        sess.add_subscription(True, "node", target, subscriber)
        pass
    except FTNNoAddressInBase:
Example #12
                        raise Exception("unknown flag %s" % repr(fl))
            else:
                raise Exception("unknown parameter %s" % repr(x[0]))

            x = x[2:]
        nodes[node] = info

    else:
        raise Exception("bad - %s" % repr(l))

for l in fileinput.input("import/binkd.nl"):
    x = shlex.split(l, "#")
    if not x:
        continue
    if not x[2].endswith("fidonet.net"):
        nodes.setdefault(x[1], {})["addr"] = x[2]
        nodes.setdefault(x[1], {})["binkp"] = True
        nodes.setdefault(x[1], {})["time"] = "00:00-24:00"

with ftnimport.session(ftnimport.db) as sess:
    counter = 0
    for n, i in nodes.items():
        sys.stdout.write("%d[%d]\r" % (counter, len(nodes)))
        sys.stdout.flush()
        counter += 1
        if "addr" not in i:
            continue
        for proto in ["binkp", "ifcico"]:
            if proto in i:
                sess.import_link_conn(n, (proto, i["addr"], i["time"]))
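For orientation, the shape of an import/binkd.nl line as this loop reads it is sketched below; the node and host are invented, while the positional meaning (second field = node, third field = internet address), the whitespace splitting and the '#' comment handling follow from the code.

# Hypothetical import/binkd.nl line and how the loop above consumes it:
#   node 2:5020/1042 fido.example.org   # trailing comment removed, since shlex comment parsing is enabled
# -> x == ["node", "2:5020/1042", "fido.example.org"]
# -> nodes["2:5020/1042"] gets addr="fido.example.org", binkp=True and time="00:00-24:00",
#    unless the host ends with "fidonet.net".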
Example #14
def import_tic(db,
               fullname,
               expect_addr=None,
               import_utime=None,
               ticdata=None,
               ignore_pw=False,
               skip_access_check=False):
    " specify older import_utime value to make imported file the status of aarchive "
    # if "TO" is present
    #   get from links with matching from and to addresses and verify password
    # if "TO" is absent
    #   get to and password from links by from. if two rows are fetched - refuse tic
    #
    # in both cases refuse tic if no row fetched - tics are allowed for password links only
    if ticdata is None:
        filepath, filename = os.path.split(fullname)
        ticdata = read_tic(fullname)
    else:
        filepath = os.path.dirname(fullname)

    tic_src = get_optional(ticdata, "FROM")
    print("TIC from:", tic_src)
    if tic_src is None:
        tic_src = expect_addr

    tic_dest = get_optional(ticdata, "TO")
    print("TIC to", tic_dest)

    if tic_src is None and tic_dest is None and skip_access_check:
        print("Importing non-FTN file")
        src_id = None
        dest_id = None

    else:

        q = "select l.address, l.my, l.authentication from links l"
        q_args = []
        if tic_src:
            src_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"], tic_src)
            q += (" and"
                  if q_args else " where") + " address=$%d" % (len(q_args) + 1)
            q_args.append(src_id)
        else:
            src_id = None

        if tic_dest:
            dest_id = ftnconfig.get_addr_id(db, db.FTN_domains["node"],
                                            tic_dest)
            q += (" and"
                  if q_args else " where") + " my=$%d" % (len(q_args) + 1)
            q_args.append(dest_id)
        else:
            dest_id = None

        #print (q)
        #print (q_args)

        possible_links = db.prepare(q)(*q_args)
        if len(possible_links) > 1:
            raise WrongTic("ambiguos link %s->%s" %
                           (str(tic_src), str(tic_dest)))

        if len(possible_links) == 0:
            raise WrongTic("no matching link %s->%s" %
                           (str(tic_src), str(tic_dest)))

        src_id, dest_id, authinfo = possible_links[0]
        pw = authinfo.find("RobotsPassword").text

        print("TIC src_id, dst_id, pw:", src_id, dest_id, pw)

        if not ignore_pw:
            tic_passw = get_single(ticdata, "PW")
            if not ftnaccess.check_pw(pw, tic_passw):
                raise WrongTic("invalid password [%s] for %s" %
                               (tic_passw, tic_src))

    # source and destination verified, now try to find file
    # but before we should check if link can post to specified area
    area = get_single(ticdata, "AREA").upper()  # FTN areas must be uppercase
    print("TIC area:", area)
    if not skip_access_check:
        maypost = ftnaccess.may_post(db, src_id, ("fileecho", area))
        if not maypost:
            raise WrongTic("%s may not post to %s" % (tic_src, area))

    fname = os.path.split(get_single(ticdata, "FILE"))[1]

    try:
        fsize = get_single(ticdata, "SIZE", int)
    except BadTic:
        fsize = None

    fcrc = get_single(ticdata, "CRC", remove=False)

    print("TIC name, size, crc:", fname, fsize, fcrc)
    ffullname = find_matching_file(filepath, fname, fsize, fcrc)

    if not os.path.exists(ffullname):
        raise NoFile("file %s does not exists" % ffullname)

    if fsize is not None and os.path.getsize(ffullname) != fsize:
        raise NoFile("file %s size != %d" % (ffullname, fsize))

    fsize, checksum = sz_crc32(ffullname)

    if checksum != fcrc.upper():
        raise NoFile("file %s crc32 %s != %s" % (ffullname, checksum, fcrc))

    print("file matches")
    # >>> LOCK FILEECHOES POSTINGS
    if db.FECHOIMPORTLOCK is None:
        db.FECHOIMPORTLOCK = db.prepare(
            "select oid from pg_class where relname='file_post'").first()
    with postgresql.alock.ExclusiveLock(db, db.FECHOIMPORTLOCK, 0):
        # calculate hash
        # verify if it exists in database
        # if not, post as new (new blob, new name, new destination)
        # if yes, register new name (if differ) and destination for file

        # check if it is not duplicate tic
        # select posting of same origin, area, filename, origin_record
        # if any has same filesize and hash - compare content and drop duplicate

        tic_origin = get_optional(ticdata, "ORIGIN")
        if tic_origin:
            with ftnimport.session(db) as sess:
                tic_origin_id = sess.check_addr("node", tic_origin)
        else:
            tic_origin_id = None

        area_id = ftnconfig.get_addr_id(db, db.FTN_domains["fileecho"], area)

        try:
            tic_originrec = get_first(ticdata, "PATH")
        except BadTic as e:
            print("PATH is missing, no dupe checking")
            print(e)
            tic_originrec = None

        if tic_originrec:
            print("check if tic is first %s %d %s %s" %
                  ((tic_origin, area_id, fname, tic_originrec)))

            for prev_f, prev_l, prev_h, prev_p in db.prepare(
                    "select f.id, f.length, f.sha512, p.id from files f inner join file_post p ON p.filedata=f.id "
                    "where p.origin=$1 and p.destination=$2 and p.filename=$3 and p.origin_record=$4"
            )(tic_origin_id, area_id, fname, tic_originrec):
                os.rename(ffullname, ffullname + ".dup")
                if not fullname.endswith(".faketic"):
                    os.rename(fullname, fullname + ".dup")
                raise DupPost("similar posting %d, abandon" % prev_p,
                              ffullname)
                # tic with the same first record of PATH - the same posting

        sha512 = hashlib.new("sha512")
        f = open(ffullname, "rb")
        while (True):
            z = f.read(262144)
            if not z:
                break
            sha512.update(z)
        f.close()
        print(sha512.hexdigest())

        oldf_id = db.prepare("select id from files where sha512=$1").first(
            sha512.digest())
        if oldf_id is None:
            print("new file content")
            if fsize <= 262144:
                print("save as bytea")
                newf_id = db.prepare(
                    "insert into files (length, sha512, content) values ($1, $2, $3) returning id"
                ).first(fsize, sha512.digest(),
                        open(ffullname, "rb").read())
            else:
                print("save as large object")
                with ftnimport.session(db) as sess:
                    lo = sess.db.prepare("select lo_create(0)").first()
                    print("created lo", lo, end='')
                    lo_handle = sess.db.prepare(
                        "select lo_open($1, 131072)").first(lo)
                    f = open(ffullname, "rb")
                    while (True):
                        z = f.read(262144)
                        if not z:
                            break
                        print(".", end='', flush=True)
                        if sess.db.prepare("select lowrite($1, $2)").first(
                                lo_handle, z) != len(z):
                            raise Exception(
                                "error writing file data to database")
                    f.close()
                    if sess.db.prepare("select lo_close($1)").first(
                            lo_handle) != 0:
                        raise Exception("error closing large object")

                    newf_id = db.prepare(
                        "insert into files (length, sha512, lo) values ($1, $2, $3) returning id"
                    ).first(fsize, sha512.digest(), lo)

            f_id = newf_id
        else:
            print("use old", oldf_id)
            f_id = oldf_id

        # add name for filedata
        is_with_name = db.prepare(
            "select id from files where $1 = ANY(names) and id=$2").first(
                fname, f_id)
        if not is_with_name:
            fnameslen = int(
                db.prepare(
                    "select array_upper(names, 1) from files where id=$1").
                first(f_id) or 0)
            db.prepare("update files set names[$1]=$2 where id=$3")(
                fnameslen + 1, fname, f_id)

        if import_utime is None:
            utime = int(
                time.mktime(time.gmtime())
            )  # convert post_time to float and use fractions if files can arrive faster than one per second
        else:
            utime = int(import_utime)

        print("post_time=", utime)

        db.prepare("insert into file_post (filedata, origin, destination, recv_from, recv_as, recv_timestamp, origin_record, filename, other, post_time) "
                        "values ($1, $2, $3, $4, $5, $6, $7, $8, $9, free_posttime($10))")\
          (f_id, tic_origin_id, area_id, src_id, dest_id, datetime.datetime.now(datetime.timezone.utc), tic_originrec, fname, json.dumps(ticdata), utime)
        print("inserted successfully")
        print("unlink", ffullname)
        os.unlink(ffullname)
        if not fullname.endswith(".faketic"):
            print("unlink", fullname)
            os.unlink(fullname)