def setUp(self):
    # Connect to the GPRSDB.
    self.gprs_db = gprs_database.GPRSDB()
    # Add some records to the GPRSDB.
    self.now = time.time()
    records = [
        (psycopg2.TimestampFromTicks(self.now - 60),
         'IMSI901550000000084', '192.168.99.1', 100, 200, 100, 200),
        (psycopg2.TimestampFromTicks(self.now - 30),
         'IMSI901550000000084', '192.168.99.1', 300, 500, 200, 300),
        (psycopg2.TimestampFromTicks(self.now - 10),
         'IMSI901550000000084', '192.168.99.1', 700, 600, 400, 100),
        (psycopg2.TimestampFromTicks(self.now - 5),
         'IMSI901550000000084', '192.168.99.1', 750, 625, 50, 25),
    ]
    schema = ('record_timestamp, imsi, ipaddr, uploaded_bytes,'
              ' downloaded_bytes, uploaded_bytes_delta,'
              ' downloaded_bytes_delta')
    connection = psycopg2.connect(host='localhost', database='endaga',
                                  user=PG_USER, password=PG_PASSWORD)
    with connection.cursor() as cursor:
        for record in records:
            values = "%s, '%s', '%s', %s, %s, %s, %s" % record
            command = 'insert into gprs_records (%s) values(%s)' % (
                schema, values)
            cursor.execute(command)
    connection.commit()
def get_records(self, start_timestamp=0, end_timestamp=None):
    """Gets records from the table between the specified timestamps.

    Args:
      Timestamps are given in seconds since epoch.

    Returns a list of dicts, each of the form: {
      'id': 3,
      'record_timestamp': 1341556432,
      'imsi': 'IMSI901550000000084',
      'ipaddr': '192.168.99.3',
      'uploaded_bytes': 5567,
      'downloaded_bytes': 9987,
      'uploaded_bytes_delta': 74,
      'downloaded_bytes_delta': 139
    }
    """
    start = psycopg2.TimestampFromTicks(start_timestamp)
    if not end_timestamp:
        end_timestamp = time.time()
    end = psycopg2.TimestampFromTicks(end_timestamp)
    template = ('select * from %s where record_timestamp >= %s'
                ' and record_timestamp <= %s')
    command = template % (self.table_name, start, end)
    with self.connection.cursor(
            cursor_factory=psycopg2.extras.RealDictCursor) as cursor:
        cursor.execute(command)
        self.connection.commit()
        return cursor.fetchall()
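The snippet above interpolates the adapted timestamps straight into the SQL string, relying on str() of the adapter producing a quoted literal. A minimal alternative sketch, assuming the same hypothetical self.connection and self.table_name, binds the timestamps as query parameters instead, so psycopg2 handles the quoting (the table name still has to be interpolated, since identifiers cannot be bound):

    # Hypothetical parameterized variant of the query in get_records above.
    query = ('select * from %s where record_timestamp >= %%s'
             ' and record_timestamp <= %%s' % self.table_name)
    with self.connection.cursor(
            cursor_factory=psycopg2.extras.RealDictCursor) as cursor:
        cursor.execute(query, (psycopg2.TimestampFromTicks(start_timestamp),
                               psycopg2.TimestampFromTicks(end_timestamp)))
        return cursor.fetchall()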
def redbackSNMPWalk(data, ip, version, community):
    """
    Run an SNMP walk against the designated REDBACK device (ip) and
    retrieve the list of connected subscribers.
    :param ip:
    :param version:
    :param community:
    :return Vars: list of OIDs in which the usernames are encoded
    """
    session = netsnmp.Session(DestHost=ip, Version=version,
                              Community=community)
    session.UseLongNames = 1
    oid = ".1.3.6.1.4.1.2352.2.27.1.1.1.1.3"
    var = netsnmp.Varbind(oid)
    vars_list = netsnmp.VarList(var)
    logging.debug("--- %s seconds ---" % (time.time() - start_time))
    logging.debug(f'Snmp walk to {ip}')
    subscribers = session.walk(vars_list)
    i = 0
    for sub in subscribers:
        subtype = redbackVlanFind(sub)
        user = redbackLoginDecode(vars_list[i].tag)
        realm = ".".join(user.split('@')[1].split('.')[1:3])
        # A zero subtype means the VLAN lookup failed; fall back to
        # classifying the login by its realm.
        if subtype == 0:
            subtype = loginClassify(data, realm)
        connected_logins.append(
            (user, psycopg2.TimestampFromTicks(time.time() // 1),
             psycopg2.TimestampFromTicks(time.time() // 1), True, subtype,
             True))
        i += 1
def ciscoSNMPGet(data, h, ip, version, community):
    """
    Get detailed subscriber information.

    OID : .1.3.6.1.4.1.9.9.786.1.1.1.1.22
        This object indicates the NAS port-identifier identifying the port
        on the NAS providing access to the subscriber.
    OID : .1.3.6.1.4.1.9.9.786.1.1.1.1.23
        This object indicates the domain associated with the subscriber.
    OID : .1.3.6.1.4.1.9.9.786.1.1.1.1.24
        This object indicates the username identifying the subscriber.

    This function collects information (NAS port, domain and username)
    about subscribers.
    """
    var_oid_22 = netsnmp.Varbind(OID_CISCO_22)
    var_oid_23 = netsnmp.Varbind(OID_CISCO_23)
    var_oid_24 = netsnmp.Varbind(OID_CISCO_24)
    vars_list = netsnmp.VarList(var_oid_22, var_oid_23, var_oid_24)
    session = netsnmp.Session(DestHost=ip, Version=version,
                              Community=community)
    session.UseLongNames = 1
    logging.debug("--- %s seconds ---" % (time.time() - start_time))
    logging.debug(f'Snmp walk to {ip}')
    for i in range(data['hosts'][h]['nb_sub']):
        try:
            reply = session.getnext(vars_list)
            subtype = int(
                reply[0].decode("utf-8").split('/')[-1].split('.')[0])
            realm = reply[1].decode("utf-8").split('.', 1)[1]
            user = reply[2].decode("utf-8")
            # A zero subtype means the NAS port did not identify the
            # subscriber; fall back to classifying by realm.
            if subtype == 0:
                subtype = loginClassify(data, realm)
            connected_logins.append(
                (user, psycopg2.TimestampFromTicks(time.time() // 1),
                 psycopg2.TimestampFromTicks(time.time() // 1), True,
                 subtype, True))
        except (IndexError, ValueError, AttributeError):
            print(reply)
def scheduleObject(self, instance, when, duration, value):
    _when = psycopg2.TimestampFromTicks(when)
    _until = psycopg2.TimestampFromTicks(when + duration)
    cur = self.db.cursor()
    cur.execute("""
        INSERT INTO Schedule (objectID, active, until, value)
        VALUES (%s, %s, %s, %s)
        RETURNING scheduleID;
        """, (instance.id, _when, _until, value))
    scheduleID = cur.fetchone()[0]
    print "Database.scheduleObject>", scheduleID, instance, value
    return scheduleID
def consume_and_insert_in_db(self, limit=None):
    topic = self.kafka_client.topics[self.KAFKA_TOPIC]
    consumer = topic.get_simple_consumer()
    count = 0
    with self.pg_con.cursor() as cur:
        for message in consumer:
            if message is not None:
                obj_msg = loads(message.value)
                try:
                    # TODO: batch insertion
                    cur.execute(
                        "INSERT INTO metrics "
                        "VALUES(%s, %s, %s);",
                        (psycopg2.TimestampFromTicks(obj_msg['time']),
                         obj_msg['machine'],
                         json.dumps(obj_msg['metrics'])))
                    self.logger.log(logging.INFO,
                                    f"{cur.rowcount} rows inserted")
                    # TODO: consider a better commit strategy
                    self.pg_con.commit()
                except psycopg2.IntegrityError:
                    self.logger.log(logging.INFO,
                                    "skipping already existing row")
                    self.pg_con.rollback()
                count += 1
                if limit and count >= limit:
                    break
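The first TODO above asks for batch insertion. A minimal sketch of one way to do it, assuming the same three-column metrics table and PostgreSQL 9.5+ for ON CONFLICT, is psycopg2.extras.execute_values, which expands a single VALUES placeholder over many rows in one round trip (the flush threshold of 500 here is an arbitrary choice):

    # Hypothetical batching sketch: buffer decoded messages, flush in bulk.
    from psycopg2.extras import execute_values

    batch = []
    for message in consumer:
        if message is None:
            continue
        obj_msg = loads(message.value)
        batch.append((psycopg2.TimestampFromTicks(obj_msg['time']),
                      obj_msg['machine'], json.dumps(obj_msg['metrics'])))
        if len(batch) >= 500:
            # ON CONFLICT DO NOTHING replaces the per-row IntegrityError
            # rollback with a server-side skip of duplicate rows.
            execute_values(
                cur, "INSERT INTO metrics VALUES %s ON CONFLICT DO NOTHING",
                batch)
            pg_con.commit()
            batch = []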
def get_sets(self, tdb):
    s1 = tdb.get_draws(self.already_fetched_prefixes, self.time)
    s2 = tdb.get_draws(self.first_time_fetched_prefixes,
                       psycopg2.TimestampFromTicks(0))
    for k, v in s2.iteritems():
        if k not in s1:
            s1[k] = v
        elif v[0] == False:
            continue
        else:
            s1[k][3].extend(v[3])
    set_names = {}
    for set_id in s1.keys():
        set_names[set_id] = tdb.get_set_name(set_id)
    sets = []
    for k, v in s1.iteritems():
        sets.append((set_names[k], v))
    self.sets = sets
def delete_records(self, timestamp):
    """Deletes records older than the given epoch timestamp."""
    timestamp = psycopg2.TimestampFromTicks(timestamp)
    template = 'delete from %s where record_timestamp < %s'
    command = template % (self.table_name, timestamp)
    with self.connection.cursor() as cursor:
        cursor.execute(command)
        self.connection.commit()
def test_date_time_allocation_bug(self):
    d1 = psycopg2.Date(2002, 12, 25)
    d2 = psycopg2.DateFromTicks(
        time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0)))
    t1 = psycopg2.Time(13, 45, 30)
    t2 = psycopg2.TimeFromTicks(
        time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0)))
    t1 = psycopg2.Timestamp(2002, 12, 25, 13, 45, 30)
    t2 = psycopg2.TimestampFromTicks(
        time.mktime((2002, 12, 25, 13, 45, 30, 0, 0, 0)))
def get_params(self, tbd):
    params = []
    params.extend(tbd.get_params(self.already_fetched_prefixes, self.time))
    params.extend(
        tbd.get_params(self.first_time_fetched_prefixes,
                       psycopg2.TimestampFromTicks(0)))
    self.params = params
def new(self, name, filename=None):
    blob = self.conn.lobject(0, 'w', 0, filename)
    csr = self.conn.cursor()
    csr.execute(
        "INSERT INTO downloadedFiles (id, name, inserted, data)"
        " VALUES (DEFAULT, %s, %s, %s) RETURNING id",
        (name, psycopg2.TimestampFromTicks(time.time()), blob.oid))
    id_ = csr.fetchone()[0]
    self.conn.commit()
    return DBFile(self.conn, id_)
def process_node(conn, source_path, source_dict, is_file, is_insert=True):
    absolute_name = os.path.abspath(source_path)
    owner_name = u""
    domain_name = u""
    try:
        output = subprocess.check_output(
            [GETOWNER.encode(FILESYSTEMENCODING),
             (EXTENDEDLENGTH + absolute_name).encode(FILESYSTEMENCODING)]
        ).strip().split(os.sep)
        domain_name, owner_name = output[0], output[1]
    except subprocess.CalledProcessError as e:
        logging.exception(e.message)
        return
    except Exception as e:
        logging.error("FailedOnFile: %s" % absolute_name)
        logging.exception(e.message)
        return
    unc_name = basename_2_uncname(absolute_name, source_dict)
    if is_file:
        basename = os.path.basename(absolute_name)
        dir_name = os.path.dirname(absolute_name)
    else:
        basename = u""
        dir_name = absolute_name
    # It's hard to imagine a file without a modified timestamp, but such
    # sick files do exist on our Windows NTFS system.
    # When the timestamp is empty, os.path.getmtime returns a value like
    # 46001572830.34235, which breaks the datetime module functions I
    # tried, and also breaks psycopg2's TimestampFromTicks.
    #
    # As protection, fall back to the current time when that happens.
    last_modified_time = psycopg2.TimestampFromTicks(time.time())
    try:
        last_modified_time = psycopg2.TimestampFromTicks(
            os.path.getmtime(EXTENDEDLENGTH + absolute_name))
    except Exception as e:
        logging.exception(e.message)
    file_size = os.path.getsize(EXTENDEDLENGTH + absolute_name)
    insert_or_update(conn, is_insert, absolute_name, unc_name, basename,
                     dir_name, domain_name, owner_name, last_modified_time,
                     file_size, is_file)
def test_timestamp_value_error_sec_59_99(self):
    s = psycopg2.TimestampFromTicks(1273173119.99992)
    self.assertEqual(
        s.adapted,
        datetime(2010, 5, 6, 14, 11, 59, 999920,
                 tzinfo=FixedOffsetTimezone(-5 * 60)))
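The test above guards a rounding edge case: ticks whose fraction is .99992 must keep the seconds field at 59 rather than rounding it up to 60. A small, hypothetical standard-library illustration of the same hazard:

    # Illustration only: coarse rounding would overflow the seconds field.
    import datetime

    ticks = 1273173119.99992
    dt = datetime.datetime.fromtimestamp(ticks, tz=datetime.timezone.utc)
    print(dt.second, dt.microsecond)  # 59, ~999920 -- fraction kept, no overflow
    print(round(ticks % 60, 3))       # 60.0 -- the naive rounding the test avoids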
def convert_TIMESTAMP(ts):
    val = ts.value
    if havemx and isinstance(val, mx.DateTime.DateTimeType):
        return TimestampFromMx(val)
    elif isinstance(val, datetime.datetime):
        return psycopg.TimestampFromTicks(time.mktime(val.timetuple()))
    elif isinstance(val, (int, float, long)):
        return psycopg.TimestampFromTicks(val)
    elif isinstance(val, (tuple, list)) and len(val) == 9:
        return psycopg.TimestampFromTicks(time.mktime(val))
    elif isinstance(val, basestring):
        for f in timestamp_formats:
            try:
                t = time.strptime(val, f)
            except ValueError:
                continue
            else:
                return psycopg.TimestampFromTicks(time.mktime(t))
        else:
            raise ValueError, "cannot parse timestamp format: '%s'" % val
    raise ValueError, val
def update_set_mod_time(self, set_id, deleted):
    self.trans.execute(
        """
        UPDATE draw_set
        SET mod_time = %(time)s, deleted = %(deleted)s
        WHERE set_id = %(set_id)s""", {
            'set_id': set_id,
            'time': psycopg2.TimestampFromTicks(time.time()),
            'deleted': deleted
        })
def add_snapshot(user_id, user_config_id, size):
    if not resources_crud.owns(user_id, user_config_id):
        raise Exception('access denied')
    con = db.get_connection_func()
    cur = con.cursor()
    cur.execute(
        'insert into "snapshot"(create_date, size, user_config_id)'
        ' values (%s, %s, %s)',
        (psycopg2.TimestampFromTicks(time.time()), size, user_config_id))
    con.commit()
    con.close()
def addUrl(url, parent_id=0):
    parsed = urlparse(url)
    # TODO: clear segment
    path = _getPath(parsed)
    websiteId = getWebsiteId(parsed.netloc)
    result = execute(
        "SELECT id FROM urls WHERE website_id = %s AND path = %s;",
        (websiteId, path))
    if result:
        return None
    result = execute(
        "INSERT INTO urls (path, website_id, scanned, parent_id)"
        " VALUES (%s, %s, %s, %s) RETURNING id",
        (path, websiteId, psycopg2.TimestampFromTicks(time()), parent_id))
    save()
    return (result[0], parsed.geturl())
def insert_remark(self, remark, prefix_id):
    self.trans.execute(
        """
        INSERT INTO remark (content, post_time, id, prefix_id, server_id)
        VALUES (%(content)s, %(time)s, (select nextval('remarks_seq')),
                %(prefix_id)s, %(server_id)s)
        """, {
            'content': remark,
            'time': psycopg2.TimestampFromTicks(time.time()),
            'prefix_id': prefix_id,
            'server_id': self.db.server_id
        })
def setUpClass(cls):
    # Monkeypatch Subscriber so sub balance lookups succeed.
    cls.original_subscriber = utilities.subscriber
    cls.mock_subscriber = mocks.MockSubscriber()
    utilities.subscriber = cls.mock_subscriber
    subscriber.create_subscriber('IMSI901550000000084', '5551234')
    subscriber.create_subscriber('IMSI901550000000082', '5551235')
    # Connect to the GPRSDB and EventStore.
    cls.gprs_db = gprs_database.GPRSDB()
    cls.event_store = events.EventStore()
    # Add some records to the GPRSDB.  The method we're testing should
    # extract these records and create events in the EventStore.
    cls.now = time.time()
    records = [
        (psycopg2.TimestampFromTicks(cls.now - 120),
         'IMSI901550000000084', '192.168.99.1', 50, 80, 50, 80),
        (psycopg2.TimestampFromTicks(cls.now - 60),
         'IMSI901550000000084', '192.168.99.1', 50, 80, 0, 0),
        (psycopg2.TimestampFromTicks(cls.now - 30),
         'IMSI901550000000084', '192.168.99.1', 300, 500, 250, 420),
        (psycopg2.TimestampFromTicks(cls.now - 10),
         'IMSI901550000000084', '192.168.99.1', 700, 600, 400, 100),
        (psycopg2.TimestampFromTicks(cls.now - 5),
         'IMSI901550000000084', '192.168.99.1', 750, 625, 50, 25),
        # Create events for a different IMSI.
        (psycopg2.TimestampFromTicks(cls.now - 60),
         'IMSI901550000000082', '192.168.99.2', 50, 80, 0, 0),
        (psycopg2.TimestampFromTicks(cls.now - 10),
         'IMSI901550000000082', '192.168.99.2', 400, 300, 350, 220),
        (psycopg2.TimestampFromTicks(cls.now - 5),
         'IMSI901550000000082', '192.168.99.2', 450, 325, 50, 25),
    ]
    schema = ('record_timestamp, imsi, ipaddr, uploaded_bytes,'
              ' downloaded_bytes, uploaded_bytes_delta,'
              ' downloaded_bytes_delta')
    connection = psycopg2.connect(host='localhost', database='endaga',
                                  user=PG_USER, password=PG_PASSWORD)
    with connection.cursor() as cursor:
        for record in records:
            values = "%s, '%s', '%s', %s, %s, %s, %s" % record
            command = 'insert into gprs_records (%s) values(%s)' % (
                schema, values)
            cursor.execute(command)
    connection.commit()
def remove_param(self, prefix, name, user_id):
    self.trans.execute(
        """
        UPDATE param
        SET deleted = 't', mod_time = %(mod_time)s
        WHERE prefix_id = (SELECT id FROM prefix WHERE prefix = %(prefix)s)
            AND pname = %(pname)s
            AND user_id = %(user_id)s""", {
            'mod_time': psycopg2.TimestampFromTicks(time.time()),
            'prefix': prefix,
            'pname': name,
            'user_id': user_id
        })
    return self.trans.rowcount > 0
def process_node(conn, source_path, source_dict, is_file, is_insert=True):
    fi_absolute_name = os.path.abspath(source_path)
    fi_unc_name = basename_2_uncname(fi_absolute_name, source_dict)
    if is_file:
        fi_name = os.path.basename(fi_absolute_name)
        fi_dir_name = os.path.dirname(fi_absolute_name)
    else:
        fi_name = ""
        fi_dir_name = fi_absolute_name
    fi_last_modified_time = psycopg2.TimestampFromTicks(
        os.path.getmtime(fi_absolute_name))
    fi_size = os.path.getsize(fi_absolute_name)
    try:
        cursor = conn.cursor()
        if is_insert:
            cursor.execute(sql_insert_statement, [
                fi_absolute_name, fi_unc_name, fi_name, fi_dir_name,
                fi_last_modified_time, fi_size, is_file
            ])
        else:
            cursor.execute(sql_update_statement, [
                fi_absolute_name, fi_unc_name, fi_name, fi_dir_name,
                fi_last_modified_time, fi_size, is_file, fi_unc_name
            ])
        conn.commit()
    except psycopg2.DatabaseError as e:
        if e.message.find('invalid byte sequence for encoding "UTF8"') != -1:
            logging.exception(e)
            conn.rollback()
            process_node(conn, source_path.decode("gb18030"), source_dict,
                         is_file)
        elif e.message.find(
                'duplicate key value violates unique constraint') != -1:
            conn.rollback()
            process_node(conn, source_path, source_dict, is_file, False)
        else:
            print e
    finally:
        cursor.close()
def update_param(self, param, param_id, prefix_id, user_id):
    self.trans.execute(
        """
        UPDATE param
        SET prefix_id = %(prefix_id)s, formula = %(formula)s,
            type = %(type)s, unit = %(unit)s, start_date = %(start_date)s,
            prec = %(prec)s, mod_time = %(mod_time)s, deleted = 'f'
        WHERE id = %(id)s AND user_id = %(user_id)s""", {
            'prefix_id': prefix_id,
            'type': param.type,
            'unit': param.unit,
            'start_date': param.start_date,
            'prec': param.prec,
            'formula': param.formula,
            'mod_time': psycopg2.TimestampFromTicks(time.time()),
            'id': param_id,
            'user_id': user_id
        })
def insert_param(self, param, user_id, prefix_id):
    self.trans.execute(
        u"""
        INSERT INTO param (pname, prefix_id, type, unit, formula,
                           start_date, prec, mod_time, user_id)
        VALUES (%(pname)s, %(prefix_id)s, %(type)s, %(unit)s, %(formula)s,
                %(start_date)s, %(prec)s, %(mod_time)s, %(user_id)s)""", {
            'pname': param.name,
            'prefix_id': prefix_id,
            'type': param.type,
            'unit': param.unit,
            'formula': param.formula,
            'start_date': param.start_date,
            'prec': param.prec,
            'mod_time': psycopg2.TimestampFromTicks(time.time()),
            'user_id': user_id
        })
def insertConnection(self, tableName, attackerIP, attackerPort, victimIP, victimPort, vulnName):
    try:
        ### check for already existing entry
        query = "SELECT id FROM honeypot_amun.amun_connections WHERE hostileip='%s' AND targetip='%s' AND targetport='%s'" % (attackerIP, victimIP, victimPort)
        result = self.query(query)
        if result and (len(result) > 0):
            ### existing connection
            updateID = str(result[0][0])
            query = "UPDATE honeypot_amun.amun_connections SET count=count+1 WHERE id='%s' RETURNING count" % (updateID)
            self.query(query)
        else:
            ### new connection
            curTimestamp = psycopg2.TimestampFromTicks(time.time())
            query = "INSERT INTO honeypot_amun.amun_connections (timestamp,hostileip,hostileport,targetip,targetport,DialogueName) VALUES (%s,'%s','%s','%s','%s','%s') RETURNING id" % (curTimestamp, attackerIP, attackerPort, victimIP, victimPort, vulnName)
            self.query(query)
        ### return from insert connection
        return True
    except KeyboardInterrupt:
        raise
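The queries above splice values into the SQL with % formatting. A hypothetical parameterized rewrite of the new-connection insert, assuming a plain cursor on the same connection the class's query() helper wraps, would let psycopg2 do the quoting, including for the TimestampFromTicks value:

    # Sketch only: bound parameters instead of string formatting.
    cursor.execute(
        "INSERT INTO honeypot_amun.amun_connections"
        " (timestamp, hostileip, hostileport, targetip, targetport, DialogueName)"
        " VALUES (%s, %s, %s, %s, %s, %s) RETURNING id",
        (psycopg2.TimestampFromTicks(time.time()), attackerIP, attackerPort,
         victimIP, victimPort, vulnName))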
def insertBinary(self, tableName, md5hash, attIP, victimIP, downURL, file_data_length, file_data):
    try:
        ### current timestamp
        curTimestamp = psycopg2.TimestampFromTicks(time.time())
        ### check if binary exists
        query = "SELECT id, md5hash FROM honeypot_amun.amun_storage WHERE md5hash='%s'" % (md5hash)
        result = self.query(query)
        if len(result) > 0:
            ### binary exists
            binaryID = str(result[0][0])
            query = "SELECT eventid FROM honeypot_amun.amun_hits WHERE hostileip='%s' AND targetip='%s' AND downurl='%s'" % (attIP, victimIP, MySQLdb.escape_string(downURL))
            result = self.query(query)
            if len(result) > 0:
                ### entry exists
                pass
            else:
                ### create incident entry
                query = "INSERT INTO honeypot_amun.amun_hits (hostileip, targetip, timestamp, downurl, binaryid) VALUES ('%s','%s','%s','%s','%s') RETURNING id" % (attIP, victimIP, curTimestamp, MySQLdb.escape_string(downURL), binaryID)
                self.query(query)
        else:
            ### new binary
            query = "INSERT INTO honeypot_amun.amun_hits (hostileip,targetip,timestamp,downurl) VALUES ('%s','%s','%s','%s') RETURNING id" % (attIP, victimIP, curTimestamp, MySQLdb.escape_string(downURL))
            self.query(query)
            ### insert common data
            query = "INSERT INTO honeypot_amun.amun_storage (md5hash,filesize,comment) VALUES ('%s','%s','None') RETURNING id" % (md5hash, file_data_length)
            result = self.query(query)
            ### id of last insert
            eventID = result[0][0]
            query = "UPDATE honeypot_amun.amun_hits SET binaryid='%s' WHERE eventid='%s' RETURNING eventid" % (eventID, eventID)
            self.query(query)
            if file_data_length < 2000000:
                encodedBin = base64.encodestring(file_data)
                query = "INSERT INTO honeypot_amun.amun_binaries (id,binary_data) VALUES ('%s','%s') RETURNING id" % (eventID, MySQLdb.escape_string(encodedBin))
                self.query(query)
                query = "INSERT INTO honeypot_amun.amun_cwsandbox (id,cwanalyse,flag,comment) VALUES ('%s','None','0','None') RETURNING id" % (eventID)
                self.query(query)
            else:
                query = "UPDATE honeypot_amun.amun_storage SET comment='binary too big' WHERE id='%s' RETURNING id" % (eventID)
                self.query(query)
    except KeyboardInterrupt:
        raise
def run(self):
    self.log.info("Thread %s started" % self.num)
    while self.kill is False:
        try:
            data = self.queue.pop()
            self.cur.execute(
                "INSERT INTO collectd_data (type,type_instance,plugin,plugin_instance,host,time,values) VALUES (%s,%s,%s,%s,%s,%s,%s);",
                (data.type, data.type_instance, data.plugin,
                 data.plugin_instance, data.host,
                 psycopg2.TimestampFromTicks(data.time), data.values))
        except IndexError:
            sleep(0.1)
        except Exception as error:
            self.log.critical("Got error while writing to database")
            self.log.critical("%s" % type(error))
            self.log.critical("%s" % error.message)
            self.kill = True
    self.conn.close()
    self.log.debug("Successfully disconnected from database")
    self.log.info("Thread %s stopped" % self.num)
def db_TimestampFromTicks(*args, **kwargs):
    return psycopg2.TimestampFromTicks(*args, **kwargs)
def run(self):
    self.h.newpin("beat", hal.HAL_BIT, hal.HAL_OUT)
    self.h.newpin("error", hal.HAL_BIT, hal.HAL_OUT)
    self.h.newpin("enable", hal.HAL_BIT, hal.HAL_IN)
    self.h.newpin("line", hal.HAL_S32, hal.HAL_IN)
    self.h.newpin("i11", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i12", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i13", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i21", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i22", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i23", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i31", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i32", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.newpin("i33", hal.HAL_FLOAT, hal.HAL_IN)
    self.h.ready()
    try:
        while 1:
            try:
                self.h['beat'] = not self.h['beat']
                self.stat.poll()
                #for x in dir(self.stat):
                #    if not x.startswith('_'):
                #        print x, getattr(self.stat, x)
                t = psycopg2.TimestampFromTicks(time.time())
                tm = getattr(self.stat, "task_mode")
                f = getattr(self.stat, "file")
                l = self.h["line"]
                x_min = self.h["i11"]
                x_max = self.h["i12"]
                x_avg = self.h["i13"]
                y_min = self.h["i21"]
                y_max = self.h["i22"]
                y_avg = self.h["i23"]
                z_min = self.h["i31"]
                z_max = self.h["i32"]
                z_avg = self.h["i33"]
                self.cur.execute(self.log_insert,
                                 (t, tm, f, l, x_min, x_max, x_avg,
                                  y_min, y_max, y_avg, z_min, z_max, z_avg))
                self.con.commit()
                time.sleep(work_thread)
            except KeyboardInterrupt:
                self.con.close()
                raise SystemExit
            except Exception, e:
                print str(e)
                self.h["error"] = False
            else:
                self.h["error"] = True
    except KeyboardInterrupt:
        self.con.close()
        raise SystemExit
_converters = {
    datetime.datetime:
        lambda x: psycopg.TimestampFromTicks(time.mktime(x.timetuple())),
    datetime.date: lambda x: psycopg.Date(x.year, x.month, x.day),
    DATE: convert_DATE,
    TIMESTAMP: convert_TIMESTAMP,
    BINARY: lambda x: psycopg.Binary(x.value),
    INTERVAL: lambda x: x.value
}

if havemx:
    # add automatic wrapping for mx.DateTime types
    _converters[mx.DateTime.DateTimeType] = TimestampFromMx
def TimestampFromMx(x):
    return psycopg.TimestampFromTicks(x.ticks())