def test_init_with_timedelta(self):
    """A timedelta offset must behave exactly like the same offset in minutes."""
    delta = timedelta(minutes=5 * 60)
    zone = FixedOffsetTimezone(delta)
    # Equivalent to constructing from the minute count directly.
    self.assertEqual(zone, FixedOffsetTimezone(5 * 60))
    expected_repr = (
        "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=None)" % delta)
    self.assertEqual(repr(zone), expected_repr)
def validate_time(s, loc, toks):
    """pyparsing parse action: turn a datetime token into an aware datetime
    plus its precision.

    :param s: the original string being parsed (unused, required by pyparsing)
    :param loc: the match location (unused, required by pyparsing)
    :param toks: parsed tokens; ``toks[0]`` is the datetime string
    :return: ``datetime_precision(aware_datetime, precision)``
    :raise RestInputValidationError: if the string cannot be parsed
    """
    datetime_string = toks[0]

    # Precision = number of ':'-separated fields once the date/time
    # separator 'T' is normalized to ':' (date only -> 1, up to seconds -> 4).
    precision = len(datetime_string.replace('T', ':').split(':'))

    # Parse
    try:
        dt = dtparser.parse(datetime_string)
    except ValueError:
        raise RestInputValidationError("time value has wrong format. The "
                                       "right format is "
                                       "<date>T<time><offset>, "
                                       "where <date> is expressed as "
                                       "[YYYY]-[MM]-[DD], "
                                       "<time> is expressed as [HH]:[MM]:["
                                       "SS], "
                                       "<offset> is expressed as +/-[HH]:["
                                       "MM] "
                                       "given with "
                                       "respect to UTC")

    # FIX: a tzinfo object may be present while its utcoffset() is still
    # None; the original called dt.tzinfo.utcoffset(None).total_seconds()
    # unconditionally, which raises AttributeError in that case. Floor
    # division keeps negative offsets on minute boundaries.
    if dt.tzinfo is not None and dt.utcoffset() is not None:
        tzoffset_minutes = int(dt.utcoffset().total_seconds() // 60)
    else:
        tzoffset_minutes = 0
    return datetime_precision(
        dt.replace(tzinfo=FixedOffsetTimezone(offset=tzoffset_minutes,
                                              name=None)),
        precision)
def test_time_24(self):
    """PostgreSQL's special '24:00' time must map to midnight."""
    cases = [
        ("select '24:00'::time;", time(0, 0)),
        ("select '24:00+05'::timetz;",
         time(0, 0, tzinfo=FixedOffsetTimezone(300))),
        ("select '24:00+05:30'::timetz;",
         time(0, 0, tzinfo=FixedOffsetTimezone(330))),
    ]
    for query, expected in cases:
        self.assertEqual(self.execute(query), expected)
def test_instance_caching(self):
    """Identical constructor arguments must yield the very same object."""
    # Same (offset, name) pair -> cached, identical instance.
    self.assertTrue(
        FixedOffsetTimezone(name="FOO") is FixedOffsetTimezone(name="FOO"))
    self.assertTrue(
        FixedOffsetTimezone(7 * 60) is FixedOffsetTimezone(7 * 60))
    self.assertTrue(
        FixedOffsetTimezone(-9 * 60, 'FOO')
        is FixedOffsetTimezone(-9 * 60, 'FOO'))
    # A differing name or offset -> distinct instances.
    self.assertTrue(
        FixedOffsetTimezone(9 * 60)
        is not FixedOffsetTimezone(9 * 60, 'FOO'))
    self.assertTrue(
        FixedOffsetTimezone(name='FOO')
        is not FixedOffsetTimezone(9 * 60, 'FOO'))
def test_pickle(self):
    # ticket #135: timezones (anonymous and named) must survive a pickle
    # round trip under every protocol.
    for name_a, name_b in [(None, None), ('foo', 'bar')]:
        zone_a = FixedOffsetTimezone(60, name=name_a)
        zone_b = FixedOffsetTimezone(120, name=name_b)
        for proto in [-1, 0, 1, 2]:
            restored = pickle.loads(pickle.dumps([zone_a, zone_b], proto))
            self.assertEqual(zone_a, restored[0])
            self.assertEqual(zone_b, restored[1])
def test_adapt_infinity_tz(self):
    """'infinity' must come back as an extreme (aware, for tz types) value."""
    # Naive timestamps: +/-infinity map to far-future / far-past naive values.
    value = self.execute("select 'infinity'::timestamp")
    self.assert_(value.tzinfo is None)
    self.assert_(value > datetime(4000, 1, 1))

    value = self.execute("select '-infinity'::timestamp")
    self.assert_(value.tzinfo is None)
    self.assert_(value < datetime(1000, 1, 1))

    # Aware timestamps: same extremes, but carrying timezone info.
    value = self.execute("select 'infinity'::timestamptz")
    self.assert_(value.tzinfo is not None)
    self.assert_(value > datetime(4000, 1, 1, tzinfo=FixedOffsetTimezone()))

    value = self.execute("select '-infinity'::timestamptz")
    self.assert_(value.tzinfo is not None)
    self.assert_(value < datetime(1000, 1, 1, tzinfo=FixedOffsetTimezone()))
def test_timestamp_value_error_sec_59_99(self):
    # A tick value just below the next whole second must keep the .99992
    # fraction as microseconds instead of rounding into an invalid second 60.
    adapter = psycopg2.TimestampFromTicks(1273173119.99992)
    expected = datetime(2010, 5, 6, 14, 11, 59, 999920,
                        tzinfo=FixedOffsetTimezone(-5 * 60))
    self.assertEqual(adapter.adapted, expected)
def mock_fun(*_, **__):
    """Return one canned lock-activity row, ignoring all arguments."""
    started = datetime.datetime(2021, 8, 5, 8, 8, 7, 774124,
                                tzinfo=FixedOffsetTimezone(offset=0,
                                                           name=None))
    waited = datetime.timedelta(seconds=32, microseconds=983505)
    row = (PID, "vulnerability", "postgres", "psql", started, waited,
           "Lock", "relation", "select * from system_platform;")
    return [row]
def validate_time(toks):
    """
    Convert a datetime string into a timezone-aware datetime object.

    The signature is compliant with ParseAction requirements.

    :param toks: datetime string passed in tokens; ``toks[0]`` is the string
    :return: ``DatetimePrecision`` wrapping the aware datetime
    :raise RestInputValidationError: when the string cannot be parsed
    """
    datetime_string = toks[0]

    # Precision = number of ':'-separated fields once the 'T' date/time
    # separator is mapped onto ':'.
    precision = len(datetime_string.replace('T', ':').split(':'))

    try:
        dtobj = dtparser.parse(datetime_string)
    except ValueError:
        raise RestInputValidationError(
            'time value has wrong format. The '
            'right format is '
            '<date>T<time><offset>, '
            'where <date> is expressed as '
            '[YYYY]-[MM]-[DD], '
            '<time> is expressed as [HH]:[MM]:['
            'SS], '
            '<offset> is expressed as +/-[HH]:['
            'MM] '
            'given with '
            'respect to UTC')

    # Default to UTC when no (usable) offset was given.
    if dtobj.tzinfo is None or dtobj.utcoffset() is None:
        offset_minutes = 0
    else:
        offset_minutes = int(dtobj.utcoffset().total_seconds() // 60)
    aware = dtobj.replace(
        tzinfo=FixedOffsetTimezone(offset=offset_minutes, name=None))
    return DatetimePrecision(aware, precision)
def _count_publication_year(self):
    """Tally records by publication year, expressed in LOCAL_TZINFO.

    Returns a list of (year, count) pairs in reverse chronological order.
    NOTE(review): Python 2 only code (``unicode`` and a ``cmp``-style sort).
    """
    counts = {}
    for rec in self._records:
        date = rec['pubDate']
        if date is None:
            # Records without a publication date are skipped entirely.
            continue
        # pubDate is an ISO-8601 UTC string ("...Z"); attach a zero-offset
        # tzinfo so astimezone() below can convert it to local time.
        date = datetime(*strptime(date, "%Y-%m-%dT%H:%M:%SZ")[:6],
                        tzinfo=FixedOffsetTimezone())
        year = unicode(date.astimezone(LOCAL_TZINFO).year)
        counts.setdefault(year, 0)
        counts[year] += 1
    counts = counts.items()
    # Sort pubYear in reverse chronological order
    counts.sort(lambda a, b: cmp(a[0], b[0]), reverse=True)
    return counts
def check_datetime_tz(self, str_offset, offset):
    """Parse a fixed timestamp with *str_offset* appended and verify that
    the resulting value carries the correct UTC offset.

    :param str_offset: textual offset suffix, e.g. ``'+05:30'``
    :param offset: the same offset expressed in seconds
    """
    base = datetime(2007, 1, 1, 13, 30, 29)
    base_str = '2007-01-01 13:30:29'

    value = self.DATETIME(base_str + str_offset, self.curs)

    # Value has time zone info and correct UTC offset.
    # FIX: the original line ended with a stray trailing comma, which turned
    # the assertion statement into a 1-tuple expression.
    self.assertNotEqual(value.tzinfo, None)
    self.assertEqual(value.utcoffset(), timedelta(seconds=offset))

    # Datetime is correct.
    self.assertEqual(value.replace(tzinfo=None), base)

    # Conversion to UTC produces the expected offset.
    UTC = FixedOffsetTimezone(0, "UTC")
    value_utc = value.astimezone(UTC).replace(tzinfo=None)
    self.assertEqual(base - value_utc, timedelta(seconds=offset))
def parse_timestamp(ts):
    """
    Take a timestamp in format "%Y-%m-%dT%H:%M:%S(.%f)", with or without
    microseconds, and return it as a UTC-aware datetime.datetime.

    (FIX: the original docstring advertised the bogus format "(.$d)".)

    :param ts: the timestamp string to parse
    :raise ValueError: if neither format matches
    """
    # Try the more specific format (with microseconds) first, then fall
    # back to the plain one; remember the last failure for the error text.
    last_error = None
    for fmt in ("%Y-%m-%dT%H:%M:%S.%f", "%Y-%m-%dT%H:%M:%S"):
        try:
            dt = datetime.datetime.strptime(ts, fmt)
            break
        except ValueError as e:
            last_error = e
    else:
        raise ValueError("could not parse timestamp - %s" % str(last_error))
    return dt.replace(tzinfo=FixedOffsetTimezone(offset=0, name=None))
def password_timeout_tween(request):
    """Verify the last login timestamp is still valid.

    If the user's last login activity is older than the password validity
    window for the requested domain, log the user out and (for OpenID
    flows) stash the pending request in the session so it can resume.
    """
    logger = logging.getLogger('speakfriend.password_timeout_tween')
    # `handler` is the next tween/app in the chain (closure variable from
    # the enclosing tween factory).
    response = handler(request)
    if not request.user:
        # Anonymous request: nothing to time out.
        return response
    cp = ControlPanel(request)
    domain_name = get_domain(request)
    domain = DomainProfile.apply_wildcard(request.db_session, domain_name)
    # Validity window: per-domain value when configured, global max otherwise.
    if domain:
        pw_valid = timedelta(minutes=domain.get_password_valid(cp))
    else:
        pw_valid = timedelta(minutes=MAX_PASSWORD_VALID)
    now = datetime.utcnow()
    # Make the naive UTC "now" comparable with aware DB timestamps.
    utc_now = now.replace(tzinfo=FixedOffsetTimezone(offset=0))
    try:
        last_login = request.user.last_login(request.db_session)
    except DetachedInstanceError:
        # The user object fell out of the SQLAlchemy session; re-attach
        # it and retry the lookup.
        request.db_session.add(request.user)
        last_login = request.user.last_login(request.db_session)
    if last_login and last_login.activity_ts + pw_valid < utc_now:
        msg = 'You must log in again to be returned to: %s' % domain_name
        request.session.flash(msg, queue='error')
        logger.info('Password validity time out: %r, %r, %s',
                    request.user, last_login, pw_valid)
        response = logout(request, request.route_url('home'))
        if 'openid.mode' in request.params:
            # Preserve the in-flight OpenID request across the forced logout.
            rp_dict = dict(request.params.items())
            request.session['openid_request'] = rp_dict
            request.session.save()
    return response
def setUpSharedData(cls):
    """Create the timezone, reference dates, locations, goods type and a
    planned Arrival operation shared by the tests of this class."""
    tz = cls.tz = FixedOffsetTimezone(0)
    # Three consecutive reference dates, all UTC.
    cls.dt_test1 = datetime(2018, 1, 1, tzinfo=tz)
    cls.dt_test2 = datetime(2018, 1, 2, tzinfo=tz)
    cls.dt_test3 = datetime(2018, 1, 3, tzinfo=tz)
    Operation = cls.Operation
    PhysObj = cls.PhysObj
    cls.physobj_type = PhysObj.Type.insert(label="My good type",
                                           code='MyGT')
    cls.create_location_type()
    cls.incoming_loc = cls.cls_insert_location('INCOMING')
    cls.stock = cls.cls_insert_location('STOCK')

    # A planned Arrival of one object of the type above into INCOMING,
    # scheduled for the first reference date.
    cls.arrival = Operation.Arrival.create(goods_type=cls.physobj_type,
                                           location=cls.incoming_loc,
                                           state='planned',
                                           dt_execution=cls.dt_test1,
                                           **cls.arrival_kwargs)

    # The arrival is expected to produce exactly one outcome (avatar).
    assert len(cls.arrival.outcomes) == 1
    cls.avatar = cls.arrival.outcomes[0]
    cls.physobj = cls.avatar.obj
    cls.Avatar = cls.PhysObj.Avatar
def test_replication_status(self, capsys): """ Test management of pg_stat_archiver view output :param MagicMock connect_mock: mock the database connection :param capsys: retrieve output from consolle """ # Build a fake get_replication_stats record replication_stats_data = dict( pid=93275, usesysid=10, usename='postgres', application_name='replica', client_addr=None, client_hostname=None, client_port=-1, slot_name=None, backend_start=datetime.datetime( 2016, 5, 6, 9, 29, 20, 98534, tzinfo=FixedOffsetTimezone(offset=120)), backend_xmin='940', state='streaming', sent_location='0/3005FF0', write_location='0/3005FF0', flush_location='0/3005FF0', replay_location='0/3005FF0', current_location='0/3005FF0', sync_priority=0, sync_state='async' ) replication_stats_class = namedtuple("Record", replication_stats_data.keys()) replication_stats_record = replication_stats_class( **replication_stats_data) # Prepare the server server = build_real_server(main_conf={'archiver': 'on'}) server.postgres = MagicMock() server.postgres.get_replication_stats.return_value = [ replication_stats_record] server.postgres.current_xlog_location = "AB/CDEF1234" # Execute the test (ALL) server.postgres.reset_mock() server.replication_status('all') (out, err) = capsys.readouterr() assert err == '' server.postgres.get_replication_stats.assert_called_once_with( PostgreSQLConnection.ANY_STREAMING_CLIENT) # Execute the test (WALSTREAMER) server.postgres.reset_mock() server.replication_status('wal-streamer') (out, err) = capsys.readouterr() assert err == '' server.postgres.get_replication_stats.assert_called_once_with( PostgreSQLConnection.WALSTREAMER) # Execute the test (failure: PostgreSQL too old) server.postgres.reset_mock() server.postgres.get_replication_stats.side_effect = \ PostgresUnsupportedFeature('9.1') server.replication_status('all') (out, err) = capsys.readouterr() assert 'Requires PostgreSQL 9.1 or higher' in out assert err == '' 
server.postgres.get_replication_stats.assert_called_once_with( PostgreSQLConnection.ANY_STREAMING_CLIENT) # Execute the test (failure: superuser required) server.postgres.reset_mock() server.postgres.get_replication_stats.side_effect = \ PostgresSuperuserRequired server.replication_status('all') (out, err) = capsys.readouterr() assert 'Requires superuser rights' in out assert err == '' server.postgres.get_replication_stats.assert_called_once_with( PostgreSQLConnection.ANY_STREAMING_CLIENT)
class TestEditor:
    """Tests for editor lookups, using hand-inserted fixture rows."""

    # A fixed aware timestamp reused for every date column of the fixtures.
    editor_dt = datetime(2014, 12, 1, 14, 6, 42, 321443,
                         tzinfo=FixedOffsetTimezone(offset=0, name=None))

    # Row as inserted into the editors table.
    editor_1 = dict(id=2323, name="Editor 1", privs=0,
                    member_since=editor_dt, email_confirm_date=editor_dt,
                    last_login_date=editor_dt, last_updated=editor_dt,
                    deleted=False, password="******",
                    ha1="3f3edade87115ce351d63f42d92a1834")

    # What the public API is expected to return for editor_1 (note: no
    # password/ha1 fields are exposed).
    expected_editor_1 = {
        'area': None,
        'bio': None,
        'birth_date': None,
        'deleted': False,
        'email': None,
        'email_confirm_date': editor_dt,
        'gender': None,
        'id': 2323,
        'last_login_date': editor_dt,
        'last_updated': editor_dt,
        'member_since': editor_dt,
        'name': 'Editor 1',
        'privs': 0,
        'website': None
    }

    editor_2 = dict(id=2324, name="Editor 2", privs=3,
                    email="*****@*****.**", website="example.com",
                    bio="Random\neditor", member_since=editor_dt,
                    email_confirm_date=editor_dt, last_login_date=editor_dt,
                    last_updated=editor_dt, deleted=False, area=None,
                    password="******",
                    ha1="3f3edade87115ce351d63f42d92a1834")

    expected_editor_2 = {
        "id": 2324,
        "name": "Editor 2",
        "privs": 3,
        "email": "*****@*****.**",
        "website": "example.com",
        "bio": "Random\neditor",
        "member_since": editor_dt,
        "email_confirm_date": editor_dt,
        "last_login_date": editor_dt,
        "last_updated": editor_dt,
        "birth_date": None,
        "deleted": False,
        "gender": None,
        "area": None,
    }

    def test_get_by_id(self, session):
        """A single editor fetched by id must match the expected mapping."""
        # Manually adding and deleting data in tests can get tedious. However, we have only two tests for which this is
        # needed. In case in future we need to add more tests where the test database needs to be modified, we should
        # explore other alternatives to ease the process.
        with session as db:
            # The editors table in test database has many empty columns and fields like last_login_date may change with
            # new dump.
            insert_editor_1 = Editor(**TestEditor.editor_1)
            db.add(insert_editor_1)
            db.commit()
            try:
                editor = mb_editor.get_editor_by_id(2323)
                assert editor == TestEditor.expected_editor_1
            finally:
                # regardless whether the assertion fails or passes, delete the inserted editor to prevent side effects
                # on subsequent tests
                db.delete(insert_editor_1)
                db.commit()

    def test_fetch_multiple_editors(self, session):
        """Fetching several ids must return a mapping keyed by editor id."""
        # Manually adding and deleting data in tests can get tedious. However, we have only two tests for which this is
        # needed. In case in future we need to add more tests where the test database needs to be modified, we should
        # explore other alternatives to ease the process.
        with session as db:
            # The editors table in test database has many empty columns and fields like last_login_date may change with
            # new dump.
            insert_editor_1 = Editor(**TestEditor.editor_1)
            insert_editor_2 = Editor(**TestEditor.editor_2)
            db.add(insert_editor_1)
            db.add(insert_editor_2)
            db.commit()
            try:
                editors = mb_editor.fetch_multiple_editors([2323, 2324])
                assert editors[2323] == TestEditor.expected_editor_1
                assert editors[2324] == TestEditor.expected_editor_2
            finally:
                # regardless whether the assertion fails or passes, delete the inserted editor to prevent side effects
                # on subsequent tests
                db.delete(insert_editor_1)
                db.delete(insert_editor_2)
                db.commit()

    def test_fetch_multiple_editors_empty(self, engine):
        """Unknown ids must yield an empty mapping, not an error."""
        editors = mb_editor.fetch_multiple_editors(
            [2323, 2324],
        )
        assert editors == {}
def test_repr_with_negative_offset(self):
    """repr() must render a negative offset as its timedelta repr."""
    zone = FixedOffsetTimezone(-5 * 60)
    expected = ("psycopg2.tz.FixedOffsetTimezone(offset=%r, name=None)"
                % timedelta(minutes=-5 * 60))
    self.assertEqual(repr(zone), expected)
def test_repr_with_name(self):
    """repr() must show the given name alongside the default zero offset."""
    zone = FixedOffsetTimezone(name="FOO")
    self.assertEqual(
        repr(zone),
        "psycopg2.tz.FixedOffsetTimezone(offset=0, name='FOO')")
def test_repr_with_negative_offset(self):
    """repr() must render a negative minute offset verbatim."""
    zone = FixedOffsetTimezone(-5 * 60)
    expected = "psycopg2.tz.FixedOffsetTimezone(offset=-300, name=None)"
    self.assertEqual(repr(zone), expected)
def test_init_with_no_args(self):
    """Default construction: zero offset and no name."""
    zone = FixedOffsetTimezone()
    self.assertTrue(zone._offset is ZERO)
    self.assertTrue(zone._name is None)
def test_type_roundtrip_datetimetz_array(self):
    """An aware datetime must survive an array round trip."""
    from datetime import datetime
    aware = datetime(2010, 5, 3, 10, 20, 30,
                     tzinfo=FixedOffsetTimezone(0))
    self._test_type_roundtrip_array(aware)
FFI bindings to the OpenSSL library. Private key encryption is non-standard operation and Python packages either don't offer it at all, or it's incompatible with the PHP version. The backend argument MUST be the OpenSSL cryptography backend. """ length = backend._lib.EVP_PKEY_size(key._evp_pkey) buffer = backend._ffi.new('unsigned char[]', length) result = backend._lib.RSA_private_encrypt( len(data), data, buffer, backend._lib.EVP_PKEY_get1_RSA(key._evp_pkey), backend._lib.RSA_PKCS1_PADDING) backend.openssl_assert(result == length) return backend._ffi.buffer(buffer)[:] UTC = FixedOffsetTimezone(offset=0) def milliseconds_to_datetime(m): if m is None: return None return datetime.utcfromtimestamp(m / 1000).replace(tzinfo=UTC) HEAD_TEMPLATE = """\ <script type="text/javascript"> var fscSession = {{ session|tojson }}; {% if webhook %} window.onbeforeunload = confirmExit; function confirmExit() { return "You have attempted to leave this page. Are you sure?";
def test_pubYear_w_timezone(self, cursor): """Verify the use of pubYear with timestamps that occur 12/31 or 1/1.""" # See also https://github.com/Connexions/cnx-archive/issues/249 # Change the local tzinfo to be near 'America/Whitehorse'. from .. import search self.addCleanup(setattr, search, 'LOCAL_TZINFO', search.LOCAL_TZINFO) from psycopg2.tz import FixedOffsetTimezone local_tz = FixedOffsetTimezone(-8 * 60) setattr(search, 'LOCAL_TZINFO', local_tz) # Modify some modules to give them different year of publication. # All these dates occur in 2020 according to the system time zone. pub_year_mods = [ ( 'e79ffde3-7fb4-4af3-9ec8-df648b391597', # Almost 2021 somewhere in mid-USA datetime.datetime(2020, 12, 31, 23, 5, 0, tzinfo=FixedOffsetTimezone(-6 * 60)), ), ( '209deb1f-1a46-4369-9e0d-18674cf58a3e', # Just turned 2021 somewhere in mid-USA datetime.datetime(2021, 1, 1, 0, 5, 0, tzinfo=FixedOffsetTimezone(-6 * 60)), ), ( 'f3c9ab70-a916-4d8c-9256-42953287b4e9', # Almost 2020 in Alaska datetime.datetime(2019, 12, 31, 23, 5, 0, tzinfo=FixedOffsetTimezone(-10 * 60)), ), ] for id, date in pub_year_mods: cursor.execute( "UPDATE latest_modules " "SET revised = %s " "WHERE uuid = %s", ( date, id, )) cursor.connection.commit() query_params = [('pubYear', '2020')] results = self.call_target(query_params) self.assertEqual(results.counts['pubYear'], [(u'2020', 3)])
# Top-level demo script (Python 2: uses print statements).
# Exercises inserting and reading back "timestamp with time zone" values.
curs = conn.cursor()
try:
    curs.execute("CREATE TABLE test_tz (t timestamp with time zone)")
except:
    # Table already exists: drop and recreate it for a clean run.
    conn.rollback()
    curs.execute("DROP TABLE test_tz")
    curs.execute("CREATE TABLE test_tz (t timestamp with time zone)")
conn.commit()

# Insert using the local timezone object (LOCAL defined elsewhere).
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=LOCAL)
curs.execute("INSERT INTO test_tz VALUES (%s)", (d, ))
print "Inserted timestamp with timezone:", d
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)

# Insert using an explicit fixed offset (US Eastern, -5 hours).
tz = FixedOffsetTimezone(-5 * 60, "EST")
d = datetime.datetime(1971, 10, 19, 22, 30, 0, tzinfo=tz)
curs.execute("INSERT INTO test_tz VALUES (%s)", (d, ))
print "Inserted timestamp with timezone:", d
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)

# Round trip: read a value back and re-insert it unchanged.
curs.execute("SELECT * FROM test_tz")
d = curs.fetchone()[0]
curs.execute("INSERT INTO test_tz VALUES (%s)", (d, ))
print "Inserted SELECTed timestamp:", d
print "Time zone:", d.tzinfo.tzname(d), "offset:", d.tzinfo.utcoffset(d)

# Print every stored value normalized to UTC.
curs.execute("SELECT * FROM test_tz")
for d in curs:
    u = d[0].utcoffset() or ZERO
    print "UTC time: ", d[0] - u
# # This is a dump of the wofs_summary overview for all-time. # It was consistently failing because the valid footprint becomes invalid when # projected to wgs84. # (perhaps the non-footprint data can be thrown away in the future if too unmaintable) # wofs_time_summary = TimePeriodOverview( product_name="wofs_summary", year=None, month=None, day=None, dataset_count=1244, timeline_dataset_counts=Counter( { datetime( 1970, 1, 1, 0, 0, tzinfo=FixedOffsetTimezone(offset=600, name=None) ): 1244, datetime( 1970, 1, 2, 0, 0, tzinfo=FixedOffsetTimezone(offset=600, name=None) ): 0, datetime( 1970, 1, 3, 0, 0, tzinfo=FixedOffsetTimezone(offset=600, name=None) ): 0, datetime( 1970, 1, 4, 0, 0, tzinfo=FixedOffsetTimezone(offset=600, name=None) ): 0, datetime( 1970, 1, 5, 0, 0, tzinfo=FixedOffsetTimezone(offset=600, name=None) ): 0, datetime( 1970, 1, 6, 0, 0, tzinfo=FixedOffsetTimezone(offset=600, name=None)
def setUp(self):
    """Pin a UTC timezone and three consecutive reference dates."""
    zone = self.tz = FixedOffsetTimezone(0)
    self.dt_test1 = datetime(2018, 1, 1, tzinfo=zone)
    self.dt_test2 = datetime(2018, 1, 2, tzinfo=zone)
    self.dt_test3 = datetime(2018, 1, 3, tzinfo=zone)
sp = spectrum[np.where(spectrum > 0)] smin, smax = np.percentile(sp, (0.1, 99.9)) # skip bad files (which will have bad timestamps in spectrogram file) tt = data['Start_time_day'][0,:] - 367 # convert to offset from 0001-01-01 igood = np.where(tt > 0)[0] if (len(igood) < nRec): sys.stderr.write('\nWARNING: Ignoring %d bad recordings!\n\n' % (nRec-len(igood))) spectrum = spectrum[:,igood] recName = recName[igood] tt = tt[igood] nRec = len(igood) # convert time from datestr(0) in Matlab to datetime time = [] tzUTC = FixedOffsetTimezone(offset=0, name='UTC') for t in tt: time.append( datetime(1,1,1, tzinfo=tzUTC) + timedelta(t) ) # open files to log details, and lists to store them for later write to db specLog = open('spec.log', 'w') recLog = open('rec.log', 'w') specInfo = [] recInfo = {} # save chunks of spectrum in images print('Creating daily chunks') iStart = 0 day = 0 while iStart < nRec:
def test_type_roundtrip_datetimetz(self):
    """An aware datetime must come back aware and equal after a round trip."""
    zone = FixedOffsetTimezone(8 * 60)
    original = datetime(2010, 5, 3, 10, 20, 30, tzinfo=zone)
    returned = self._test_type_roundtrip(original)
    self.assertNotEqual(None, returned.tzinfo)
    self.assertEqual(original, returned)
def test_type_roundtrip_timetz(self):
    """An aware time must come back aware and equal after a round trip."""
    zone = FixedOffsetTimezone(8 * 60)
    original = time(10, 20, 30, tzinfo=zone)
    returned = self._test_type_roundtrip(original)
    self.assertNotEqual(None, returned.tzinfo)
    self.assertEqual(original, returned)