def test_models_restriction_insert_many_updated_data(self, mock_moment):
    """Re-inserting crawled data keeps untouched rows and updates the changed one."""
    def snapshot():
        # Collection contents ordered by 'fecha', newest first.
        cursor = self.mongo_db[RestrictionReport.get_mongo_collection()].find(
            {'$query': {}, '$orderby': {'fecha': -1}})
        return [row for row in cursor]

    # First data
    mock_moment.side_effect = lambda: moment.utc('2015-06-22T00:00:00', '%Y-%m-%dT%H:%M:%S')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_1.html')
    RestrictionReport.insert_many(self.mongo_db, crawler.parse()['restriction'])
    first_entries = snapshot()

    # Modified data
    mock_moment.side_effect = lambda: moment.utc('2015-06-22T01:00:00', '%Y-%m-%dT%H:%M:%S')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_2.html')
    RestrictionReport.insert_many(self.mongo_db, crawler.parse()['restriction'])
    second_entries = snapshot()

    # Keep old data
    self.assertEqual(first_entries[0], second_entries[0])
    self.assertEqual(first_entries[2:], second_entries[2:])

    # Check updated: identity fields unchanged, content fields refreshed.
    for key in ('_id', 'fecha', 'sin_sello_verde', 'fuente', 'ciudad'):
        self.assertEqual(first_entries[1][key], second_entries[1][key])
    for key in ('hash', 'con_sello_verde', 'actualizacion'):
        self.assertNotEqual(first_entries[1][key], second_entries[1][key])
def test_crawler_uoct_parse_current_date_data(self, mock_moment):
    """Crawler returns the report row matching the mocked current date."""
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_3.html')

    # Mocked "today": 2015-07-05.
    mock_moment.side_effect = lambda: moment.utc('2015-07-05', '%Y-%m-%d')
    reports = crawler.parse()
    restriction_0705 = {
        'ciudad': 'Santiago',
        'fecha': '2015-07-05',
        'hash': '1627f5903717ecec8e1baa40955e69a63b01039f',
        'con_sello_verde': [],
        'sin_sello_verde': ['3', '4'],
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    air_quality_0705 = {
        'ciudad': 'Santiago',
        'fecha': '2015-07-05',
        'estado': 'Alerta Ambiental',
        'hash': '11bdf05f554d6e98b4ddbc7851322e4612441bcc',
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(restriction_0705, reports['restriction'][0])
    self.assertEqual(air_quality_0705, reports['air_quality'][0])

    # Mocked "today": 2015-07-06 — the next day's row comes first.
    mock_moment.side_effect = lambda: moment.utc('2015-07-06', '%Y-%m-%d')
    reports = crawler.parse()
    restriction_0706 = {
        'ciudad': 'Santiago',
        'fecha': '2015-07-06',
        'hash': 'e892e6bd7198fefdbbc420f963db0fab3fb971a3',
        'con_sello_verde': [],
        'sin_sello_verde': ['5', '6', '7', '8'],
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    air_quality_0706 = {
        'ciudad': 'Santiago',
        'fecha': '2015-07-06',
        'estado': 'Normal',
        'hash': '9bf487f4b4af1a6b312af84f7062f380bab40c54',
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(restriction_0706, reports['restriction'][0])
    self.assertEqual(air_quality_0706, reports['air_quality'][0])
def test_crawler_uoct_parse_current_date_data_2016_06_26(self, mock_moment):
    """The 2016-06-26 report parses identically with the clock on the 25th or 26th."""
    expected_restriction = {
        'ciudad': 'Santiago',
        'fecha': '2016-06-26',
        'hash': 'b7125cce59b6612ba64e6b5dbd78a94f7f143bfe',
        'con_sello_verde': ['6', '7'],
        'sin_sello_verde': ['3', '4', '5', '6', '7', '8'],
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    expected_air_quality = {
        'ciudad': 'Santiago',
        'fecha': '2016-06-26',
        'estado': 'Preemergencia Ambiental',
        'hash': 'c5ca5deeec466adb6a9dbd2e9f6c36aa18fcdc85',
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }

    mock_moment.side_effect = lambda: moment.utc('2016-06-25', '%Y-%m-%d')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('date/uoct.cl_restriccion-vehicular_2016_06_26.html')
    reports = crawler.parse()
    self.assertEqual(expected_restriction, reports['restriction'][0])
    self.assertEqual(expected_air_quality, reports['air_quality'][0])

    # Re-parse with the mocked clock on the report date itself.
    mock_moment.side_effect = lambda: moment.utc('2016-06-26', '%Y-%m-%d')
    reports['restriction'] = crawler.parse()['restriction']
    self.assertEqual(expected_restriction, reports['restriction'][0])
    self.assertEqual(expected_air_quality, reports['air_quality'][0])
def rankingSMS():
    """Return the top SMS ranking for a date window taken from query params.

    Defaults: last 24 hours, at most 10 entries.
    """
    limit = request.args.get('limit', 10)
    default_start = moment.now().subtract(days=1).format("YYYY-MM-DD HH:mm")
    default_end = moment.now().format("YYYY-MM-DD HH:mm")
    start = request.args.get('start', default_start)
    end = request.args.get('end', default_end)
    # Normalize both bounds to the ISO-style "YYYY-MM-DD HH:mm" format.
    start = moment.utc(start).format("YYYY-MM-DD HH:mm")
    end = moment.utc(end).format("YYYY-MM-DD HH:mm")
    return topSMS(limit, start, end)
def test_models_device_insert_existing(self, mock_moment):
    """Inserting an already-registered device keeps the original record and date."""
    registered_at = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: registered_at
    expected_data = {
        'tipo': 'email',
        'id': '*****@*****.**',
        'fecha_registro': registered_at.isoformat(),
    }
    Device.insert_one(self.mongo_db, 'email', '*****@*****.**')

    # Mock a later date for the duplicate insert attempt.
    later = moment.date('2015-06-23', '%Y-%m-%d')
    mock_moment.side_effect = lambda: later
    response = Device.insert_one(self.mongo_db, 'email', '*****@*****.**')

    self.assertEqual(1, self.mongo_db.devices.count())
    device_in_db = self.mongo_db.devices.find_one(
        {'tipo': 'email', 'id': '*****@*****.**'}, {'_id': 0})
    self.assertEqual(expected_data, device_in_db)
    # Keep previous data
    self.assertEqual('ok', response['status'])
    self.assertEqual(expected_data, response['data'])
def test_crawler_uoct_parse_data_integrity(self, mock_moment):
    """First parsed restriction and air-quality rows match the fixture exactly."""
    mock_moment.side_effect = lambda: moment.utc('2015-06-21', '%Y-%m-%d')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    reports = crawler.parse()

    expected_restriction = {
        'ciudad': 'Santiago',
        'fecha': '2015-06-21',
        'hash': 'ed55bf3ea8e18f328eb03471874be28e5779424b',
        'sin_sello_verde': ['3', '4', '5', '6', '7', '8'],
        'con_sello_verde': ['0', '9'],
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(expected_restriction, reports['restriction'][0])

    expected_air_quality = {
        'ciudad': 'Santiago',
        'fecha': '2015-06-21',
        'estado': 'Preemergencia Ambiental',
        'hash': '81ec93a759e6d309a135fa7af1b87bdff77b5459',
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(expected_air_quality, reports['air_quality'][0])
def test_cloning_a_UTC_date(self):
    """Cloning a UTC moment and re-localizing it shifts the wall-clock time.

    Uses `assertEqual`: `assertEquals` is a deprecated alias (removed in
    Python 3.12) and the sibling test at the same name already uses the
    modern spelling.
    """
    utc = moment.utc("2016-01-13T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ")
    self.assertEqual(utc.hours, 0)
    self.assertEqual(utc.format("YYYY-MM-DD"), "2016-01-13")
    # US/Eastern is UTC-5 in January, so midnight UTC is 7 PM the previous day.
    usa = utc.clone().locale("US/Eastern")
    self.assertEqual(usa.hours, 19)
    self.assertEqual(usa.format("YYYY-MM-DD"), "2016-01-12")
def test_devices_post_existing(self, mock_moment):
    """Posting an existing device returns it without bumping fecha_registro."""
    now = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: now
    expected_device = {
        'tipo': 'gcm',
        'id': 'dummy',
        'fecha_registro': moment.date('2015-06-21', '%Y-%m-%d').isoformat(),
    }
    Device.insert_one(self.mongo_db, 'gcm', 'dummy')

    response = self.app.post('/0/dispositivos', data={'tipo': 'gcm', 'id': 'dummy'})
    self.assertEqual('application/json', response.mimetype)
    self.assertEqual(200, response.status_code)

    data = json.loads(response.data.decode())
    self.assertEqual(dict, type(data))
    # Registration date differs from the fabricated one; compare the rest.
    self.assertNotEqual(expected_device['fecha_registro'], data['fecha_registro'])
    del expected_device['fecha_registro']
    del data['fecha_registro']
    self.assertEqual(expected_device, data)
def test_cloning_a_UTC_date(self):
    """Cloning a UTC moment and localizing the clone shifts the wall-clock time."""
    original = moment.utc("2016-01-13T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ")
    self.assertEqual(original.hours, 0)
    self.assertEqual(original.format("YYYY-MM-DD"), "2016-01-13")
    # US/Eastern is UTC-5 in January: midnight UTC is 7 PM the day before.
    eastern = original.clone().locale("US/Eastern")
    self.assertEqual(eastern.hours, 19)
    self.assertEqual(eastern.format("YYYY-MM-DD"), "2016-01-12")
def test_models_device_notify_gcm_canonical_ids_response(self, mock_gcm, mock_moment, mock_smtp):
    """GCM canonical-id responses must remove superseded ids, keep the rest.

    Bug fix: the final query's `$in` list repeated 'gcm_id_to_remove_1'
    twice and never excluded 'gcm_id_to_remove_2', so the assertion could
    pass even if the second stale id was not removed.
    """
    mock_datetime = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: mock_datetime
    # Fake GCM reporting two registrations replaced by canonical ids.
    mock_method = Mock()
    mock_method.json_request = Mock(return_value={
        'canonical': {
            'gcm_id_to_remove_1': 'gcm_to_remain_2',
            'gcm_id_to_remove_2': 'gcm_to_remain_3',
        }
    })
    mock_gcm.side_effect = lambda *a, **ka: mock_method

    Device.insert_one(self.mongo_db, 'email', 'email@to_remain.com')
    Device.insert_one(self.mongo_db, 'gcm', 'gcm_to_remain_1')
    Device.insert_one(self.mongo_db, 'gcm', 'gcm_to_remain_2')
    Device.insert_one(self.mongo_db, 'gcm', 'gcm_id_to_remove_1')
    Device.insert_one(self.mongo_db, 'gcm', 'gcm_id_to_remove_2')

    Device.notify(self.mongo_db, {'fake': 'data'}, collapse_key='fake_type')

    # Email devices are untouched by GCM cleanup.
    self.assertEqual(1, self.mongo_db.devices.find({'tipo': 'email'}).count())
    # Both superseded ids are gone; the three remaining gcm ids survive.
    query = {'tipo': 'gcm', 'id': {'$not': {'$in': ['gcm_id_to_remove_1', 'gcm_id_to_remove_2']}}}
    self.assertEqual(3, self.mongo_db.devices.find(query).count())
def test_get_with_date_param(self, mock_moment):
    """GET /0/calidad-aire?fecha=... returns exactly the matching report."""
    now = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: now

    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    AirQualityReport.insert_many(self.mongo_db, crawler.parse()['air_quality'])

    response = self.app.get('/0/calidad-aire?fecha=2015-06-21')
    self.assertEqual('application/json', response.mimetype)
    self.assertEqual(200, response.status_code)

    entries = json.loads(response.data.decode())
    self.assertEqual(1, len(entries))
    expected = {
        'ciudad': 'Santiago',
        'fecha': '2015-06-21',
        'hash': '81ec93a759e6d309a135fa7af1b87bdff77b5459',
        'estado': 'Preemergencia Ambiental',
        'actualizacion': now.isoformat(),
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(expected, entries[0])
def test_models_restriction_get_limit(self, mock_moment):
    """RestrictionReport.get honors a limit larger than the default page size."""
    mock_moment.side_effect = lambda: moment.utc('2015-06-21', '%Y-%m-%d')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    RestrictionReport.insert_many(self.mongo_db, crawler.parse()['restriction'])
    # Fixture holds 26 rows; limit=30 must return them all.
    reports = RestrictionReport.get(self.mongo_db, limit=30)
    self.assertEqual(26, len(reports))
def test_models_restriction_insert_many_keep_old_data(self, mock_moment):
    """Inserting a newer fixture only prepends the new row; old rows survive."""
    collection = self.mongo_db[RestrictionReport.get_mongo_collection()]

    def snapshot():
        # Rows ordered newest-first, without Mongo ids.
        cursor = collection.find({'$query': {}, '$orderby': {'fecha': -1}}, {'_id': 0})
        return [row for row in cursor]

    mock_moment.side_effect = lambda: moment.utc('2015-06-21', '%Y-%m-%d')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    RestrictionReport.insert_many(self.mongo_db, crawler.parse()['restriction'])
    self.assertEqual(26, collection.count())
    first_entries = snapshot()

    update_time = moment.utc('2015-06-22', '%Y-%m-%d')
    mock_moment.side_effect = lambda: update_time
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_1.html')
    new_restrictions = crawler.parse()['restriction']
    RestrictionReport.insert_many(self.mongo_db, new_restrictions)
    self.assertEqual(len(new_restrictions), collection.count())
    second_entries = snapshot()

    # Keep old data
    self.assertEqual(first_entries, second_entries[1:])
    expected_new_row = {
        'ciudad': 'Santiago',
        'fecha': '2015-06-22',
        'hash': '4550713861c4b74e957963c03195202980f4b831',
        'sin_sello_verde': ['0', '1', '2', '5', '6', '7', '8', '9'],
        'con_sello_verde': ['1', '2', '3', '4'],
        'actualizacion': update_time.isoformat(),
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(expected_new_row, second_entries[0])
def getSupplyDates(grouped_docs, route):
    """Collect creation timestamps of supplies matching the scenario supply type.

    NOTE(review): `route` is accepted but unused here — TODO confirm callers
    rely on this signature.
    """
    dates = []
    for supply in grouped_docs["items"]:
        # only look for water supplies
        if scenario_supply_type not in supply['ct']:
            continue
        if not supply['$ca']:
            print("Missing created_at" + supply["_id"])
        else:
            dates.append(moment.utc(supply['$ca']))
    return dates
def test_crawler_uoct_parse_air_quality_report_only(self, mock_moment):
    """A fixture with only air-quality info yields empty restriction lists.

    Consistency fix: the two `assertEquals` calls are replaced with
    `assertEqual` (the deprecated alias is removed in Python 3.12 and the
    rest of this test already uses the modern name).
    """
    mock_moment.side_effect = lambda: moment.utc('2017-05-13', '%Y-%m-%d')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('date/uoct.cl_restriccion-vehicular_2017_05_13.html')
    reports = crawler.parse()

    self.assertEqual(2, len(reports['restriction']))
    self.assertEqual(
        {
            'ciudad': 'Santiago',
            'fecha': '2017-05-13',
            'sin_sello_verde': [],
            'con_sello_verde': [],
            'hash': 'ff44f9f3dccc362b10b0d578699757eae8491777',
            'fuente': 'http://www.uoct.cl/restriccion-vehicular/'
        },
        reports['restriction'][0]
    )
    self.assertEqual(
        {
            'ciudad': 'Santiago',
            'fecha': '2017-05-09',
            'sin_sello_verde': [],
            'con_sello_verde': [],
            'hash': '26ea0a1f20a66ae6ffd3ed2f1e0fd468f7c2f234',
            'fuente': 'http://www.uoct.cl/restriccion-vehicular/'
        },
        reports['restriction'][1]
    )

    self.assertEqual(2, len(reports['air_quality']))
    self.assertEqual(
        {
            'ciudad': 'Santiago',
            'fecha': '2017-05-13',
            'estado': 'Normal',
            'hash': '4bf3326dca181c2a8d9b64a060fed8c71e03d656',
            'fuente': 'http://www.uoct.cl/restriccion-vehicular/'
        },
        reports['air_quality'][0]
    )
    self.assertEqual(
        {
            'ciudad': 'Santiago',
            'fecha': '2017-05-09',
            'estado': 'Alerta Ambiental',
            'hash': '3568e7fdef8bbea2fb09aa7c5aa271bf6c18921f',
            'fuente': 'http://www.uoct.cl/restriccion-vehicular/'
        },
        reports['air_quality'][1]
    )
def test_crawler_uoct_parse_file(self, mock_moment):
    """Parsing a local fixture returns 26 restriction and 26 air-quality rows."""
    mock_moment.side_effect = lambda: moment.utc('2015-06-21', '%Y-%m-%d')
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    reports = crawler.parse()
    self.assertEqual(dict, type(reports))
    for section in ('restriction', 'air_quality'):
        self.assertEqual(list, type(reports[section]))
        self.assertEqual(26, len(reports[section]))
def test_get_all(self, mock_moment):
    """GET /0/calidad-aire without params returns the default page of 10 rows."""
    mock_moment.side_effect = lambda: moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    AirQualityReport.insert_many(self.mongo_db, crawler.parse()['air_quality'])

    response = self.app.get('/0/calidad-aire')
    self.assertEqual('application/json', response.mimetype)
    self.assertEqual(200, response.status_code)
    payload = json.loads(response.data.decode())
    self.assertEqual(10, len(payload))
def test_crawler_uoct_parse_url(self, mock_pyquery, mock_moment):
    """Parsing via the (mocked) remote URL yields 27 rows per section."""
    mock_moment.side_effect = lambda: moment.utc('2015-06-21', '%Y-%m-%d')
    # Serve the local fixture in place of the network fetch.
    fixture = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_1.html').replace('file://', '')
    mock_pyquery.side_effect = lambda **kw: pq(filename=fixture)

    reports = UOCT_Crawler().parse()
    for section in ('restriction', 'air_quality'):
        self.assertEqual(list, type(reports[section]))
        self.assertEqual(27, len(reports[section]))
def test_devices_get_ok(self, mock_moment):
    """GET /0/dispositivos with tipo+id returns the registered device."""
    registered_at = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: registered_at
    expected_device = {
        'tipo': 'gcm',
        'id': 'dummy',
        'fecha_registro': registered_at.isoformat(),
    }
    Device.insert_one(self.mongo_db, 'gcm', 'dummy')

    response = self.app.get('/0/dispositivos?tipo=gcm&id=dummy')
    self.assertEqual('application/json', response.mimetype)
    self.assertEqual(200, response.status_code)
    payload = json.loads(response.data.decode())
    self.assertEqual(list, type(payload))
    self.assertEqual(expected_device, payload[0])
def translate_unix(dataset="./Banqiao.csv"):
    """Round-trip the first timestamp of a CSV through a Unix epoch value.

    Generalization: the CSV path is now a parameter (default keeps the old
    hard-coded './Banqiao.csv' behavior), and the epoch value is returned
    instead of being print-only.

    The first cell of the file must be a '%Y-%m-%d %H:%M' datetime string.

    Returns:
        int: the Unix timestamp (local time) of the first cell.
    """
    df = pd.read_csv(dataset, sep=",", encoding="utf8")
    data = df.iloc[:].values
    print(data[0, 0])
    print((moment.utc(data[0, 0])))
    # String -> struct_time -> epoch seconds (interpreted in local time).
    timeArray = time.strptime(data[0, 0], "%Y-%m-%d %H:%M")
    timeStamp = int(time.mktime(timeArray))
    print(timeStamp)
    # Epoch seconds -> formatted local-time string (round-trip check).
    timeArray = time.localtime(timeStamp)
    otherStyleTime = time.strftime("%Y-%m-%d %H:%M", timeArray)
    print(otherStyleTime)
    return timeStamp
def test_models_restriction_insert_many(self, mock_moment):
    """insert_many stores every parsed row plus an 'actualizacion' timestamp."""
    inserted_at = moment.utc('2015-06-22', '%Y-%m-%d')
    mock_moment.side_effect = lambda: inserted_at

    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    new_restrictions = crawler.parse()['restriction']
    RestrictionReport.insert_many(self.mongo_db, new_restrictions)

    collection = self.mongo_db[RestrictionReport.get_mongo_collection()]
    self.assertEqual(len(new_restrictions), collection.count())
    rows = collection.find({}, {'_id': 0})
    # Each stored row equals the parsed row plus the mocked update time.
    for index, restriction in enumerate(new_restrictions):
        restriction['actualizacion'] = inserted_at.isoformat()
        self.assertEqual(restriction, rows[index])
def test_models_device_notify_gcm_unregistered_or_invalid_ids(self, mock_gcm, mock_moment, mock_smtp):
    """GCM error responses must purge unregistered/invalid gcm devices only."""
    now = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: now
    # Fake GCM flagging both registrations as dead.
    fake_gcm = Mock()
    fake_gcm.json_request = Mock(return_value={
        'errors': {
            'NotRegistered': ['gcm_not_registered'],
            'InvalidRegistration': ['gcm_invalid_registration']
        }
    })
    mock_gcm.side_effect = lambda *a, **ka: fake_gcm

    Device.insert_one(self.mongo_db, 'email', 'email@to_remain.com')
    Device.insert_one(self.mongo_db, 'gcm', 'gcm_not_registered')
    Device.insert_one(self.mongo_db, 'gcm', 'gcm_invalid_registration')

    Device.notify(self.mongo_db, {'fake': 'data'})

    # Email devices survive; both dead gcm registrations are removed.
    self.assertEqual(1, self.mongo_db.devices.find({'tipo': 'email'}).count())
    self.assertEqual(0, self.mongo_db.devices.find({'tipo': 'gcm'}).count())
def test_models_restriction_get(self, mock_moment):
    """AirQualityReport.get returns the default page with the newest row first.

    NOTE(review): this method shares its name with another test that covers
    RestrictionReport — if both live in the same class, one shadows the
    other; confirm they belong to different test classes.
    """
    parsed_at = moment.utc('2015-06-21', '%Y-%m-%d')
    mock_moment.side_effect = lambda: parsed_at

    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    AirQualityReport.insert_many(self.mongo_db, crawler.parse()['air_quality'])

    air_quality_reports = AirQualityReport.get(self.mongo_db)
    self.assertEqual(10, len(air_quality_reports))
    expected = {
        'ciudad': 'Santiago',
        'fecha': '2015-06-21',
        'hash': '81ec93a759e6d309a135fa7af1b87bdff77b5459',
        'estado': 'Preemergencia Ambiental',
        'actualizacion': parsed_at.isoformat(),
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(expected, air_quality_reports[0])
def test_models_restriction_get(self, mock_moment):
    """RestrictionReport.get returns the default page with the newest row first."""
    parsed_at = moment.utc('2015-06-21', '%Y-%m-%d')
    mock_moment.side_effect = lambda: parsed_at

    crawler = UOCT_Crawler()
    crawler.url = self.get_fixture_file_path('uoct.cl_restriccion-vehicular_0.html')
    RestrictionReport.insert_many(self.mongo_db, crawler.parse()['restriction'])

    restrictions = RestrictionReport.get(self.mongo_db)
    self.assertEqual(10, len(restrictions))
    expected = {
        'ciudad': 'Santiago',
        'fecha': '2015-06-21',
        'hash': 'ed55bf3ea8e18f328eb03471874be28e5779424b',
        'sin_sello_verde': ['3', '4', '5', '6', '7', '8'],
        'con_sello_verde': ['0', '9'],
        'actualizacion': parsed_at.isoformat(),
        'fuente': 'http://www.uoct.cl/restriccion-vehicular/',
    }
    self.assertEqual(expected, restrictions[0])
def test_devices_post_email_ok(self, mock_moment):
    """Posting a pre-seeded email device echoes it back with its registration date."""
    registered_at = moment.utc('2015-06-22', '%Y-%m-%d').timezone(CONFIG['moment']['timezone'])
    mock_moment.side_effect = lambda: registered_at
    expected_device = {
        'tipo': 'email',
        'id': '*****@*****.**',
        'fecha_registro': registered_at.isoformat(),
    }
    # Seed the device directly, then strip the fields Mongo/POST will re-add.
    self.mongo_db.devices.insert_one(expected_device)
    del expected_device['_id']
    del expected_device['fecha_registro']

    response = self.app.post('/0/dispositivos', data=expected_device)
    self.assertEqual('application/json', response.mimetype)
    self.assertEqual(200, response.status_code)
    payload = json.loads(response.data.decode())
    self.assertEqual(dict, type(payload))
    expected_device['fecha_registro'] = registered_at.isoformat()
    self.assertEqual(expected_device, payload)
import moment

import lib.database as db

# Reference instants for the seeded events (two historical, one current).
first = moment.utc(2018, 1, 5)
then = moment.utc(2018, 7, 17)
now = moment.utcnow()

# ------------------------------------------------------------------ Events
first_flood = db.addEvent("First Boston Flood", id="boston_X8E29",
                          geohash="drt2z", cat="fld", status=3,
                          created_at=first.strftime("%Y-%m-%dT%H:%M:%S"))
second_flood = db.addEvent("Second Boston Flood", id="boston_19EC4",
                           geohash="drt2z", cat="fld", status=3,
                           created_at=then.strftime("%Y-%m-%dT%H:%M:%S"))
active_flood = db.addEvent("Boston Flood", id="boston_D392F",
                           geohash="drt2z", cat="fld", status=1,
                           created_at=now.strftime("%Y-%m-%dT%H:%M:%S"))

# ------------------------------------------------------------------ Devices
import moment

import lib.database as db

# ------------------------------------------------------------------ Events
florence = db.addEvent(
    "Hurricane Florence",
    id="hurricane_florence_82FCD",
    geohash="djzqj",
    cat="hrc",
    status=1,
    created_at=moment.utc(2018, 9, 13).strftime("%Y-%m-%dT%H:%M:%S"),
)

# ------------------------------------------------------------------ Venues
rvh = db.addVenue("Ridge View High School", id="rvh",
                  lat=34.1630921, lon=-80.910353, cat="bld", parents=[florence])
pbe = db.addVenue("Palmetto Bays Elementary School", id="pbe",
                  lat=33.7301615, lon=-79.0228992, cat="bld", parents=[florence])

# ------------------------------------------------------------------ Items
# Seed supplies for the current storm event.
for venue, cat in ((rvh, "bed"), (rvh, "net"), (rvh, "wtr"),
                   (pbe, "bed"), (pbe, "eat")):
    db.addItem(venue=venue, cat=cat, event=florence)
ptt = crawler(['-b', 'forsale', '-i', '-1', '2']) #filename = 'forsale--1-5.json' print(ptt.json_filename) with codecs.open(ptt.json_filename, 'r', encoding='utf-8') as f: #with open('forsale-0-2.json', 'r') as f: json_data = json.load(f) for item in json_data['articles']: if (KeyWord in item[u'article_title'].lower() or \ KeyWord in item[u'content'].lower()) and \ (u'看板規則' not in item[u'content'].lower() and \ u'公告' not in item[u'article_title'].lower() and \ u'市集' not in item[u'article_title'].lower()): diff_time = moment.utc( time.asctime(time.localtime( time.time()))).timezone('Asia/Taipei') - moment.utc( item[u'date']) diff_time = diff_time.seconds / 60.0 print(diff_time) if (diff_time < 20): line_nofity(item) print(item[u'article_title']) print(item[u'author']) print(item[u'content']) print(item[u'date']) print(item[u'url']) print('\n') os.remove(ptt.json_filename)
def since(iso):
    """Return the number of seconds elapsed since the given TPU ISO timestamp."""
    started = moment.utc(_normalize_tpu_isodate(iso), "%Y-%m-%dT%H:%M:%S.%fZ")
    elapsed = moment.utcnow() - started
    return elapsed.total_seconds()
def test_suffix_formula(self):
    """The %^ strftime extension renders the ordinal day suffix ('18th')."""
    date = moment.utc((2012, 12, 18)).zero
    self.assertEqual(date.strftime("%B %-d%^, %Y"), "December 18th, 2012")
def test_chaining_with_format(self):
    """replace/add chaining mutates the moment in place before formatting.

    Uses `assertEqual`: `assertEquals` is a deprecated alias (removed in
    Python 3.12) and the sibling tests already use the modern spelling.
    """
    d = moment.utc((2012, 12, 18))
    d.replace(hours=1).add(minutes=2).replace(seconds=3)
    expecting = "2012-12-18 01:02:03"
    self.assertEqual(d.format('YYYY-MM-DD hh:mm:ss'), expecting)
def test_utc_function_with_args(self):
    """moment.utc(y, m, d) equals the corresponding tz-aware datetime.

    Uses `assertEqual`: `assertEquals` is a deprecated alias (removed in
    Python 3.12) and the sibling tests already use the modern spelling.
    """
    d = moment.utc(2012, 12, 18)
    self.assertEqual(d, datetime(2012, 12, 18, tzinfo=pytz.utc))
async def galnet_loop():
    """Background task: poll the GalNet API and post new articles to Discord.

    Runs until the client closes or a required config value is missing.
    Config is re-read every iteration so changes apply without a restart.
    """
    # wait for the discord bot to be ready
    await discord_client.wait_until_ready()
    logger.info("Starting GalNet loop")
    galnet_last_modified = ""
    # while the bot is connected and running
    while not discord_client.is_closed:
        # get config variables
        news_channel_id = config.get('discord', 'news_channel_id', fallback=None)
        galnet_api = config.get('elite', 'galnet_api', fallback=None)
        news_timestamp_use_ugt = config.getboolean('general', 'news_timestamp_use_ugt', fallback=True)
        timezone = config.get('general', 'timezone', fallback="UTC")
        new_news_message = config.get('discord', 'new_news_message', fallback="@here")
        check_interval = config.getint('general', 'check_interval', fallback=1800)
        # get information about the defined channel
        channel = discord_client.get_channel(news_channel_id)
        if not channel:
            logger.error(
                "Galnet loop enabled but no news channel set or no channel matching {0}, exiting galnet loop."
                .format(news_channel_id))
            return
        if not galnet_api:
            # NOTE(review): this message was split mid-string by formatting
            # damage in the source; rejoined here — confirm original wording.
            logger.error("No API endpoint set for Galnet News. Exiting galnet loop.")
            return
        # get galnet articles from api endpoint and load json data
        # noinspection PyDeprecation
        async with aiohttp.get(url=galnet_api) as r:
            if r.status == 200:
                data = await r.json()
                # extract the date the article was posted and convert it to a
                # datetime object
                latest_post_datetime = moment.utc(data[0]['date'], 'DD MMM YYYY').locale("UTC")
                # create the embed that contains the article
                embed = discord.Embed(title=data[0]['title'],
                                      description=data[0]['content'].replace(
                                          "<br /><br /> ", "\n\n"))
                embed.set_author(
                    name="Galnet News",
                    url="https://community.elitedangerous.com/en/galnet")
                if news_timestamp_use_ugt:
                    # Universal Game Time: show the in-game (future) date.
                    timestamp = latest_post_datetime.timezone(timezone)
                else:
                    # Convert the in-game year back to a real-world year.
                    timestamp = latest_post_datetime.subtract(years=1286)
                embed.add_field(name="Post Date",
                                value=timestamp.format('DD MMM YYYY'))
                # if this is our first run or there is a new article post the
                # latest article
                if not galnet_last_modified or (
                        galnet_last_modified and
                        galnet_last_modified < latest_post_datetime):
                    logger.info("New galnet article found. Posting to discord.")
                    # update the last modified time
                    galnet_last_modified = latest_post_datetime
                    await discord_client.send_message(channel,
                                                      content=new_news_message,
                                                      embed=embed)
            else:
                logger.error("Failed to get galnet news articles.")
        # wait for the defined time before checking again
        await asyncio.sleep(delay=check_interval)
def str_to_utc_date(s):
    """Parse the given string into a UTC moment object."""
    parsed = moment.utc(s)
    return parsed
def test_moment_unix_command(self):
    """moment.unix with utc=True maps epoch 1355788800 to 2012-12-18 UTC."""
    actual = moment.unix(1355788800, utc=True).date
    self.assertEqual(actual, moment.utc((2012, 12, 18)).date)
scenario_emergency_level = np.random.randint(low=1, high=4, size=(2000,)) # simulated level scenario_Number_of_available_trucks = np.random.randint(low=0, high=21, size=(2000,)) scenario_Hours_since_last_supply = np.random.randint(low=1, high=4001, size=(2000,)) scenario_number_of_requests = np.random.randint(low=1, high=101, size=(2000,)) scenario_total_population = np.random.randint(low=2000, high=15001, size=(2000,)) scenario_total_infant_population = np.random.randint(low=2000, high=150001, size=(2000,)) scenario_total_aged_population = np.random.randint(low=2000, high=100001, size=(2000,)) scenario_type = 1 # flood scenario scenario_ideal_distribution = scenario_Hours_since_last_supply + 4 * scenario_number_of_requests \ + np.log(scenario_total_population) \ + np.log(2 * scenario_total_infant_population) \ + np.log(scenario_total_aged_population) \ + np.exp(scenario_emergency_level) \ + np.random.randn(2000, )# water in liters per truck route @todo calculate this intelligently scenario_optimal_number_of_trucks = 1 scenario_start = moment.utc(2018, 9, 6) # date of disaster start # 5856 -jan , 1224 - july def getSupplyDates(grouped_docs, route): supply_dates = [] for supply in grouped_docs["items"]: # only look for water supplies if scenario_supply_type in supply['ct']: if not supply['$ca']: print("Missing created_at" + supply["_id"]) else: supply_dates.append(moment.utc(supply['$ca'])) return supply_dates
def calculateHoursSinceDisaster(event):
    """Return whole hours between the scenario start and the event's creation."""
    completed_at = moment.utc(event['$ca'])
    delta = completed_at.diff(scenario_start)
    # Partial-day seconds rounded to hours, plus 24h for each full day.
    whole_day_hours = delta.days * 24
    partial_hours = round(delta.seconds / 60 / 60)
    return partial_hours + whole_day_hours