def setUp(self):
    """Build the shared fixtures: a query window, canned DB rows, a config
    dict, and the result sets the tests compare against."""
    # Fixed 24-hour window (naive datetimes) used as the measurement range.
    self.date_range = (datetime.datetime.fromisoformat("2020-02-01T10:00:00"),
                       datetime.datetime.fromisoformat("2020-02-02T10:00:00"))
    # Canned psycopg2 RealDictRow results imitating OONI web_connectivity
    # measurements. Two CN rows plus one VN (probe_asn 0) and one CA row —
    # presumably the latter two are excluded by country/ASN filtering, which
    # is why the expected sets below contain only two measurements; confirm
    # against the consumer of ``self.query``.
    self.query = [
        RealDictRow([('measurement_start_time', datetime.datetime(2020, 2, 11, 6, 53, 37)),
                     ('report_id', '20200211T065336Z_AS4134_4M0eNXqQCp1mrHumzmR73pHhLRMyVh1dAc4VYcoICjBAkqjxlZ'),
                     ('probe_asn', 4134), ('probe_cc', 'CN'), ('probe_ip', None),
                     ('test_name', 'web_connectivity'), ('input', 'https://zh.wikipedia.org/'),
                     ('blocking', 'tcp_ip'), ('http_experiment_failure', 'generic_timeout_error')]),
        RealDictRow([('measurement_start_time', datetime.datetime(2020, 2, 13, 6, 16, 19)),
                     ('report_id', '20200213T061554Z_AS45102_IVK2a2mfaXQTip5xHVezqfun2jnQo8auGA0D5JTEHK3ovOmrx1'),
                     ('probe_asn', 45102), ('probe_cc', 'CN'), ('probe_ip', None),
                     ('test_name', 'web_connectivity'), ('input', 'https://fr.wikipedia.org/'),
                     ('blocking', 'false'), ('http_experiment_failure', None)]),
        RealDictRow([('measurement_start_time', datetime.datetime(2020, 2, 11, 6, 53, 37)),
                     ('report_id', '20200211T065336Z_AS4134_4M0eNXqQCp1mrHumzmR73pHhLRMyVh1dAc4VYcoICjBAkqjxlZ'),
                     ('probe_asn', 0), ('probe_cc', 'VN'), ('probe_ip', None),
                     ('test_name', 'web_connectivity'), ('input', 'https://zh.wikipedia.org/'),
                     ('blocking', 'tcp_ip'), ('http_experiment_failure', 'generic_timeout_error')]),
        RealDictRow([('measurement_start_time', datetime.datetime(2020, 2, 13, 6, 16, 19)),
                     ('report_id', '20200213T061554Z_AS45102_IVK2a2mfaXQTip5xHVezqfun2jnQo8auGA0D5JTEHK3ovOmrx1'),
                     ('probe_asn', 45102), ('probe_cc', 'CA'), ('probe_ip', None),
                     ('test_name', 'web_connectivity'), ('input', 'https://fr.wikipedia.org/'),
                     ('blocking', 'false'), ('http_experiment_failure', "unknown_failure")])]
    # Minimal config: DB connection parameters plus the domains under test.
    self.config = {"database": {"dbname": "metadb", "user": "******"},
                   "domains": ["wikipedia.org"]}
    # Result set that SHOULD match: 2 measurements total, 1 flagged blocking.
    self.expected_results = {'len_all': 2, 'len_blocking': 1, 'measurements': [
        {'url':
         'https://explorer.ooni.io/measurement/20200211T065336Z_AS4134_4M0eNXqQCp1mrHumzmR73pHhLRMyVh1dAc4VYcoICjBAkqjxlZ?input=https%3A//zh.wikipedia.org/',
         'blocking': 'tcp_ip'},
        {'url': 'https://explorer.ooni.io/measurement/20200213T061554Z_AS45102_IVK2a2mfaXQTip5xHVezqfun2jnQo8auGA0D5JTEHK3ovOmrx1?input=https%3A//fr.wikipedia.org/',
         'blocking': 'false'}]}
    # Deliberately wrong result set (first row claims 'dns' blocking and
    # len_blocking is 0) for negative-path assertions.
    self.unexpected_results = {'len_all': 2, 'len_blocking': 0, 'measurements': [
        {'url': 'https://explorer.ooni.io/measurement/20200211T065336Z_AS4134_4M0eNXqQCp1mrHumzmR73pHhLRMyVh1dAc4VYcoICjBAkqjxlZ?input=https%3A//zh.wikipedia.org/',
         'blocking': 'dns'},
        {'url': 'https://explorer.ooni.io/measurement/20200213T061554Z_AS45102_IVK2a2mfaXQTip5xHVezqfun2jnQo8auGA0D5JTEHK3ovOmrx1?input=https%3A//fr.wikipedia.org/',
         'blocking': 'false'}]}
def _transform_project_data(project: RealDictRow, appdb=None) -> RealDictRow:
    """Return a copy of *project* remapped to the public API shape, or the
    input itself when it is falsy (project not found)."""
    if not project:
        # Nothing to transform — pass the falsy value straight through.
        return project

    result = project.copy()
    project_id = result["projectid"]

    # Re-map internal column names onto the API field names.
    result["id"] = project_id
    result["license"] = get_project_license(appdb, result["licenseid"])
    result["icon"] = result["projecticon"]

    # Enrich with related records looked up from the app database.
    result["developer"] = get_developer(appdb, project_id)["userid"]
    releases = get_app_releases(appdb, project_id)
    result["releases"] = releases
    result["latest_version"] = releases[0]["version"] if releases else None
    result["screenshots"] = get_screenshots(appdb, project_id)
    result["permissions"] = get_project_permissions(appdb, project_id)

    # Drop the internal-only columns now that they have been re-mapped.
    for internal_key in ("projectid", "version", "projecticon", "licenseid"):
        del result[internal_key]

    # Translate the numeric type into its lowercase enum name, with a
    # human-friendly spelling for core services.
    type_name = str(ProjectType(result["type"]).name).lower()
    result["type"] = "core service" if type_name == "coreservice" else type_name
    return result
def test_producer_flush(self, mock_extractor, request):
    """flush() publishes one JSON-encoded stats message per website row."""
    producer_arg = mock.MagicMock()
    producer_arg.value = KafkaProducer
    producer = _StatsProducer(producer_arg)
    # Patch the collaborators: the website list property and the publisher.
    website_list_ = property_mock(request, _StatsProducer, "_website_list")
    publish_message_ = method_mock(request, _StatsProducer, "_publish_message",
                                   autospec=True)
    # Extractor yields (time, error_code, page_content) for the probed site.
    mock_extractor.return_value = [0.0, None, "foo"]
    website_list_.return_value = [
        RealDictRow([
            ("id", 4),
            ("name", "python website"),
            ("url", "https://www.python.org"),
        ]),
    ]

    producer.flush()

    assert publish_message_.called
    expected_payload = (
        '{"id": 4, "url": "https://www.python.org", "time": 0.0, '
        '"error_code": null, "page_content": "foo"}'
    )
    assert publish_message_.call_args_list == [
        mock.call(producer_arg, "demo-topic", value=expected_payload)
    ]
def transform_release_row(release_row: RealDictRow) -> RealDictRow:
    """Return a copy of *release_row* with its metadata remapped to the
    friendlier public API field names."""
    row = release_row.copy()
    # Expose internal column names under the API keys.
    row["download"] = row["downloadurl"]
    row["release_date"] = row["inspectdate"]
    # Remove the internal-only columns (including the two just re-mapped).
    for internal_key in ("downloadurl", "inspectstatus", "userid",
                        "projectid", "inspectdate"):
        del row[internal_key]
    return row
def __get_account_type(account: RealDictRow) -> RealDictRow:
    """Return a copy of *account* with a lowercase, human-readable ``type``
    field derived from its numeric ``accounttype`` column."""
    if not account:
        # Falsy input (missing account) is returned untouched.
        return account
    enriched = account.copy()
    enriched["type"] = str(AccountType(enriched["accounttype"]).name).lower()
    return enriched
def it_knows_its_website_list(self, mock_connect):
    """_website_list returns every row the DB cursor fetches."""
    rows = [
        RealDictRow([
            ("id", 4),
            ("name", "python website"),
            ("url", "https://www.python.org"),
        ]),
        RealDictRow([
            ("id", 5),
            ("name", "foo"),
            ("url", "https://www.foo.bar"),
        ]),
    ]
    # Wire the mocked connection so cursor().fetchall() yields our rows.
    cursor = mock_connect.return_value.cursor.return_value
    cursor.fetchall.return_value = rows

    producer = _StatsProducer()
    result = producer._website_list

    assert result == rows
def test_count_the_number_of_subordinates(self, conn):
    """COUNT over a LEFT OUTER JOIN yields a subordinate count for every
    organization, including those with zero subordinates."""
    self.load_fixtures(conn, os.path.join(PATH_TO_SQL_DIR, "organizations.sql"))
    # LEFT OUTER JOIN keeps organizations that have no matching
    # enterprise_sales_enterprise_customers row, so their COUNT is 0.
    sql = """ SELECT COUNT(t2.sales_organization_id) AS subordinates_count, t1.id FROM organizations AS t1 LEFT OUTER JOIN enterprise_sales_enterprise_customers AS t2 ON t1.id = t2.sales_organization_id GROUP BY t1.id ORDER BY t1.id; """
    with conn.cursor(cursor_factory=RealDictCursor) as cur:
        cur.execute(sql)
        actual = cur.fetchall()
    # Printed so a failing run shows the full result set.
    print(actual)
    assert len(actual) == 7
    assert actual == [
        RealDictRow(**{
            "subordinates_count": 0,
            "id": 1,
        }),
        RealDictRow(**{
            "subordinates_count": 4,
            "id": 2,
        }),
        RealDictRow(**{
            "subordinates_count": 0,
            "id": 3,
        }),
        RealDictRow(**{
            "subordinates_count": 0,
            "id": 4,
        }),
        RealDictRow(**{
            "subordinates_count": 0,
            "id": 5,
        }),
        RealDictRow(**{
            "subordinates_count": 1,
            "id": 6,
        }),
        RealDictRow(**{
            "subordinates_count": 0,
            "id": 7,
        })
    ]
def test_opengauss_exporter(mock_connect):
    """Running the exporter main starts the controller once and serves
    Prometheus-formatted metrics text."""
    fake_rows = RealDictRow([('fake1', 1), ('fake2', 2)])
    # Any cursor used as a context manager returns the fake row set.
    cursor_cm = mock_connect.return_value.cursor.return_value.__enter__.return_value
    cursor_cm.fetchall.return_value = fake_rows
    oe_controller.run = mock.MagicMock()

    oe_main(
        ['--url', 'postgres://*****:*****@127.0.0.1:1234/testdb', '--disable-https'])

    oe_controller.run.assert_called_once()
    # Prometheus exposition output always begins with a '# HELP' comment.
    assert oe_controller.query_all_metrics().startswith(b'# HELP')
def test_calculate_center_of_each_segment(self, conn):
    """ST_Centroid returns the longitude/latitude midpoint of each
    segment's bounds."""
    self.load_fixtures(conn, os.path.join(PATH_TO_SQL_DIR, "japan_segments.sql"))
    # Order numerically by the digits embedded in the id (so KAGOSHIMA_10
    # sorts after KAGOSHIMA_9, not after KAGOSHIMA_1); NULLIF guards ids
    # containing no digits against a cast error.
    sql = """ SELECT id, ST_X(ST_Centroid(bounds)) AS longitude, ST_Y(ST_Centroid(bounds)) AS latitude FROM japan_segments ORDER BY NULLIF(regexp_replace(id, '\\D', '', 'g'), '')::int; """
    with conn.cursor(cursor_factory=RealDictCursor) as cur:
        cur.execute(sql)
        actual = cur.fetchall()
    # Printed so a failing run shows the full result set.
    print(actual)
    assert len(actual) == 10
    assert actual == [
        RealDictRow(
            **{
                "id": "KAGOSHIMA_1",
                "longitude": 130.642228315775,
                "latitude": 30.7045454545455,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_2",
                "longitude": 130.694183864916,
                "latitude": 30.7045454545455,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_3",
                "longitude": 130.746139414057,
                "latitude": 30.7045454545455,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_4",
                "longitude": 129.707028431231,
                "latitude": 30.75,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_5",
                "longitude": 129.758983980373,
                "latitude": 30.75,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_6",
                "longitude": 129.810939529514,
                "latitude": 30.75,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_7",
                "longitude": 129.862895078655,
                "latitude": 30.75,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_8",
                "longitude": 129.914850627797,
                "latitude": 30.75,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_9",
                "longitude": 129.966806176937,
                "latitude": 30.75,
            }),
        RealDictRow(
            **{
                "id": "KAGOSHIMA_10",
                "longitude": 130.018761726079,
                "latitude": 30.75,
            })
    ]
def test_segments_using_geojson_boundary(self, conn):
    """ST_Within with a polygon extracted from a GeoJSON FeatureCollection
    selects only the segments fully inside that boundary."""
    self.load_fixtures(conn, os.path.join(PATH_TO_SQL_DIR, "japan_segments.sql"))
    # The polygon is the first feature's geometry; ST_SetSRID(..., 4326)
    # stamps it with the WGS84 SRID before the containment test.
    sql = """ SELECT id FROM japan_segments jps WHERE ST_Within( jps.bounds, ST_SetSRID(ST_GeomFromGeoJSON('{ "type": "FeatureCollection", "features": [ { "type": "Feature", "properties": {}, "geometry": { "type": "Polygon", "coordinates": [ [ [ 130.27313232421875, 30.519681272749402 ], [ 131.02020263671875, 30.519681272749402 ], [ 131.02020263671875, 30.80909017893796 ], [ 130.27313232421875, 30.80909017893796 ], [ 130.27313232421875, 30.519681272749402 ] ] ] } } ] }'::json->'features'->0->'geometry'),4326)); """
    with conn.cursor(cursor_factory=RealDictCursor) as cur:
        cur.execute(sql)
        actual = cur.fetchall()
    # Printed so a failing run shows the full result set.
    print(actual)
    assert len(actual) == 3
    assert actual == [
        RealDictRow(**{
            "id": "KAGOSHIMA_1",
        }),
        RealDictRow(**{
            "id": "KAGOSHIMA_2",
        }),
        RealDictRow(**{
            "id": "KAGOSHIMA_3",
        })
    ]
from psycopg2.extras import RealDictRow import datetime database_ip_traffic_records = [ RealDictRow([('event_type', None), ('ip_src', None), ('ip_dst', 'TEST'), ('port_src', None), ('port_dst', None), ('timestamp_start', datetime.datetime(2020, 4, 19, 20, 9, 33)), ('timestamp_end', None), ('packets', None), ('bytes', None), ('writer_id', None), ('mac_src', None), ('mac_dst', None), ('ip_proto', None), ('src_hostname', None), ('dst_hostname', None), ('incoming_outgoing', None)]), RealDictRow([('event_type', None), ('ip_src', None), ('ip_dst', 'TEST'), ('port_src', None), ('port_dst', None), ('timestamp_start', datetime.datetime(2020, 4, 19, 20, 9, 48)), ('timestamp_end', None), ('packets', None), ('bytes', None), ('writer_id', None), ('mac_src', None), ('mac_dst', None), ('ip_proto', None), ('src_hostname', None), ('dst_hostname', None), ('incoming_outgoing', None)]), RealDictRow([('event_type', None), ('ip_src', None), ('ip_dst', 'TEST'), ('port_src', None), ('port_dst', None), ('timestamp_start', datetime.datetime(2020, 4, 19, 20, 20, 10)), ('timestamp_end', None), ('packets', None), ('bytes', None), ('writer_id', None), ('mac_src', None), ('mac_dst', None), ('ip_proto', None), ('src_hostname', None), ('dst_hostname', None), ('incoming_outgoing', None)]), RealDictRow([('event_type', None), ('ip_src', None), ('ip_dst', 'TEST'), ('port_src', None), ('port_dst', None), ('timestamp_start', datetime.datetime(2020, 4, 19, 20, 24,
def test_segments_using_geojson_boundary(self, conn):
    """Segment bounds are round-tripped through WKT (ST_AsText /
    ST_GeomFromText) before the containment test because the stored
    geometries and the GeoJSON polygon carry mismatched SRIDs; applying
    ST_Within directly on the stored geometry and the GeoJSON reproduces
    the error.
    """
    self.load_fixtures(conn, os.path.join(PATH_TO_SQL_DIR, "japan_segments.sql"))
    # NOTE(review): both polygons are inflated with ST_Buffer(..., 20) before
    # ST_Within — presumably to absorb precision differences from the WKT
    # round-trip; confirm the buffer distance is intentional.
    sql = f""" WITH segment_geo_texts AS ( SELECT id, ST_AsText(bounds) AS seg_geo_text FROM japan_segments ) , segment_geos AS ( SELECT id, ST_GeomFromText(seg_geo_text) AS segment_geo FROM segment_geo_texts ) , segment_buffered_polygons AS ( SELECT id, ST_Buffer(ST_GeomFromGeoJSON('{PREDIFIENED_GEO_JSON}'), 20) AS geo_json_polygon, ST_BUFFER(segment_geo, 20) AS segment_polygon FROM segment_geos ) , results AS ( SELECT id, ST_Within(segment_polygon, geo_json_polygon) As is_within_req_polygon FROM segment_buffered_polygons ) SELECT id FROM results WHERE is_within_req_polygon = 't'; """  # noqa
    with conn.cursor(cursor_factory=RealDictCursor) as cur:
        cur.execute(sql)
        actual = cur.fetchall()
    # Printed so a failing run shows the full result set.
    print(actual)
    assert len(actual) == 3
    assert actual == [
        RealDictRow(**{
            "id": "KAGOSHIMA_1",
        }),
        RealDictRow(**{
            "id": "KAGOSHIMA_2",
        }),
        RealDictRow(**{
            "id": "KAGOSHIMA_3",
        })
    ]