Example No. 1
    def test_during_search(self):
        """Function to test the temporal during search query.
        """

        for e in self.testVector["entity_ids"]:
            self.entity = Entity(e, token=self.config["headers"]["token"])

            for time_param in self.testVector["temporal_params"]:
                df = self.entity.during_search(
                    start_time=time_param["start_time"],
                    end_time=time_param["end_time"]
                )

                self.assertNotEqual(df.shape[0], 0)
                self.assertGreaterEqual(df.shape[0], 1)
                self.assertIsNotNone(df)
                self.assertIsInstance(df, pd.DataFrame)

            print("*"*60 + "\n" + "*"*60)
            print(
                f"DataFrame has {df.shape[0]} rows and {df.shape[1]} columns.",
                end="\n-----------\n"
                )
            print(f"Columns in DataFrame:\n{df.columns}", end="\n-----------\n")
            print(df.head())
            print("*"*60)
Example No. 2
    def test_batches_resourcegroup(self):
        e = Entity(
            "datakaveri.org/04a15c9960ffda227e9546f3f46e629e1fe4132b/rs.iudx.org.in/pune-env-aqm",
            token_obj=self.token_obj)
        df = e.during_search("2021-12-18T00:00:00Z", "2021-12-31T00:00:00Z")
        df["observationDateTime"] = pd.to_datetime(df["observationDateTime"])
        print(df["observationDateTime"].max())
        print(df["observationDateTime"].min())
Example No. 3
    def test_batches_resource(self):
        e = Entity(
            "datakaveri.org/04a15c9960ffda227e9546f3f46e629e1fe4132b/rs.iudx.org.in/pune-env-aqm/83cdf03d-5787-7052-08aa-143cfbfb807d",
            token_obj=self.token_obj)
        query = ResourceQuery().add_entity(
            "datakaveri.org/04a15c9960ffda227e9546f3f46e629e1fe4132b/rs.iudx.org.in/pune-env-aqm/83cdf03d-5787-7052-08aa-143cfbfb807d"
        ).during_search("2021-12-18T00:00:00Z", "2021-12-29T00:00:00Z")
        batch_queries = []
        e.make_query_batches(query, batch_queries)
        for q in batch_queries:
            print(q.get_query())
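
make_query_batches appears to split the query's long time range into smaller per-window queries before they are sent, which is why each q.get_query() above prints a separate payload. As a rough standalone illustration of the same windowing idea (not the library's own batching logic), a long interval can be cut into fixed one-day windows with pandas:

    import pandas as pd

    # Illustrative only: slice 2021-12-18 .. 2021-12-29 into one-day windows.
    edges = pd.date_range("2021-12-18", "2021-12-29", freq="D", tz="UTC")
    for window_start, window_end in zip(edges[:-1], edges[1:]):
        print(window_start.isoformat(), "->", window_end.isoformat())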
Example No. 4
    def test_date_bins(self):
        e = Entity("datakaveri.org/04a15c9960ffda227e9546f3f46e629e1fe4132b/rs.iudx.org.in/pune-env-aqm/83cdf03d-5787-7052-08aa-143cfbfb807d", token_obj=self.token_obj)
        query = ResourceQuery().add_entity(
                    "datakaveri.org/04a15c9960ffda227e9546f3f46e629e1fe4132b/rs.iudx.org.in/pune-env-aqm/83cdf03d-5787-7052-08aa-143cfbfb807d").during_search("2021-12-18T00:00:00Z", "2021-12-29T00:00:00Z")
        date_bins = []
        start_time = datetime.strptime("2021-12-16T00:00:00Z", self.time_format)
        end_time = datetime.strptime("2021-12-27T00:00:00Z", self.time_format)
        e.make_date_bins(start_time, end_time, date_bins)
        for i in range(0, len(date_bins)-1):
            print(date_bins[i], date_bins[i+1])

        print(date_bins)
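
Since make_date_bins appears to fill date_bins with consecutive boundary values, the index-based loop above can also be written with zip, pairing each boundary with the next one:

    # Equivalent pairing of consecutive bin boundaries without index arithmetic.
    for bin_start, bin_end in zip(date_bins, date_bins[1:]):
        print(bin_start, bin_end)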
Example No. 5
    def test_download(self):
        """Function to test the downloading of file in csv format.
        """
        file_name = "test_download_file"

        for e in self.testVector["entity_ids"]:
            self.entity = Entity(e, token=self.config["headers"]["token"])

            for time_param in self.testVector["temporal_params"]:
                df = self.entity.during_search(
                    start_time=time_param["start_time"],
                    end_time=time_param["end_time"]
                )

                self.assertNotEqual(df.shape[0], 0)
                self.assertGreaterEqual(df.shape[0], 1)
                self.assertIsNotNone(df)
                self.assertIsInstance(df, pd.DataFrame)
            
            for file_type in self.testVector["file_types"]:
                self.entity.download(file_name, file_type)

                zf = zipfile.ZipFile(f"{file_name}.zip")
                
                if file_type == "csv":
                    df_csv = pd.read_csv(zf.open(f"{file_name}.{file_type}"))
                    self.assertIsNotNone(df_csv)
                    self.assertIsInstance(df_csv, pd.DataFrame)
                    os.remove(f"{file_name}.zip")

                elif file_type == "json":
                    df_json = pd.read_json(zf.open(f"{file_name}.{file_type}"), orient='records')
                    self.assertIsNotNone(df_json)   
                    self.assertIsInstance(df_json, pd.DataFrame)      
                    os.remove(f"{file_name}.zip")           
                
                else:
                    raise RuntimeError(f"File type '{file_type}' is not supported.")
Example No. 6
    def test_latest(self):
        """Function to test the latest search query.
        """

        for e in self.testVector["entity_ids"]:
            self.entity = Entity(e)

            for time_param in self.testVector["temporal_params"]:
                df = self.entity.latest()

                self.assertNotEqual(df.shape[0], 0)
                self.assertGreaterEqual(df.shape[0], 1)
                self.assertIsNotNone(df)
                self.assertIsInstance(df, pd.DataFrame)

            print("*"*60 + "\n" + "*"*60)
            print(
                f"DataFrame has {df.shape[0]} rows and {df.shape[1]} columns.",
                end="\n-----------\n"
                )
            print(f"Columns in DataFrame:\n{df.columns}", end="\n-----------\n")
            print(df.head())
            print("*"*60)
Example No. 7
    def test_get_static_data(self):
        iid = "varanasismartcity.gov.in/62d1f729edd3d2a1a090cb1c6c89356296963d55/rs.iudx.org.in/varanasi-point-of-interests/smartpole-locations"
        e = Entity(iid, token_obj=self.token_obj)
        df = e.property_search("id", iid, operation="==")
        print(df.shape)
Example No. 8
class EntityTest(unittest.TestCase):
    """Test different scenarios for the Entity class.
    """
    def __init__(self, *args, **kwargs):
        """EntityTest base class constructor.
        """
        super(EntityTest, self).__init__(*args, **kwargs)

        self.config = {}
        with open("./config.json", "r") as f:
            self.config = json.load(f)

        self.testVector = {}
        with open("./tests/entity/testVector_Entity.json", "r") as f:
            self.testVector = json.load(f)

    def test_latest(self):
        """Function to test the latest search query.
        """

        for e in self.testVector["entity_ids"]:
            self.entity = Entity(e)

            for time_param in self.testVector["temporal_params"]:
                df = self.entity.latest()

                self.assertNotEqual(df.shape[0], 0)
                self.assertGreaterEqual(df.shape[0], 1)
                self.assertIsNotNone(df)
                self.assertIsInstance(df, pd.DataFrame)

            print("*"*60 + "\n" + "*"*60)
            print(
                f"DataFrame has {df.shape[0]} rows and {df.shape[1]} columns.",
                end="\n-----------\n"
                )
            print(f"Columns in DataFrame:\n{df.columns}", end="\n-----------\n")
            print(df.head())
            print("*"*60)

    def test_during_search(self):
        """Function to test the temporal during search query.
        """

        for e in self.testVector["entity_ids"]:
            self.entity = Entity(e, token=self.config["headers"]["token"])

            for time_param in self.testVector["temporal_params"]:
                df = self.entity.during_search(
                    start_time=time_param["start_time"],
                    end_time=time_param["end_time"]
                )

                self.assertNotEqual(df.shape[0], 0)
                self.assertGreaterEqual(df.shape[0], 1)
                self.assertIsNotNone(df)
                self.assertIsInstance(df, pd.DataFrame)

            print("*"*60 + "\n" + "*"*60)
            print(
                f"DataFrame has {df.shape[0]} rows and {df.shape[1]} columns.",
                end="\n-----------\n"
                )
            print(f"Columns in DataFrame:\n{df.columns}", end="\n-----------\n")
            print(df.head())
            print("*"*60)

    def test_download(self):
        """Function to test the downloading of file in csv format.
        """
        file_name = "test_download_file"

        for e in self.testVector["entity_ids"]:
            self.entity = Entity(e, token=self.config["headers"]["token"])

            for time_param in self.testVector["temporal_params"]:
                df = self.entity.during_search(
                    start_time=time_param["start_time"],
                    end_time=time_param["end_time"]
                )

                self.assertNotEqual(df.shape[0], 0)
                self.assertGreaterEqual(df.shape[0], 1)
                self.assertIsNotNone(df)
                self.assertIsInstance(df, pd.DataFrame)
            
            for file_type in self.testVector["file_types"]:
                self.entity.download(file_name, file_type)

                zf = zipfile.ZipFile(f"{file_name}.zip")
                
                if file_type == "csv":
                    df_csv = pd.read_csv(zf.open(f"{file_name}.{file_type}"))
                    self.assertIsNotNone(df_csv)
                    self.assertIsInstance(df_csv, pd.DataFrame)
                    os.remove(f"{file_name}.zip")

                elif file_type == "json":
                    df_json = pd.read_json(zf.open(f"{file_name}.{file_type}"), orient='records')
                    self.assertIsNotNone(df_json)   
                    self.assertIsInstance(df_json, pd.DataFrame)      
                    os.remove(f"{file_name}.zip")           
                
                else:
                    raise RuntimeError(f"File type '{file_type}' is not supported.")