    def test_loadJSONFile(self):
        json_content = utils.loadJSONFile(filename="t_utils.json", path=Path("./tests"))
        self.assertEqual(json_content['first'], "the worst")
        self.assertEqual(json_content['second'], ["the best", "born, second place"])
        self.assertTrue("fourth" in json_content.keys())
        self.assertEqual(json_content['fourth']['a'], "why's it out of order?")
        self.assertEqual(json_content['fourth']['b'], 4)
        self.assertEqual(json_content['fourth']['c'], False)
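For reference, a tests/t_utils.json fixture consistent with these assertions would look roughly like the following. This is reconstructed from the asserts above; any keys the test does not check (e.g. a possible "third") are omitted here.

{
    "first": "the worst",
    "second": ["the best", "born, second place"],
    "fourth": {
        "a": "why's it out of order?",
        "b": 4,
        "c": false
    }
}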
Example #2
    def testUpdateDataset(self):
        data = loadJSONFile('data.json')
        status, dataset = metax.create_dataset(data)
        self.assertIn(status, self.OK, "could not create dataset")

        #data = loadJSONFile('metax_dataset.json')
        dataset['research_dataset']['title']['en'] = 'title updated'
        status, updated_data = metax.update_dataset(dataset['id'], dataset)
        self.assertIn(status, self.OK, "Metax update failure")
        urn = updated_data["identifier"]
        etsin_status, etsin_data = etsin.view_dataset(urn)
        self.assertIn(etsin_status, self.OK, "Etsin failure")
Example #3
    def testCreateDataset(self):
        # loading the example dataset

        data = loadJSONFile('data.json')
        status, cdata = metax.create_dataset(data)

        self.assertIn(status, self.OK, "could not create dataset")
        urn = cdata["identifier"]
        time.sleep(10)

        etsin_status, etsin_data = etsin.view_dataset(urn)
        self.assertIn(etsin_status, self.OK,
                      "Etsin could not found the dataset")
Example #4
    def testRejectDataset(self):
        # Create a dataset in Metax and reject it for preservation

        # loading the example dataset
        data = loadJSONFile('data.json')

        # creating a dataset
        status, cdata = metax.create_dataset(data)
        self.assertIn(status, self.OK, "Metax create dataset fails")
        id = cdata['id']

        # rejecting the dataset
        status = dpr.reject_dataset(id)
        self.assertIn(status, self.OK, "dpres dataset rejection fails")
Example #5
    def testDeleteDataset(self):

        data = loadJSONFile('data.json')

        status, cdata = metax.create_dataset(data)
        self.assertIn(status, self.OK, "could not create dataset")
        urn = cdata["identifier"]

        time.sleep(2)
        #data = loadJSONFile('metax_dataset.json')
        status = metax.delete_dataset(cdata['id'])
        self.assertIn(status, self.OK, "Metax dataset delete failure")

        etsin_status, etsin_data = etsin.view_dataset(urn)
        self.assertIn(etsin_status, self.FAIL,
                      "Etsin found the deleted dataset")
Example #6
    def testPreserveDataset(self):
        # Create a dataset in Metax and preserve it

        # loading the example dataset
        data = loadJSONFile('data.json')

        # creating a dataset
        status, cdata = metax.create_dataset(data)

        self.assertIn(status, self.OK, "Metax create dataset fails")
        id = cdata['id']

        # preserving the dataset
        status = dpr.preserve_dataset(id)

        self.assertIn(status, self.OK, "dpres preserve fails")
Example #7
    def testRemoveDataset(self):
        # Create a dataset in Metax, preserve it, and then remove it from preservation

        # loading the example dataset
        data = loadJSONFile('data.json')

        # creating a dataset
        status, cdata = metax.create_dataset(data)
        self.assertIn(status, self.OK, "create dataset fails")
        id = cdata['id']

        # preserving the dataset
        status = dpr.preserve_dataset(id)
        self.assertIn(status, self.OK, "dataset preservation fails")
        time.sleep(5)

        # removing the dataset
        status = dpr.remove_dataset(id)
        self.assertIn(status, self.OK, "dataset removal fails")
Example #8
    def UpdateFileExportList(self, num_sess: int) -> None:
        self._backupFileExportList()
        existing_csvs = {}
        try:
            existing_csvs = utils.loadJSONFile(filename="file_list.json", path=self._data_dir)
        except FileNotFoundError:
            Logger.Log("file_list.json does not exist.", logging.WARNING)
        except Exception as err:
            msg = f"Could not load file list. {type(err)} {str(err)}"
            Logger.Log(msg, logging.ERROR)
        finally:
            with open(self._data_dir / "file_list.json", "w") as existing_csv_file:
                Logger.Log(f"opened csv file at {existing_csv_file.name}", logging.INFO)
                if self._game_id not in existing_csvs.keys():
                    existing_csvs[self._game_id] = {}
                existing_data = existing_csvs[self._game_id][self._dataset_id] if self._dataset_id in existing_csvs[self._game_id].keys() else None
                # keep any new zip paths; otherwise fall back to the paths recorded under the *_file keys written below.
                population_path = str(self._zip_names["population"]) if self._zip_names["population"] is not None \
                                  else (existing_data["population_file"] if (existing_data is not None and "population_file" in existing_data.keys()) else None)
                players_path    = str(self._zip_names["players"]) if self._zip_names["players"] is not None \
                                  else (existing_data["players_file"] if (existing_data is not None and "players_file" in existing_data.keys()) else None)
                sessions_path   = str(self._zip_names["sessions"]) if self._zip_names["sessions"] is not None \
                                  else (existing_data["sessions_file"] if (existing_data is not None and "sessions_file" in existing_data.keys()) else None)
                events_path     = str(self._zip_names["events"]) if self._zip_names["events"] is not None \
                                  else (existing_data["events_file"] if (existing_data is not None and "events_file" in existing_data.keys()) else None)
                existing_csvs[self._game_id][self._dataset_id] = \
                {
                    "ogd_revision"    : self._short_hash,
                    "start_date"      : self._date_range['min'].strftime("%m/%d/%Y") if self._date_range['min'] is not None else "Unknown",
                    "end_date"        : self._date_range['max'].strftime("%m/%d/%Y") if self._date_range['max'] is not None else "Unknown",
                    "date_modified"   : datetime.now().strftime("%m/%d/%Y"),
                    "sessions"        : num_sess,
                    "population_file" : population_path,
                    "players_file"    : players_path,
                    "sessions_file"   : sessions_path,
                    "events_file"     : events_path,
                }
                existing_csv_file.write(json.dumps(existing_csvs, indent=4))
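The resulting file_list.json is keyed by game id, then by dataset id. A sketch of the layout this method writes, with made-up ids and values (only the field names are taken from the dict literal above):

{
    "GAME_ID": {
        "GAME_ID_20210101_to_20210131": {
            "ogd_revision": "1a2b3c4",
            "start_date": "01/01/2021",
            "end_date": "01/31/2021",
            "date_modified": "02/03/2021",
            "sessions": 5280,
            "population_file": "data/GAME_ID/GAME_ID_20210101_to_20210131_1a2b3c4_population-features.zip",
            "players_file": null,
            "sessions_file": null,
            "events_file": "data/GAME_ID/GAME_ID_20210101_to_20210131_1a2b3c4_events.zip"
        }
    }
}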
Example #9
    def __init__(self,
                 schema_name: str,
                 schema_path: Path = Path("./") / os.path.dirname(__file__) / "TABLES/",
                 is_legacy: bool = False):
        """Constructor for the TableSchema class.
        Given a database connection and a game data request,
        this retrieves a bit of information from the database to fill in the
        class variables.

        :param schema_name: The filename for the table schema JSON.
        :type schema_name: str
        :param schema_path: Path to find the given table schema file, defaults to os.path.dirname(__file__)+"/TABLES/"
        :type schema_path: str, optional
        :param is_legacy: [description], defaults to False
        :type is_legacy: bool, optional
        """
        # declare and initialize vars
        self._table_format_name: str = schema_name
        # self._is_legacy         : bool                 = is_legacy
        self._columns: List[Dict[str, str]] = []
        self._column_map: Map = {}

        if not self._table_format_name.lower().endswith(".json"):
            self._table_format_name += ".json"
        schema = utils.loadJSONFile(filename=self._table_format_name,
                                    path=schema_path)

        # after loading the file, take the stuff we need and store.
        if schema is not None:
            self._columns = schema['columns']
            self._column_map = schema['column_map']
        else:
            Logger.Log(
                f"Could not find table schema at {schema_path / schema_name}",
                logging.ERROR)
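The constructor above only requires two top-level keys in the schema file: "columns" and "column_map". A minimal sketch of such a file, with invented column entries (the per-column fields in the project's real TABLES/ schemas may differ):

{
    "columns": [
        {"name": "session_id", "description": "Unique identifier for the play session"},
        {"name": "event_data", "description": "JSON blob of event parameters"}
    ],
    "column_map": {
        "session_id": "session_id",
        "event_data": "event_data"
    }
}

Loading it is then a one-liner; "SOME_TABLE" below is a stand-in for a real schema filename:

table_schema = TableSchema(schema_name="SOME_TABLE")  # loads SOME_TABLE.json from TABLES/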
Example #10
    def ZipFiles(self) -> None:
        try:
            existing_csvs = utils.loadJSONFile(filename="file_list.json", path=self._data_dir)
        except Exception:
            existing_csvs = {}
        # if we have already done this dataset before, rename old zip files
        # (of course, first check if we ever exported this game before).
        if (self._game_id in existing_csvs and self._dataset_id in existing_csvs[self._game_id]):
            existing_data = existing_csvs[self._game_id][self._dataset_id]
            _existing_pop_file     = existing_data.get('population_file', None)
            _existing_players_file = existing_data.get('players_file', None)
            _existing_sess_file    = existing_data.get('sessions_file', None)
            _existing_events_file  = existing_data.get('events_file', None)
            try:
                if _existing_pop_file is not None and self._zip_names['population'] is not None:
                    Logger.Log(f"Renaming {str(_existing_pop_file)} -> {self._zip_names['population']}", logging.DEBUG)
                    os.rename(_existing_pop_file, str(self._zip_names['population']))
                if _existing_players_file is not None and self._zip_names['players'] is not None:
                    os.rename(_existing_players_file, str(self._zip_names['players']))
                if _existing_sess_file is not None and self._zip_names['sessions'] is not None:
                    os.rename(_existing_sess_file, str(self._zip_names['sessions']))
                if _existing_events_file is not None and self._zip_names['events'] is not None:
                    os.rename(_existing_events_file, str(self._zip_names['events']))
            except Exception as err:
                msg = f"Error while setting up zip files! {type(err)} : {err}"
                Logger.Log(msg, logging.ERROR)
                traceback.print_tb(err.__traceback__)
        # for each kind of export that was requested, zip the csv/tsv up together with the
        # readme, then remove the original file.
        if self._zip_names['population'] is not None:
            with zipfile.ZipFile(self._zip_names["population"], "w", compression=zipfile.ZIP_DEFLATED) as population_zip_file:
                try:
                    population_file = Path(self._dataset_id) / f"{self._dataset_id}_{self._short_hash}_population-features.{self._extension}"
                    readme_file = Path(self._dataset_id) / "readme.md"
                    self._addToZip(path=self._file_names["population"], zip_file=population_zip_file, path_in_zip=population_file)
                    self._addToZip(path=self._readme_path,              zip_file=population_zip_file, path_in_zip=readme_file)
                    if self._file_names["population"] is not None:
                        os.remove(self._file_names["population"])
                except FileNotFoundError as err:
                    Logger.Log(f"FileNotFoundError Exception: {err}", logging.ERROR)
                    traceback.print_tb(err.__traceback__)
        if self._zip_names['players'] is not None:
            with zipfile.ZipFile(self._zip_names["players"], "w", compression=zipfile.ZIP_DEFLATED) as players_zip_file:
                try:
                    player_file = Path(self._dataset_id) / f"{self._dataset_id}_{self._short_hash}_player-features.{self._extension}"
                    readme_file = Path(self._dataset_id) / "readme.md"
                    self._addToZip(path=self._file_names["players"], zip_file=players_zip_file, path_in_zip=player_file)
                    self._addToZip(path=self._readme_path,           zip_file=players_zip_file, path_in_zip=readme_file)
                    if self._file_names["players"] is not None:
                        os.remove(self._file_names["players"])
                except FileNotFoundError as err:
                    Logger.Log(f"FileNotFoundError Exception: {err}", logging.ERROR)
                    traceback.print_tb(err.__traceback__)
        if self._zip_names['sessions'] is not None:
            with zipfile.ZipFile(self._zip_names["sessions"], "w", compression=zipfile.ZIP_DEFLATED) as sessions_zip_file:
                try:
                    session_file = Path(self._dataset_id) / f"{self._dataset_id}_{self._short_hash}_session-features.{self._extension}"
                    readme_file = Path(self._dataset_id) / "readme.md"
                    self._addToZip(path=self._file_names["sessions"], zip_file=sessions_zip_file, path_in_zip=session_file)
                    self._addToZip(path=self._readme_path,            zip_file=sessions_zip_file, path_in_zip=readme_file)
                    if self._file_names["sessions"] is not None:
                        os.remove(self._file_names["sessions"])
                except FileNotFoundError as err:
                    Logger.Log(f"FileNotFoundError Exception: {err}", logging.ERROR)
                    traceback.print_tb(err.__traceback__)
        if self._zip_names['events'] is not None:
            with zipfile.ZipFile(self._zip_names["events"], "w", compression=zipfile.ZIP_DEFLATED) as events_zip_file:
                try:
                    events_file = Path(self._dataset_id) / f"{self._dataset_id}_{self._short_hash}_events.{self._extension}"
                    readme_file = Path(self._dataset_id) / "readme.md"
                    self._addToZip(path=self._file_names["events"], zip_file=events_zip_file, path_in_zip=events_file)
                    self._addToZip(path=self._readme_path,          zip_file=events_zip_file, path_in_zip=readme_file)
                    if self._file_names["events"] is not None:
                        os.remove(self._file_names["events"])
                except FileNotFoundError as err:
                    Logger.Log(f"FileNotFoundError Exception: {err}", logging.ERROR)
                    traceback.print_tb(err.__traceback__)
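_addToZip is not shown in this listing. A minimal sketch of what it presumably does, given how it is called above (an assumption, not the project's actual implementation):

    def _addToZip(self, path, zip_file: zipfile.ZipFile, path_in_zip) -> None:
        # Write the file at `path` into the open archive under the name `path_in_zip`.
        # A missing source file raises FileNotFoundError, which the callers above catch.
        zip_file.write(path, arcname=path_in_zip)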
Example #11
    def __init__(self,
                 schema_name: str,
                 schema_path: Union[Path, None] = None):
        """Constructor for the GameSchema class.
        Given a path and filename, it loads the data from a JSON schema,
        storing the full schema into a private variable, and compiling a list of
        all features to be extracted.

        :param schema_name: The name of the JSON schema file (if .json is not the file extension, .json will be appended)
        :type schema_name: str
        :param schema_path: Path to the folder containing the JSON schema file; defaults to None, in which case ./games/<game name> is used
        :type schema_path: Path, optional
        """
        # define instance vars
        self._schema: Union[Dict, None] = {}
        self._game_name: str = schema_name.split('.')[0]
        self._detector_list: Union[List[str], None] = None
        self._feature_list: Union[List[str], None] = None
        self._min_level: Union[int, None] = None
        self._max_level: Union[int, None] = None
        self._job_map: Dict = {}
        # set instance vars
        if not schema_name.lower().endswith(".json"):
            schema_name += ".json"
        if schema_path is None:
            schema_path = Path("./games") / f"{schema_name.split('.')[0]}"
        self._schema = utils.loadJSONFile(filename=schema_name,
                                          path=schema_path)
        if self._schema is not None:
            if "detectors" in self._schema.keys():
                self._detector_list = []
                for feat_kind in ["perlevel", "per_count", "aggregate"]:
                    if feat_kind in self._schema['detectors']:
                        self._detector_list += self._schema['detectors'][
                            feat_kind].keys()
            else:
                self._schema["detectors"] = {}
                Logger.Log(
                    f"{schema_name} game schema does not define any detectors.",
                    logging.WARNING)
            if "features" in self._schema.keys():
                self._feature_list = []
                for feat_kind in ["perlevel", "per_count", "aggregate"]:
                    if feat_kind in self._schema['features']:
                        self._feature_list += self._schema['features'][
                            feat_kind].keys()
            else:
                self._schema["features"] = {}
                Logger.Log(
                    f"{schema_name} game schema does not define any features.",
                    logging.WARNING)
            # lastly, get max and min levels.
            if "level_range" in self._schema.keys():
                self._min_level = self._schema["level_range"]['min']
                self._max_level = self._schema["level_range"]['max']
            if "job_map" in self._schema.keys():
                self._job_map = self._schema["job_map"]
        else:
            Logger.Log(
                f"Could not find game schema at {schema_path / schema_name}",
                logging.ERROR)
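Putting together the keys this constructor reads, a minimal game schema file would look something like the following. The top-level keys and the perlevel/per_count/aggregate grouping come from the code above; the individual feature and detector names and their bodies are invented placeholders:

{
    "level_range": { "min": 0, "max": 34 },
    "job_map": {},
    "detectors": {
        "perlevel": {},
        "per_count": {},
        "aggregate": { "SomeDetector": { "enabled": true } }
    },
    "features": {
        "perlevel": { "SomeLevelFeature": { "enabled": true } },
        "per_count": {},
        "aggregate": { "SessionDuration": { "enabled": true } }
    }
}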