def test_with_user_ids(self):
    """A user passed explicitly by id should exist in postgres afterwards."""
    update_data.update_user_data([self.user_id])
    pg_db = auth.postgresDB()
    # Parameterized query instead of str.format: consistent with the other
    # tests in this module and not prone to SQL injection.
    sql_query = "SELECT * FROM users WHERE user_id = %(user_id)s"
    result = pg_db.retr_query(sql_query, {"user_id": self.user_id})
    self.assertIsNotNone(result)
 def test_no_users_in_postgres(self):
     """Test update users when no users are in postgres yet."""
     update_data.update_user_data()
     db = auth.postgresDB()
     rows = db.retr_query(
         "SELECT * FROM users WHERE user_id = ANY( %(user_ids)s )",
         {"user_ids": self.user_ids},
     )
     # Every known user id should have produced exactly one row.
     self.assertEqual(len(rows), self.num_users)
def run_firebase_to_postgres() -> list:
    """Update users and transfer results from Firebase to Postgres."""
    # Refresh user and project metadata before moving results.
    update_data.update_user_data()
    update_data.update_project_data()
    transferred_project_ids = transfer_results.transfer_results()
    # Notify mappers about progress for every project that received results.
    for transferred_id in transferred_project_ids:
        send_progress_notification(transferred_id)
    return transferred_project_ids
 def test_no_users_in_postgres(self):
     """Test update users when no users are in postgres yet."""
     update_data.update_user_data()
     pg_db = auth.postgresDB()
     # Parameterized query instead of str.format: consistent with the other
     # tests in this module and not prone to SQL injection.
     sql_query = "SELECT * FROM users WHERE user_id = %(user_id)s"
     result = pg_db.retr_query(sql_query, {"user_id": self.user_id})
     self.assertIsNotNone(result)
 def transfer(current_results):
     """Write `current_results` to postgres; no-op if Firebase had none.

     NOTE(review): `project_id` is a free variable from the enclosing
     scope — this function is defined inside a per-project transfer.
     """
     # Guard clause keeps the main path unindented.
     if current_results is None:
         logger.info(f"{project_id}: No results in Firebase")
         return dict()

     # Users must exist in postgres before their results are inserted
     # (user_id is a key for the results).
     user_ids = get_user_ids_from_results(current_results)
     update_data.update_user_data(user_ids)
     results_file = results_to_file(current_results, project_id)
     save_results_to_postgres(results_file)
     return dict()
    def test_last_updated_users(self):
        """Test update users when some users are in postgres."""
        update_data.update_user_data()
        user_id = set_up.create_test_user("tile_map_service_grid",
                                          "test_user_2")
        self.user_ids.append(user_id)
        update_data.update_user_data()

        pg_db = auth.postgresDB()
        # Parameterized query instead of str.format: consistent with the
        # other tests in this module and not prone to SQL injection.
        sql_query = "SELECT * FROM users WHERE user_id = %(user_id)s"
        result = pg_db.retr_query(sql_query, {"user_id": user_id})
        self.assertIsNotNone(result)
def transfer_results_for_project(project_id, results, filter_mode: bool = False):
    """Transfer the results for a specific project.

    Save results into an in-memory file.
    Copy the results to postgres.
    Delete results in firebase.

    Args:
        project_id: id of the project whose results are transferred.
        results: results fetched from Firebase, or None if there are none.
        filter_mode: when True, save_results_to_postgres filters out
            invalid tasks; set on the retry after a ForeignKeyViolation.

    We are NOT using a Firebase transaction functions here anymore.
    This has caused problems, in situations where a lot of mappers are
    uploading results to Firebase at the same time. Basically, this is
    due to the behaviour of Firebase Transaction function:
        "If another client writes to this location
        before the new value is successfully saved,
        the update function is called again with the new current value,
        and the write will be retried."
    (source: https://firebase.google.com/docs/reference/admin/python/firebase_admin.db#firebase_admin.db.Reference.transaction)  # noqa
    Using Firebase transaction on the group level
    has turned out to be too slow when using "normal" queries,
    e.g. without using threading. Threading should be avoided here
    as well to not run into unforeseen errors.
    For more details see issue #478.
    """

    if results is None:
        logger.info(f"{project_id}: No results in Firebase")
    else:
        # First we check for new users in Firebase.
        # The user_id is used as a key in the postgres database for the results
        # and thus users need to be inserted before results get inserted.
        results_user_id_list = get_user_ids_from_results(results)
        update_data.update_user_data(results_user_id_list)

    try:
        # Results are dumped into an in-memory file.
        # This allows us to use the COPY statement to insert many
        # results at relatively high speed.
        results_file = results_to_file(results, project_id)
        truncate_temp_results()
        save_results_to_postgres(results_file, project_id, filter_mode=filter_mode)
    except psycopg2.errors.ForeignKeyViolation as e:
        # Build the message once so Sentry and the log can never drift apart.
        message = (
            "could not transfer results to postgres due to ForeignKeyViolation: "
            f"{project_id}; filter_mode={filter_mode}"
        )
        sentry.capture_exception(e)
        sentry.capture_message(message)
        logger.exception(e)
        logger.warning(message)

        # There is an exception where additional invalid tasks are in a group.
        # If that happens we arrive here and add the flag filtermode=true
        # to this function, which could solve the issue in save_results_to_postgres.
        # If it does not solve the issue we arrive again but
        # since filtermode is already true, we will not try to transfer results again.
        if not filter_mode:
            transfer_results_for_project(project_id, results, filter_mode=True)
    except Exception as e:
        message = f"could not transfer results to postgres: {project_id}"
        sentry.capture_exception(e)
        sentry.capture_message(message)
        logger.exception(e)
        logger.warning(message)
    else:
        # It is important here that we first insert results into postgres
        # and then delete these results from Firebase.
        # In case something goes wrong during the insert, results in Firebase
        # will not get deleted.
        delete_results_from_firebase(project_id, results)
        logger.info(f"{project_id}: Transferred results to postgres")
 def test_with_user_ids(self):
     """All explicitly passed user ids should end up as rows in postgres."""
     update_data.update_user_data(self.user_ids)
     db = auth.postgresDB()
     query = "SELECT * FROM users WHERE user_id = ANY ( %(user_ids)s )"
     rows = db.retr_query(query, {"user_ids": self.user_ids})
     self.assertEqual(len(rows), self.num_users)