def test_update():
    """Run an update in development for every previously imported project."""
    pickle_path = 'firebase_imported_projects.pickle'
    with open(pickle_path, 'rb') as handle:
        projects = pickle.load(handle)

    # each entry is a tuple; the project id sits at position 1
    ids = [entry[1] for entry in projects]
    BaseFunctions.run_update('development', ids)
Ejemplo n.º 2
0
def import_process():
    """Run the development import and persist the imported project keys."""
    firebase, postgres = BaseFunctions.get_environment('development')
    fb_db = firebase.database()

    new_projects = BaseFunctions.run_import('development')

    # save all keys to disk, merging with keys from any earlier run
    pickle_path = 'firebase_imported_projects.pickle'
    if os.path.isfile(pickle_path):
        with open(pickle_path, 'rb') as handle:
            previous_projects = pickle.load(handle)
        new_projects = new_projects + previous_projects

    with open(pickle_path, 'wb') as handle:
        pickle.dump(new_projects, handle)
def upload_sample_data_to_firebase():
    """Push sample projects to the firebase imports node and record the keys."""
    firebase, postgres = BaseFunctions.get_environment('development')
    fb_db = firebase.database()

    with open('sample_data.json') as json_file:
        sample_data = json.load(json_file)

    # upload sample data to firebaseio.com/imports, collecting the push keys
    uploaded_project_keys = [
        fb_db.child("imports").push(sample_data[entry])['name']
        for entry in sample_data
    ]

    # save all keys to disk, appending to keys from earlier runs
    pickle_path = 'firebase_uploaded_projects.pickle'
    if os.path.isfile(pickle_path):
        with open(pickle_path, 'rb') as handle:
            previous_keys = pickle.load(handle)
        uploaded_project_keys = previous_keys + uploaded_project_keys
    with open(pickle_path, 'wb') as handle:
        pickle.dump(uploaded_project_keys, handle)

    # attach the submission key to each uploaded import
    for import_key in uploaded_project_keys:
        fb_db.update(
            {"imports/{}/key".format(import_key): auth.get_submission_key()})
Ejemplo n.º 4
0
    def delete_project(self, firebase, postgres):
        """
        The function to delete all project related information in firebase and postgres
            This includes information on groups

        Parameters
        ----------
        firebase : pyrebase firebase object
            initialized firebase app with admin authentication
        postgres : database connection class
            The database connection to postgres database

        Returns
        -------
        bool
            True if successful. False otherwise
        """

        logging.warning('%s - delete_project - start deleting project' %
                        self.id)

        # remove the project from local disk, postgres and firebase in turn
        project_ref = (self.id, self.import_key)
        b.delete_local_files(*project_ref)
        b.delete_project_postgres(*project_ref, postgres)
        b.delete_project_firebase(*project_ref, firebase)

        logging.warning('%s - delete_project - finished delete project' %
                        self.id)
        return True
def imports_to_postgres(firebase):
    """Copy all import entries from firebase into the postgres database.

    For each entry under the firebase ``imports`` node, the kml geometry is
    validated with ogr, the import object is initialized and then written to
    postgres. Imports with fewer than 4 project fields or invalid/empty
    geometry are skipped.

    Parameters
    ----------
    firebase : pyrebase firebase object
        initialized firebase app with admin authentication
    """
    logging.info('Imports import started')

    fb_db = firebase.database()
    # get a dict with all imports
    imports = fb_db.child("imports").get().val()
    raw_geom = 'data/check_kml.kml'

    # loop over imports
    for import_key, import_dict in imports.items():
        # default to project type 1 when the import does not specify one
        project_type = import_dict.get('projectType', 1)

        if 'tileServer' not in import_dict:
            import_dict['tileServer'] = 'bing'

        # skip imports with incomplete project information
        if len(import_dict['project'].keys()) < 4:
            continue

        # older imports used 'projectDescription' instead of 'projectDetails'
        if 'projectDescription' in import_dict['project']:
            import_dict['project']['projectDetails'] = import_dict['project'][
                'projectDescription']

        # write the kml to disk so ogr can validate the geometry
        with open(raw_geom, 'w') as geom_file:
            geom_file.write(import_dict['kml'])

        try:
            if not _kml_has_features(raw_geom):
                continue
        finally:
            # always clean up the temporary kml file; the original leaked it
            # whenever the geometry check failed
            os.remove(raw_geom)

        # now let's init the import
        imp = b.init_import(project_type, import_key, import_dict)

        # set import in postgres
        # NOTE(review): 'postgres' is not defined in this function and must
        # come from module scope -- verify against the caller.
        imp.set_import_postgres(postgres)

    del fb_db


def _kml_has_features(path):
    """Return True if the kml file at *path* holds at least one feature."""
    try:
        driver = ogr.GetDriverByName('KML')
        datasource = driver.Open(path, 0)
        layer = datasource.GetLayer()
        return layer.GetFeatureCount() > 0
    except Exception:
        # unreadable or invalid kml counts as having no features
        return False
Ejemplo n.º 6
0
    def __init__(
        self,
        project_id: int,
        firebase: object,
        postgres: object,
    ) -> object:
        """Initialize a project from its firebase record.

        Parameters
        ----------
        project_id : int
            the id of the project
        firebase : pyrebase firebase object
            initialized firebase app with admin authentication
        postgres : database connection class
            The database connection to postgres database

        Raises
        ------
        Exception
            if the project exists in neither firebase nor postgres
        """

        logging.warning('%s - __init__ - start init' % project_id)

        # set basic project information
        self.id = project_id

        # check if project exists in firebase and postgres
        if not b.project_exists(self.id, firebase, postgres):
            raise Exception("can't init project.")

        fb_db = firebase.database()
        project_data = fb_db.child("projects").child(
            project_id).get().val()

        # we set attributes based on the data from firebase
        self.import_key = project_data['importKey']
        self.name = project_data['name']
        self.image = project_data['image']
        self.look_for = project_data['lookFor']
        self.project_details = project_data['projectDetails']
        self.verification_count = int(project_data['verificationCount'])
        self.is_featured = project_data['isFeatured']
        self.state = project_data['state']
        self.group_average = project_data['groupAverage']
        self.progress = project_data['progress']
        self.contributors = project_data['contributors']
        # old projects might not have an info field in firebase;
        # dict.get replaces the original bare except (which hid real errors)
        self.info = project_data.get('info', {})
Ejemplo n.º 7
0
def test_update_old_projects():
    """Run an update in production for a fixed set of old projects."""
    old_project_ids = [3, 124, 5519, 13523]
    BaseFunctions.run_update('production', old_project_ids)
Ejemplo n.º 8
0
        pass

    try:
        os.remove(DATA_PATH +
                  '/input_geometries/raw_input_{}.geojson'.format(import_key))
        os.remove(
            DATA_PATH +
            '/input_geometries/valid_input_{}.geojson'.format(import_key))
    except:
        os.remove(DATA_PATH +
                  '/input_geometries/raw_input_{}.kml'.format(import_key))


if __name__ == '__main__':
    # tear down the sample projects created during development testing
    modus = 'development'
    firebase, postgres = BaseFunctions.get_environment(modus)

    # the import step stored (import_key, project_id, project_type) tuples
    filename = 'firebase_imported_projects.pickle'
    with open(filename, 'rb') as f:
        imported_projects = pickle.load(f)

    print(imported_projects)

    # delete each project's firebase data, postgres results and local files
    for import_key, project_id, project_type in imported_projects:
        delete_sample_data_from_firebase(firebase, project_id, import_key)
        delete_sample_results_from_postgres(postgres, project_id, import_key)
        delete_local_files(project_id, import_key)

    # remove the bookkeeping files so the next test run starts clean
    os.remove('firebase_imported_projects.pickle')
    os.remove('firebase_uploaded_projects.pickle')
    print(
def simulate_user_contributions(
    project_id,
    project_type,
    modus,
    user_id='test_user',
):
    """Simulate app users contributing results for a project.

    Fetches the five groups with the lowest completed count via the firebase
    REST API, creates results in firebase for (a subset of) their tasks and
    increments each group's completed count.

    Parameters
    ----------
    project_id : int or str
        the id of the project to contribute to
    project_type : int
        1 = build area project, 2 = footprint project
    modus : str
        the environment to use, e.g. 'development'
    user_id : str, optional
        the user id attached to the simulated results
    """
    firebase, postgres = BaseFunctions.get_environment(modus)

    # get groups from firebase for this project
    fb_db = firebase.database()

    # the groups are fetched through the plain REST endpoint instead of the
    # pyrebase client -- temporary workaround because of faulty string encoding
    firebase_instance = 'dev-mapswipe'
    groups_url = 'https://{firebase_instance}.firebaseio.com/groups/{project_id}.json?orderBy=%22completedCount%22&limitToFirst=5'.format(
        project_id=project_id, firebase_instance=firebase_instance)

    groups = json.loads(requests.get(groups_url).text)
    for group_id, val in groups.items():

        # random.sample needs a sequence; passing the dict_keys view raised
        # TypeError on Python >= 3.11 (deprecated since 3.9)
        task_ids = list(val['tasks'])

        if project_type == 1:
            # build area: answer roughly half of the tasks at random
            count = val['count']
            random_sample = random.sample(task_ids, int(count / 2))
            for task_id in random_sample:
                create_build_area_result_in_firebase(
                    project_id,
                    task_id,
                    user_id,
                    firebase,
                )
            print("created build area results in firebase")
        elif project_type == 2:
            # footprint: answer every task of the group
            for task_id in task_ids:
                create_footprint_result_in_firebase(
                    project_id,
                    task_id,
                    user_id,
                    firebase,
                )
            print("created footprint results in firebase")

        # update groups completed count
        old_completed_count = fb_db.child("groups").child(project_id).child(
            group_id).child("completedCount").get().val()
        fb_db.child("groups").child(project_id).child(group_id).update(
            {"completedCount": old_completed_count + 1})
        print("updated completed count")
####################################################################################################

if __name__ == '__main__':
    # entry point for the auxiliary maintenance operations
    start_time = time.time()
    args = parser.parse_args()

    # one log file per operation, appended across runs
    logging.basicConfig(
        format="%(asctime)s :: %(name)s :: %(levelname)s :: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        filename='./logs/aux_{}.log'.format(args.operation),
        filemode="a",
        level=logging.INFO)

    logger = logging.getLogger(name="aux_{}".format(args.operation))

    firebase, postgres = b.get_environment(args.modus)

    logging.info("Operation: %s started." % args.operation)

    cwd = os.getcwd()

    # dispatch on the requested operation; the commented-out calls are
    # alternative steps that are currently disabled
    if args.operation == 'download':
        download_all_groups_tasks(firebase)
        #download_users(firebase)
    elif args.operation == 'import':
        #imports_to_postgres(firebase)
        #projects_to_postgres(firebase, postgres)
        import_all_groups_tasks(postgres)
        #os.chdir(cwd)
        #import_users(postgres)
        pass
def test_transfer_results():
    """Run the transfer-results process in the development environment."""
    BaseFunctions.run_transfer_results('development')
Ejemplo n.º 12
0
def test_export_old_projects():
    """Run an export in production for a fixed set of old projects."""
    old_project_ids = [3, 124, 5519, 13523]
    BaseFunctions.run_export('production', old_project_ids)
Ejemplo n.º 13
0
                        level=logging.WARNING,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d %H:%M:%S',
                        filemode='a')

    # create a variable that counts how often we looped the process
    counter = 0

    while counter < args.max_iterations:
        counter += 1
        logging.warning(
            "=== === === === ===>>> started {} <<<=== === === === ===".format(
                args.process))
        try:
            if args.process == 'import':
                b.run_import(args.modus)
            elif args.process == 'update':
                b.run_update(args.modus, args.filter)
            elif args.process == 'transfer_results':
                b.run_transfer_results(args.modus)
            elif args.process == 'export':
                b.run_export(args.modus, args.filter)
            elif args.process == 'delete':
                b.run_delete(args.modus, args.list)
            elif args.process == 'archive':
                b.run_archive(args.modus)
            logging.warning(
                "=== === === === ===>>> finished {} <<<=== === === === ===".
                format(args.process))

        except Exception as error: