def delete_all_users():
    users = get_users()
    for user in users:
        delete_user(user['_id'])
    users = get_users()
    if len(users) == 0:
        print('deleted all users')
    else:
        print("can't delete all users")

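# The CRUD helpers used by the test snippets in this section (get_users,
# add_user, update_user, delete_user) are defined elsewhere. A minimal sketch
# assuming they wrap a small REST API; BASE_URL and the endpoint layout are
# hypothetical, not the project's actual backend:
import requests

BASE_URL = 'http://localhost:5000/api/users'  # hypothetical endpoint

def get_users():
    # return the full user list as parsed JSON
    return requests.get(BASE_URL).json()

def add_user(user):
    return requests.post(BASE_URL, json=user).text

def update_user(uid, user):
    return requests.put('%s/%s' % (BASE_URL, uid), json=user).text

def delete_user(uid):
    return requests.delete('%s/%s' % (BASE_URL, uid)).text
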
def func2c(self):
    """ test_2 support """
    users = get_users()
    n = 0
    for user in users:
        if user['name'].startswith(prefix):
            n += 1
            uid = user['_id']
            print('deleting', uid)
            delete_user(uid)
    users = get_users()
    print('deleted %d users' % n)
    print(len(users), 'remaining users')

import multiprocessing

def main():
    if PROCNAME:
        procname.setprocname("Gmail:main")
    users = get_users(DOMAIN)
    logger.info("Running with %s users and %s threads", len(users), MAX_THREADS)
    manager = multiprocessing.Manager()
    queue = manager.Queue()
    pool = multiprocessing.Pool(MAX_THREADS, runuser_init, [queue])
    results = []
    parameters = []
    for item in users:
        parameters.append((item, queue))
    # leftover debug lines (would run the last user synchronously in the main
    # process before the pool starts), kept commented out:
    # runuser_init(queue)
    # print runuser(item, queue)
    try:
        r = pool.map_async(runuser, parameters, callback=results.append)
        main_progressbar(len(users), queue)
        r.wait()
    except KeyboardInterrupt:
        # terminate() lives on the Pool, not on the AsyncResult
        pool.terminate()
        pool.join()

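# A minimal sketch of the worker side main() assumes: runuser_init is the Pool
# initializer that stashes the shared progress queue in each worker process,
# and runuser receives one (user, queue) tuple per map_async item. The bodies
# below are assumptions inferred from the call sites, not the original code.
_progress_queue = None

def runuser_init(queue):
    # runs once in each worker process
    global _progress_queue
    _progress_queue = queue

def runuser(args):
    # map_async hands over each parameters entry as a single tuple
    user, queue = args
    backup = GmailBackup(user)  # hypothetical per-user work
    backup.initialize()
    backup.run()
    queue.put(user)  # lets main_progressbar() track completed users
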
def main(): """ Main method: parses command line arguments, fetch full list of users and execute backups """ run_services = [] arguments = docopt(__doc__, version='Google Backup 0.1') if arguments["full"]: run_services.extend(SERVICES) else: for service in SERVICES: if arguments[service]: run_services.append(service) if len(run_services) == 0 or len(arguments["<user>"]) == 0: print __doc__ return 1 users = arguments["<user>"] if "all" in arguments["<user>"]: users = get_users(DOMAIN) for service in run_services: for user in users: backup = None if service == 'gmail': backup = GmailBackup(user) elif service == 'drive': backup = DriveBackup(user) elif service == 'calendar': backup = CalendarBackup(user) if backup: backup.initialize() backup.run() return 0
def main():
    users = get_users(DOMAIN)
    logger.info("Running with %s users", len(users))
    for user in users:
        drivebackup = DriveBackup(user)
        drivebackup.initialize()
        drivebackup.run()

def load(self, force=False):
    if force or self.need_reload():
        contacts = get_users(settings["google-domain"])
        # write the data file via a context manager instead of leaking the handle
        with open(self._d_file, "w") as datafile:
            json.dump(contacts, datafile)
        with open(self._ts_file, "w") as ts_file:
            ts_file.write(str(datetime.datetime.now()))
    else:
        with open(self._d_file) as datafile:
            contacts = json.load(datafile)
    self.data = contacts

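# A possible need_reload() to pair with load(), assuming the cache is
# considered stale after one day; the staleness rule is an assumption, only
# the _ts_file/_d_file attributes come from load() itself.
import datetime
import os

def need_reload(self):
    if not (os.path.exists(self._ts_file) and os.path.exists(self._d_file)):
        return True  # nothing cached yet
    with open(self._ts_file) as ts_file:
        # load() wrote str(datetime.datetime.now()), which fromisoformat parses
        written = datetime.datetime.fromisoformat(ts_file.read().strip())
    return datetime.datetime.now() - written > datetime.timedelta(days=1)
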
def test_1(self):
    """ unit test """
    user_list = random_user_list(prefix)
    users = get_users()
    length = len(users)
    print(length, 'original users')
    print('adding', len(user_list), 'users')
    for user in user_list:
        add_user(user)
    users = get_users()
    print(len(users), 'users')
    self.assertEqual(len(users), length + len(user_list))
    print('deleting', len(user_list), 'users')
    for user in users:
        # match on the same prefix the users were created with
        if user['name'].startswith(prefix):
            delete_user(user['_id'])
    users = get_users()
    print(len(users), 'users')
    self.assertEqual(len(users), length)

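# A plausible random_user_list() as used by test_1: it presumably generates
# users whose names share the given prefix. The count and name scheme are
# assumptions; 'name' and 'email' are the fields the surrounding snippets
# read, and '_id' is presumably assigned server-side.
import random
import string

def random_user_list(prefix, count=5):
    users = []
    for _ in range(count):
        suffix = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
        users.append({'name': prefix + suffix,
                      'email': prefix + suffix + '@example.com'})
    return users
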
def func2b(self):
    """ test_2 support """
    users = get_users()
    n = 0
    for user in users:
        if user['name'].startswith(prefix):
            n += 1
            uid = user['_id']
            user['email'] = 'new_' + user['email']
            text = update_user(uid, user)
            print(text)
    print('updated %d users' % n)

def main():
    users = get_users(DOMAIN)
    logger.info("Running with %s users", len(users))
    total_entries = 0
    for index, user in enumerate(users):
        if user in CALENDAR_IGNORE_USERS:
            logger.info("Skipping %s due to ignore list", user)
            continue
        logger.info("Status: %s/%s", index + 1, len(users))
        calendarbackup = CalendarBackup(user)
        calendarbackup.initialize()
        total_entries += calendarbackup.run()
    logger.info("Finished downloading %s entries for %s users",
                total_entries, len(users))

# JtR hash type
parser.add_argument('-format', help='JtR hash format (Default: "Auto-detect")')
# Page ID of the company/organization etc. to get users from
parser.add_argument('-pageid', help='Page ID of company/organization etc to get users from')
args = parser.parse_args()

# instantiate our Extractor class
extractor = Extractor()
# log in to Facebook in order to be able to retrieve data
extractor.fb_login()

# find users
if args.pageid:
    get_users(extractor, args.pageid)
    exit()

# extract user data
extractor.extract(test=args.test, targets_filename=args.file)
# save extracted data to the results folder
extractor.save()
# add extra, user/company/organization-specific words to the wordlists
add_words()
# generate wordlists and crack the hashes
user_wordlists = generate_words()
crack(user_wordlists, args.test, args.format, args.rules, config)

import gc
import logging
import os
import sys

def run_import(user_project_list, timestamp, initial_setup):
    print(os.getcwd())
    # print('start run import: ', get_memory_consumption())
    logging.basicConfig(filename='import.log',
                        level=logging.WARNING,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d %H:%M:%S',
                        filemode='a')
    logging.warning('>>> start of script import.py')

    # check if the user provided an argument to set up the database
    if initial_setup:
        print('start the initial setup')
        logging.warning('start the initial setup')
        project_table_name, results_table_name, task_table_name = \
            create_database_and_tables()
        users_table_name = 'users'
    else:
        # define some variables that are related to the configuration
        # of the psql database
        project_table_name = 'projects'
        results_table_name = 'results'
        task_table_name = 'tasks'
        users_table_name = 'users'

    # get the list of all project ids if no list of projects is provided
    if not user_project_list:
        project_list = get_all_projects()
        project_list = check_projects(project_list)
        print('got all projects from firebase: ', project_list)
        logging.warning('got all projects from firebase: %s' % project_list)
    else:
        print('user provided project ids: ', user_project_list)
        logging.warning('user provided project ids: %s' % user_project_list)
        project_list = check_projects(user_project_list)

    if not project_list:
        print('there are no projects to process. stop here.')
        logging.warning('there are no projects to process. stop here.')
        sys.exit(0)

    # get project information
    new_projects, updated_projects, project_dict = get_projects(
        project_list, project_table_name)
    print('new projects in firebase: ', new_projects)
    logging.warning('new projects in firebase: %s' % new_projects)
    print('updated projects in firebase: ', updated_projects)
    logging.warning('updated projects in firebase: %s' % updated_projects)
    logging.warning('get_projects() was successful')

    # use the timestamp provided by the user, otherwise get it from the database
    if timestamp:
        print('use timestamp provided by user')
        logging.warning('use timestamp provided by user')
    else:
        timestamp = get_last_timestamp(results_table_name)
        # timestamp = 1509637220000
        # timestamp = int((time.time() - 3600) * 1000)  # the last hour, in milliseconds
    print(timestamp)

    # get the latest results; returns a list with the project ids of the latest results
    changed_projects = get_results(results_table_name, timestamp, 500000)
    print('projects with new results: ', changed_projects)
    logging.warning('get_results() was successful')

    # add the projects which need an update based on results to the ones
    # based on contributors | progress | state, i.e. merge the two lists
    # of changed projects and remove the duplicates
    updated_projects = updated_projects + list(
        set(changed_projects) - set(updated_projects))

    # remove new projects from the updated projects list; when importing
    # new projects we already get the latest completed count
    updated_projects = list(set(updated_projects) - set(new_projects))
    print('new projects: ', new_projects)
    logging.warning('new projects: %s' % new_projects)
    print('updated projects: ', updated_projects)
    logging.warning('updated projects: %s' % updated_projects)

    # get tasks for new projects
    get_tasks(new_projects, task_table_name)
    logging.warning('get_tasks() was successful')

    # get the latest completed count for projects that need an update
    get_tasks_completed_count(updated_projects, task_table_name)
    logging.warning('get_tasks_completed_count() was successful')

    # save project data in the psql database
    save_projects_psql(project_table_name, project_dict)
    print('saved project info to psql')
    logging.warning('saved project info to psql')

    # get user information
    get_users(users_table_name)
    logging.warning('get_users() was successful')

    logging.warning('<<< end of script import.py')
    # print('after garbage collection: ', get_memory_consumption())

    # garbage collection
    for i in range(2):
        n = gc.collect()

    return new_projects, updated_projects

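# A hedged example of how run_import() might be invoked from the command line;
# the flag names and the argparse wrapper are illustrative, not the project's
# actual CLI.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--projects', nargs='*', default=None,
                        help='limit the import to these project ids')
    parser.add_argument('-t', '--timestamp', type=int, default=None,
                        help='only fetch results newer than this timestamp (ms)')
    parser.add_argument('-s', '--setup', action='store_true',
                        help='create the database and tables before importing')
    args = parser.parse_args()
    run_import(args.projects, args.timestamp, args.setup)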