def run(): while True: try: citations = Object.factory("citations") violations = Object.factory("violations") users = Object.factory("user") # violations = Object.factory("violations") tomorrow_datetime = datetime.date.today() + datetime.timedelta(days=1) tomorrow = '{dt.month}/{dt.day}/{dt.year} 0:00:00'.format(dt = tomorrow_datetime) tomorrows_citations = citations.Query.filter(court_date=tomorrow,proactive_outreach=False) for citation in tomorrows_citations: try: user = users.Query.get(first_name=citation.first_name,last_name=citation.last_name,birthdate=citation.date_of_birth) reachout_sms = "Hey! It's a reminder from ProactiveLaw that you have a court date tomorrow in " + citation.court_location + " at " + citation.court_address + " to get more information call them at " + court_numbers[citation.court_location] message = client.messages.create(body=reachout_sms, to=user.phone_number, # Replace with your phone number from_="+13142549337") # Replace with your Twilio number # print message.sid citation.proactive_outreach = True; citation.save() except: print "No court date or not enough information" warrant_violations = violations.Query.filter(status="FTA WARRANT ISSUED",proactive_outreach=False) for violation in warrant_violations: try: citation = citations.Query.get(citation_number=violation.citation_number) user = users.Query.get(first_name=citation.first_name,last_name=citation.last_name,birthdate=citation.date_of_birth) amount_owed = violation.fine_amount + violation.court_cost reachout_sms = "Hey! It's a notification from ProactiveLaw that a warrant " + violation.warrant_number + " has been issued for your arrest for violation: " + violation.violation_number + " " + violation.violation_description + ", you owe $" + str(amount_owed) + " to the court " + citation.court_location + " at " + citation.court_address + " to get more information call them at " + court_numbers[citation.court_location] + ". 
To Pay now respond with PAY " + violation.violation_number message = client.messages.create(body=reachout_sms, to=user.phone_number, # Replace with your phone number from_="+13142549337") # Replace with your Twilio number violation.proactive_outreach = True; violation.save() except: print "No court date or not enough information" payment_violations = violations.Query.filter(status="CONT FOR PAYMENT",proactive_outreach=False) for violation in payment_violations: try: citation = citations.Query.get(citation_number=violation.citation_number) user = users.Query.get(first_name=citation.first_name,last_name=citation.last_name,birthdate=citation.date_of_birth) amount_owed = violation.fine_amount + violation.court_cost reachout_sms = "Hey! It's a notification from ProactiveLaw that a payment is owed for violation: " + violation.violation_number + " " + violation.violation_description + ", you owe $" + str(amount_owed) + " to the court " + citation.court_location + " at " + citation.court_address + " to get more information call them at " + court_numbers[citation.court_location] + ". To Pay now respond with PAY " + violation.violation_number message = client.messages.create(body=reachout_sms, to=user.phone_number, # Replace with your phone number from_="+13142549337") # Replace with your Twilio number violation.proactive_outreach = True; violation.save() except: print "No court date or not enough information" # sys.exit(0) except: print "error" time.sleep(5)
def __init__(self, **kwargs):
    """Construct the object, optionally seeding attributes from a dict.

    The 'dictionary' keyword is consumed here and never reaches the base
    class. Every name in self.required_attributes must be a key of the dict
    (KeyError otherwise); optional ones are copied only when present.
    """
    source = kwargs.pop("dictionary", None)
    Object.__init__(self, **kwargs)
    if not source:
        return
    # Required fields: a missing key raises KeyError, by design.
    for name in self.required_attributes:
        setattr(self, name, source[name])
    # Optional fields: copy just the ones actually supplied.
    for name in (a for a in self.optional_attributes if a in source):
        setattr(self, name, source[name])
def __init__(self, source_url):
    """Populate this Content object from the embedly oEmbed data for *source_url*."""
    ParseObject.__init__(self)
    oembed_data = Content._embedly_client.oembed(source_url)
    # dict.get() returns None for absent keys, matching the old
    # "value if key in obj else None" conditionals exactly.
    self.object_description = oembed_data.get('description')
    self.title = oembed_data.get('title')
    self.url = source_url
    self.thumbnail_url = oembed_data.get('thumbnail_url')
    self.provider_name = oembed_data.get('provider_name')
    self.type = oembed_data.get('type', 'unknown')
def __init__(self, **kwargs): logging.info("sipMessage() __init__ sipMessage") print 'sipMessage() __init__ sipMessage' self.hasSDP = False self.sipHeaderInfo = {} self.sipMsgSdpInfo = {} self.sipMsgMethodInfo = '' self.sipMsgCallId = '' self.sipMsgCallId = kwargs assert "sipMsgCallId" in kwargs Object.__init__(self, **kwargs)
def __init__(self, source_url):
    """Fill this Content object from embedly's oEmbed response for the URL."""
    ParseObject.__init__(self)
    data = Content._embedly_client.oembed(source_url)
    # .get() mirrors the original "x if key in obj else None" conditionals.
    self.object_description = data.get('description')
    self.title = data.get('title')
    self.url = source_url
    self.thumbnail_url = data.get('thumbnail_url')
    self.provider_name = data.get('provider_name')
    self.type = data.get('type', 'unknown')
def add_door_event():
    """Record a door open/close event from the 'opened' query parameter.

    '1' -> opened, '0' -> closed; anything else is rejected with a plain
    error string. Returns a JSON summary of the saved event.
    """
    state_map = {'1': True, '0': False}
    raw_state = request.args.get('opened', '')
    if raw_state not in state_map:
        return 'Invalid request.'
    door_event = ParseObject()
    door_event.opened = state_map[raw_state]
    door_event.save()
    return json.dumps({
        'opened': door_event.opened,
        'createdAt': str(door_event.createdAt),
    })
def _sample_initialize(projId, objectId, name):
    """Create the on-disk directory for a new sample and symlink in the
    per-analysis source folders from its parent project.

    projId: objectId of the owning Project; objectId: id of the new sample;
    name: the sample's folder name inside each project analysis path.
    Returns JSON mapping analysis code -> source path.
    """
    # Get project from server.
    Project = Object.factory('Project')
    project = Project.Query.get(objectId=projId)
    config = Config.get()
    data_path = config['dataPath']
    sample_dir = config['sampleDir']
    sample_path = os.path.join(data_path, sample_dir, objectId)
    os.makedirs(sample_path, exist_ok=True)
    # Get analyses that apply to samples.
    sample_analyses = _get_analyses().filter(type='sample')
    paths = {}
    for analysis in sample_analyses:
        # Only analyses the project actually has a path for.
        if analysis.code in project.paths:
            source_path = os.path.join(project.paths[analysis.code], name)
            target_path = os.path.join(sample_path, analysis.code)
            os.makedirs(source_path, exist_ok=True)
            # Relative link so the tree stays valid if data_path is remounted.
            rel_path = os.path.relpath(source_path, os.path.dirname(target_path))
            print(rel_path, target_path, file=sys.stderr)
            os.symlink(rel_path, target_path, target_is_directory=True)
            paths[analysis.code] = source_path
    return jsonify({'result': {'paths': paths}})
def _project_reads(objectId):
    """List every read file under the project's 'read' directory, grouped by
    sample folder name, each with its size in bytes. Returns JSON."""
    # Get project from server.
    Project = Object.factory('Project')
    project = Project.Query.get(objectId=objectId)
    # Only files with these extensions count as reads.
    extensions = Config.get()['readExtensions']
    reads = {}
    for root, dirs, files in os.walk(project.paths['read']):
        for f in files:
            if f.endswith(tuple(extensions)):
                # Group key: this folder's name relative to the read root.
                name = os.path.basename(root.replace(project.paths['read'], ''))
                # In case files are at the root.
                if name == '':
                    name = '/'
                if name not in reads:
                    reads[name] = []
                path = os.path.join(root, f)
                size = os.path.getsize(path)
                reads[name].append({'path': path, 'size': size})
                print(path, file=sys.stderr)
    return jsonify({'result': reads})
def _project_get(objectId, code, name):
    """Serve one project output file: HTML inline, anything else as a
    download named after the project/analysis identifiers."""
    # Get project from server.
    Project = Object.factory('Project')
    project = Project.Query.get(objectId=objectId)
    # `name` selects a nested entry; without it `code` maps straight to a path.
    if name:
        path = project.files[code][name]
    else:
        path = project.files[code]
    print(path, file=sys.stderr)
    # If the file is an html file, serve it as a static file.
    # Otherwise, send it as a file.
    # Everything after the first dot, so 'x.tar.gz' -> 'tar.gz'.
    extension = '.'.join(path.split('.')[1:])
    dirname = os.path.dirname(path)
    basename = os.path.basename(path)
    if extension == 'html':
        return send_from_directory(dirname, basename)
    # Download name encodes project id, analysis code and (optionally) name.
    if name:
        filename = '{}_{}_{}.{}'.format(objectId, code, name, extension)
    else:
        filename = '{}_{}.{}'.format(objectId, code, extension)
    return send_from_directory(dirname, basename, as_attachment=True,
                               attachment_filename=filename)
def getSinglePost(objectId): settings_local.initParse() className="Posts" Posts=Object.factory(className) SinglePost=Posts.Query.get(objectId=objectId) print SinglePost.Image1.url return SinglePost
def run_upload(objectId, host, username, password, geo_username):
    """Look up the project by objectId and push its archive to *host*."""
    project = Object.factory('Project').Query.get(objectId=objectId)
    # Archive name is keyed to the GEO submitter's username.
    archive_name = '{}_files.tar.gz'.format(geo_username)
    upload(project, host, username, password, archive_name)
def __init__(self): register("husLce98tepIAM9pl40MqechizNwsmWx5WNBSAPn", "gLYbSDZhJBAX4dNU0gl3sr8DFOVPEG5cBsLCzigM") # Parse Core APP Key self.emailKey = "2bc1e17c8d1ca087d01292a5aa5e11c652ccb48b38e3e2c8e455d66710a324d9" # Kickbox APP Key self.signUpCredentials = { 'full_name' : '', 'email' : '', 'userID' : '', 'password' : '', } self.signUpKeys = ['full_name', 'email', 'userID', 'password'] self.signUpSuccessful = None self.logInCredentials = { 'userID' : '', 'password' : '', } self.logInKeys = ['userID', 'password'] self.logInSuccessful = None self.user = 0 self.token = None self.full_name = '' self.response = 'Email Verification' self.emailVerfication = 'Invalid' #------------------------------------------- self.patientClass = Object.factory("Patients") self.patients = self.patientClass.Query.all()
def findTest():
    """Build lookup tables for every Tests row.

    NOTE(review): the objectIds are carved out of str() of the query result
    — brittle against any repr change in parse_rest; verify before upgrade.
    Populates globals testIDs and testIDsNames; returns (testIDs, testNames,
    testNameIDs).
    """
    global testIDs, testIDsNames
    testNames = []
    testIDs = []
    testNameIDs = {}
    testIDsNames = {}
    myClassName = 'Tests'
    myClass = Object.factory(myClassName)
    word = myClass.Query.all()
    # Split the repr text on ':' and trim repr punctuation off each piece.
    splitword = str(word).split(':')
    length = len(splitword)
    for k in range(1, length - 1):
        line = splitword[k]
        splitword[k] = line[:-9]
        testIDs.append(splitword[k])
    # Last fragment carries different trailing junk.
    testIDs.append(splitword[length - 1][:-2])
    # One round-trip per test to resolve its display name.
    length = len(testIDs)
    for k in range(0, length - 1):
        name = myClass.Query.get(objectId=testIDs[k])
        testNames.append(name.name)
    name = myClass.Query.get(objectId=testIDs[length - 1])
    testNames.append(name.name)
    # Both directions of lookup.
    for k in range(0, length):
        testNameIDs[testNames[k]] = testIDs[k]
    for k in range(0, length):
        testIDsNames[testIDs[k]] = testNames[k]
    print('FUNCTION: findTest COMPLETE')
    print(time.asctime(time.localtime(time.time())))
    return testIDs, testNames, testNameIDs
def sigterm_handler(signal, frame):
    """On SIGTERM: roll in-flight compile/upload projects back to their last
    stable progress value and tear down the index container, then exit 0."""
    print('SIGTERM received', file=sys.stderr, flush=True)
    print(compiling, uploading, file=sys.stderr, flush=True)
    Project = Object.factory('Project')
    # Projects whose compile thread is still running: restore 'success'.
    for objectId, t in compiling.items():
        if t.is_alive():
            project = Project.Query.get(objectId=objectId)
            project.progress = 'success'
            project.save()
    # Projects whose upload thread is still running: restore 'compiled'.
    for objectId, t in uploading.items():
        if t.is_alive():
            project = Project.Query.get(objectId=objectId)
            project.progress = 'compiled'
            project.save()
    if index_container is not None:
        try:
            print('sending SIGTERM to container {}'.format(
                index_container.name), flush=True)
            index_container.stop()
        except Exception as e:
            print('error while stopping container', flush=True)
        finally:
            # Force-remove even when stop failed.
            index_container.remove(force=True)
    sys.exit(0)
def userIDsAndNames():
    """Build username/objectId lookup tables for all User rows.

    NOTE(review): parses str() of the query result rather than the objects
    themselves — brittle against any repr change in parse_rest.
    Populates globals userIDtoName, userIDs, userDetails; returns
    (userDetails, names, userIDs).
    """
    global userIDtoName, userIDs, userDetails
    names = []
    userIDs = []
    userDetails = {}
    userIDtoName = {}
    myClassName = 'User'
    myClass = Object.factory(myClassName)
    details = myClass.Query.all()
    # First pass: carve usernames out of the repr text.
    splitword = str(details).split(':')
    length = len(splitword)
    for k in range(1, length - 1):
        line = splitword[k]
        names.append(line[:-24])
    lastLine = str(splitword[length - 1])[:-18]
    names.append(lastLine)
    # Second pass: carve objectIds out of the same repr.
    splitword = str(details).split('Id ')
    length = len(splitword)
    for k in range(1, length - 1):
        line = splitword[k].split(')>,')
        userIDs.append(line[0])
    lastLine = str(splitword[length - 1])[:-3]
    userIDs.append(lastLine)
    # Zip the two lists into both directions of lookup.
    length = len(userIDs)
    for k in range(0, length):
        userDetails[names[k]] = userIDs[k]
        userIDtoName[userIDs[k]] = names[k]
    print('FUNCTION: userIDsAndNames COMPLETE')
    print(time.asctime(time.localtime(time.time())))
    return userDetails, names, userIDs
def pupilResultsToTeacher(sameGroupID):
    """Show the selected pupil's per-test averages/attempts in the teacher view.

    Reads the selection from the classPupilsList Tk widget and writes the
    formatted rows into pupilResultsList. Publishes results/pupilUserName
    as module globals for the history view.
    """
    global resultIDs, testIDsNames, userDetails, userIDtoName, results, pupilUserName
    results = []
    # curselection() reprs like '(3,)' — pull out the index digit.
    # NOTE(review): int(strip[1]) reads a single character, so selection
    # indices past 9 would misparse — confirm the list stays small.
    value = str(classPupilsList.curselection())
    strip = value.strip(',')
    value = int(strip[1])
    pupilID = sameGroupID[value]
    pupilName = userIDtoName[pupilID]
    pupilUserName = pupilName
    myClassName = 'Results'
    myClass = Object.factory(myClassName)
    # Index 0 is skipped: an artifact of how resultIDs was parsed.
    length = len(resultIDs)
    for k in range(1, length):
        obj = myClass.Query.get(objectId=resultIDs[k])
        user = obj.userName
        if user == pupilName:
            testID = obj.testID
            average = obj.average
            average = '%.1f' % average
            attempts = obj.attempts
            testName = testIDsNames[testID]
            string = (str(testName) + ': Average = ' + str(average) + '; Attempts = ' + str(attempts))
            results.append(string)
    print('Results collected')
    print(results)
    # Refresh the teacher's listbox with the collected rows.
    pupilResultsList.delete(0, END)
    length = len(results)
    for k in range(0, length):
        pupilResultsList.insert(k + 1, results[k])
def Login(userIDClass):
    """Log the user from the Tk entry widgets into Parse and set up the UI.

    userIDClass: dict of user objectId -> class/group name, used for the
    status label. Pupils get their results shown; teachers get the pupil
    browser. Publishes the lowercased username as a module global.
    """
    global username, userDetails
    username = usernameEntry.get()
    password = passwordEntry.get()
    myClass = Object.factory('User')
    username = str(username.lower())
    # BUG FIX: membership must be checked BEFORE indexing userDetails —
    # previously userDetails[username] raised KeyError for an unknown name,
    # making the 'invalid username' branch unreachable.
    if username in userDetails:
        Id = userDetails[username]
        print(Id)
        data = myClass.Query.get(objectId=Id)
        teacher = data.teacher
        print(teacher)
        print('Logging in')
        try:
            User.login(username, password)
            print('Logged in')
            currentUserLabel.config(
                text='Logged in as ' + username + '. Class: ' + userIDClass[Id])
            if teacher == False:
                # Pupil view: show own results, lock teacher-only controls.
                outputUserResults()
                newWordButton.config(state=DISABLED)
                quizSelectButton.config(state=NORMAL)
            else:
                # Teacher view: enable word entry, show the pupil browser.
                newWordButton.config(state=NORMAL)
                quizSelectButton.config(state=DISABLED)
                resultsList.delete(0, END)
                findTeachersPupils(Id)
        except Exception:
            # Parse rejects bad credentials by raising; report, don't crash.
            print('Incorrect password')
            currentUserLabel.config(text='Incorrect password')
    else:
        currentUserLabel.config(text='That is an invalid username')
        print('That is an invalid username')
    print('FUNCTION: userLogin COMPLETE')
    print(time.asctime(time.localtime(time.time())))
def _project_ftp(objectId):
    """Point the project's FTP account home directory at its project folder.

    Runs pure-pw inside the FTP service container; raises on any failure.
    Returns the project root path as JSON.
    """
    # Get project from server.
    Project = Object.factory('Project')
    project = Project.Query.get(objectId=objectId)
    config = Config.get()
    ftp_path = config['ftpPath']
    project_dir = config['projectDir']
    ftp_project_path = os.path.join(ftp_path, project_dir, objectId)
    # Change ftp home directory to the project root.
    # -d chroots the user to the directory, -m commits the change live.
    cmd = (
        'pure-pw usermod {} -d {} -m -f /etc/pure-ftpd/passwd/pureftpd.passwd'
    ).format(objectId, ftp_project_path)
    try:
        # Container name follows docker-compose's <project>_<service>_1 scheme.
        ftp_name = config['repoName'] + '_' + config['ftpService'] + '_1'
        client = docker.from_env()
        ftp = client.containers.get(ftp_name)
        # run command.
        out = ftp.exec_run(cmd)
        exit_code = out[0]
        if exit_code != 0:
            raise Exception('non-zero exit code on ftp user modification')
    except Exception as e:
        print('error occured while modifying ftp user {}'.format(objectId),
              file=sys.stderr)
        raise e
    return jsonify({'result': project.paths['root']})
def outputUserResults():
    """Fill the resultsList Tk widget with the logged-in user's per-test
    averages and attempt counts.

    Reads globals username, resultIDs (objectIds of Results rows; index 0 is
    a parsing artifact and skipped) and testIDsNames.
    """
    global username, resultIDs, testIDsNames
    myClassName = 'Results'
    myClass = Object.factory(myClassName)
    results = []
    length = len(resultIDs)
    # One round-trip per Results row; keep only those owned by this user.
    for k in range(1, length):
        obj = myClass.Query.get(objectId=resultIDs[k])
        user = obj.userName
        if user == username:
            testID = obj.testID
            average = obj.average
            average = '%.1f' % average
            attempts = obj.attempts
            testName = testIDsNames[testID]
            string = (str(testName) + ': Average = ' + str(average) + '; Attempts = ' + str(attempts))
            results.append(string)
    print('Results collected')
    # Refresh the listbox and shrink it to fit.
    resultsList.delete(0, END)
    length = len(results)
    for k in range(0, length):
        resultsList.insert(k + 1, results[k])
    length = len(results)
    resultsList.config(height=length)
    print('FUNCTION outputUserResults COMPLETE')
    print(time.asctime(time.localtime(time.time())))
def _project_email(objectId, subject, message):
    """
    Send mail with the given arguments.

    Writes the email as a JSON file into the shared email directory for a
    separate mailer process to pick up; returns the file name as JSON.
    The message body gets a standard footer with the project's URL and
    FTP credentials appended.
    """
    Project = Object.factory('Project')
    project = Project.Query.get(objectId=objectId)
    config = Config.get()
    host = config['host']
    data_path = config['dataPath']
    email_dir = config['emailDir']
    email_path = os.path.join(data_path, email_dir)
    # Local name shadows nothing: the module is imported as `dt`.
    datetime = dt.datetime.now()
    url = 'http://{}/?id={}'.format(host, objectId)
    fr = 'AlaskaProject_{}@{}'.format(objectId, host)
    to = project.email
    format_dict = {
        'message': message,
        'objectId': objectId,
        'url': url,
        'host': host,
        'password': project.ftpPassword,
        'to': to,
        'datetime': datetime.strftime('%Y-%m-%d %H:%M:%S') + ' PDT'
    }
    # Footer that is appended to every email.
    full_message = '\
<html> \
<head></head> \
<body> \
<p>{message}</p> \
<br> \
<hr> \
<p>Project ID: {objectId}<br> \
Unique URL: <a href="{url}">{url}</a><br> \
FTP server: {host}<br> \
FTP port: 21<br> \
FTP username: {objectId}<br> \
FTP password: {password}<br> \
This message was sent to {to} at {datetime}.<br> \
<b>Please do not reply to this email.</b></p> \
</body> \
</html> \
'.format(**format_dict)
    email = {'to': to, 'from': fr, 'subject': subject, 'message': full_message}
    # The timestamp doubles as a unique file name.
    email_file = '{}.json'.format(datetime)
    output_path = os.path.join(email_path, email_file)
    with open(output_path, 'w') as f:
        json.dump(email, f, indent=4)
    return jsonify({'result': email_file})
def main():
    """Scrape up to 200 public Mechanical Turk HIT groups and store them as
    Parse HIT objects, wiping the previous batch first.

    Prints running progress plus final counts of request errors and private
    (inaccessible) HITs.
    """
    soup = BeautifulSoup(requests.get('https://www.mturk.com/mturk/viewhits?searchWords=&pageNumber=4&searchSpec=HITGroupSearch%23T%231%2310%23-1%23T%23%21%23%21NumHITs%211%21%23%21&sortType=NumHITs%3A1&selectedSearchType=hitgroups').text, "html.parser")
    titles = soup.findAll('a', {"class" : "capsulelink"})
    # Total count is embedded in the header text; slice off the label.
    num_results = int(soup.findAll('td', {"class" : "title_orange_text"})[0].text.strip()[8:-7])
    print("\nTotal number of HITs: " + str(num_results))
    count = 0
    page = 1
    requestErrors = 0
    privateCount = 0
    # NOTE(review): API keys are hard-coded — move to config/env.
    register("DKJjvfvhnCGRK0cAdOpJN9MwR7zhIpuYya5xvbuF", "d8hIYrBrcW4r2ujEkL79vE03FmLxE2QCJgSwuXYv")
    # Clear out the previous scrape before inserting fresh rows.
    HITClass = ParseObject.factory("HIT")
    all_hits = HITClass.Query.all()
    batcher = ParseBatcher()
    batcher.batch_delete(all_hits)
    while (count < 200):
        soup = BeautifulSoup(requests.get('https://www.mturk.com/mturk/viewhits?searchWords=&pageNumber=' + str(page) + '&searchSpec=HITGroupSearch%23T%231%2310%23-1%23T%23%21%23%21NumHITs%211%21%23%21&sortType=NumHITs%3A1&selectedSearchType=hitgroups').text, "html.parser")
        titles = soup.findAll('a', {"class" : "capsulelink"})
        for t in titles:
            # Throttle so mturk.com doesn't rate-limit us.
            time.sleep(.3)
            count = count + 1
            print("\n" + str(count) + "\nTitle: " + t.text.strip())
            linkA = t.parent.parent.findAll('span')[1].a
            # check if the link is public
            if linkA.has_attr('href'):
                link = linkA['href']
                hitPage = BeautifulSoup(requests.get('https://www.mturk.com' + link).text, "html.parser")
                form = hitPage.findAll('form', {'name' : 'hitForm'})
                # Check for error
                if len(form) >= 3:
                    # Third hitForm holds the hidden metadata inputs.
                    form = form[2]
                    requester = form.find("input", {'name' : 'prevRequester'})['value']
                    print('Requester: ' + requester)
                    reward = form.find("input", {'name' : 'prevReward'})['value']
                    print('Reward: ' + reward)
                    groupID = form.find("input", {'name' : 'groupId'})['value']
                    print('Group id: ' + groupID)
                    # reward[3:] skips the 'USD' currency prefix.
                    anyObject = HIT(requester=requester, reward=float(reward[3:]), title=t.text.strip(), groupID=groupID)
                    anyObject.save()
                else:
                    requestErrors = requestErrors + 1
                    print(link)
                    print(form)
            else:
                # Private HIT: no href, only an element id.
                link = linkA['id']
                print(link)
                privateCount = privateCount + 1
        page = page + 1
    print("\n\nErrors: " + str(requestErrors))
    print("Private HITs: " + str(privateCount))
def _job_output(objectId):
    """Return the job's output file as a JSON list of lines (newlines kept)."""
    job = Object.factory('Job').Query.get(objectId=objectId)
    with open(job.outputPath, 'r') as handle:
        lines = handle.readlines()
    return jsonify({'result': lines})
def create_event_users_in_Parse(self):
    # For now, just grab first ones; later, check by array_eventsRegistered.
    """
    Create zE0000_User objects by "batch_save"-ing them to Parse using
    ParsePy's ParseBatcher().

    Event User objects are _User objects whose array_eventsRegistered
    contains the eventNum of this current event.
    """
    eu_ClassName = "zE" + _Event.STR_EVENT_SERIAL_NUM + "_User"
    eu_Class = Object.factory(eu_ClassName)
    # # Get the correct class name from the ep = Event Prefix (passed in).
    # eventUser_ClassName = ep + "_User"
    # eventUser_Class = Object.factory(eventUser_ClassName)

    # add some Users to this Event
    qset_all_users = User.Query.all().order_by("userNum")
    # Real men/women up to the event caps, plus the configured ghost counts.
    li_meu = list(qset_all_users.filter(sex = "M").limit( _Event.MEN))
    li_feu = list(qset_all_users.filter(sex = "F").limit( _Event.WOMEN))
    li_mgeu = list(qset_all_users.filter(sex = "MG").limit( self.num_m_ghosts))
    li_fgeu = list(qset_all_users.filter(sex = "FG").limit( self.num_f_ghosts))
    li_users_at_event = li_meu + li_feu + li_mgeu + li_fgeu
    count_eu = len(li_users_at_event)
    li_eu_obj_to_upload = []
    for index, obj_User in enumerate(li_users_at_event):
        # event_userNum is 1-based; first/last split naively on spaces.
        new_EU_object = eu_Class(
            user_objectId = obj_User.objectId,
            event_userNum = index + 1,
            username = obj_User.username,
            first = obj_User.username.split(" ")[0],
            last = obj_User.username.split(" ")[-1],
            sex = obj_User.sex
        )
        li_eu_obj_to_upload.append(new_EU_object)
    # Batch upload in chunks no larger than 50,
    # and sleep to avoid timeouts
    batch_upload_to_Parse(eu_ClassName, li_eu_obj_to_upload)
    pass
def _project_sleuth(objectId, port):
    """Launch a shiny/sleuth docker container serving the project's
    differential-expression results on the given host port.

    Returns JSON with the new container's id and name.
    """
    # Get project from server.
    Project = Object.factory('Project')
    project = Project.Query.get(objectId=objectId)
    # Check if there is a sleuth container open for this project.
    config = Config.get()
    data_volume = config['repoName'] + '_' + config['dataVolume']
    data_path = config['dataPath']
    script_volume = config['repoName'] + '_' + config['scriptVolume']
    script_path = config['scriptPath']
    network = config['repoName'] + '_' + config['backendNetworkName']
    shiny_script = config['shinyScript']
    # Path to the sleuth object produced by the diff analysis.
    so_path = project.files[config['diffDir']]['sleuth']
    # Start a new docker container.
    cmd = 'Rscript {} -p {} --alaska'.format(shiny_script, so_path)
    volumes = {
        data_volume: {
            'bind': data_path,
            'mode': 'rw'
        },
        script_volume: {
            'bind': script_path,
            'mode': 'rw'
        }
    }
    # Parse credentials are handed to the container via its environment.
    environment = {
        'PARSE_HOSTNAME': PARSE_HOSTNAME,
        'PARSE_APP_ID': PARSE_APP_ID,
        'PARSE_MASTER_KEY': PARSE_MASTER_KEY
    }
    # Shiny listens on 42427 inside the container.
    ports = {42427: port}
    wdir = script_path
    name = 'shiny-{}'.format(project.objectId)
    # Docker client.
    client = docker.from_env()
    container = client.containers.run(config['diffImage'],
                                      cmd,
                                      detach=True,
                                      auto_remove=True,
                                      volumes=volumes,
                                      working_dir=wdir,
                                      network=network,
                                      environment=environment,
                                      name=name,
                                      ports=ports)
    return jsonify(
        {'result': {
            'containerId': container.id,
            'containerName': name
        }})
def _project_delete(objectId):
    """Best-effort removal of the project's root directory from disk.

    Always reports success as JSON; filesystem errors are printed only.
    """
    project = Object.factory('Project').Query.get(objectId=objectId)
    try:
        root = project.paths['root']
        if os.path.isdir(root):
            shutil.rmtree(root)
    except Exception as e:
        # Deletion is deliberately non-fatal; report and carry on.
        print(e)
    return jsonify({'result': 'success'})
def __init__(self, options, columns):
    """Set up the Parse foreign-data wrapper: read required connection
    options, register with Parse, and build the query class."""
    super(ParseFdw, self).__init__(options, columns)
    self.columns = columns
    try:
        # All three options are mandatory; a missing one raises KeyError.
        self.application_id, self.rest_api_key, self.className = (
            options['application_id'],
            options['rest_api_key'],
            options['class_name'],
        )
    except KeyError:
        # ERROR severity makes PostgreSQL abort the statement here.
        log_to_postgres("You must specify an application_id, rest_api_key and class_name options when creating this FDW.", ERROR)
    register(self.application_id, self.rest_api_key)
    self.object = Object.factory(self.className)
def getResultIDs():
    """Collect every Results objectId into the resultIDs global.

    NOTE(review): parses str() of the query result — brittle against any
    repr change in parse_rest; verify before upgrading the library.
    """
    global resultIDs
    resultIDs = []
    myClassName = 'Results'
    myClass = Object.factory(myClassName)
    returned = str(myClass.Query.all())
    splited = returned.split('Results:')
    length = len(splited)
    # Trim repr punctuation off each fragment; the last one differs.
    for k in range(0, length - 1):
        resultIDs.append(splited[k][:-4])
    resultIDs.append(splited[length - 1][:-2])
    print('FUNCTION getRestultIDs COMPLETE')
    print(time.asctime(time.localtime(time.time())))
def getWordDetails():
    """Build lookup tables for every Words row: ids, texts, owning test ids
    and definitions.

    NOTE(review): ids are carved out of str() of the query result, and each
    word is then re-fetched once per table — several extra round-trips per
    row. Populates the IDsandDefinitions global; returns
    (wordIDs, words, nameIDs, IDsandTest, IDsandDefinitions).
    """
    global IDsandDefinitions
    wordIDs = []
    words = []
    nameIDs = {}
    IDsandTest = {}
    IDsandDefinitions = {}
    myClassName = 'Words'
    myClass = Object.factory(myClassName)
    word = myClass.Query.all()
    # Carve objectIds out of the repr text.
    splitword = str(word).split(':')
    length = len(splitword)
    for k in range(1, length - 1):
        line = splitword[k]
        splitword[k] = line[:-9]
        wordIDs.append(splitword[k])
    print('wordIDs FOUND')
    print(time.asctime(time.localtime(time.time())))
    # Last fragment carries different trailing junk.
    wordIDs.append(splitword[length - 1][:-2])
    # One fetch per word for its display text.
    length = len(wordIDs)
    for k in range(0, length):
        word = myClass.Query.get(objectId=wordIDs[k])
        words.append(word.text)
    print('words FOUND')
    print(time.asctime(time.localtime(time.time())))
    for k in range(0, length - 1):
        nameIDs[wordIDs[k]] = words[k]
    print('words and wordIDs ASSIGNED')
    print(time.asctime(time.localtime(time.time())))
    nameIDs[wordIDs[length - 1]] = words[length - 1]
    # One fetch per word for its owning testID; last entry trims repr junk.
    for k in range(0, length - 1):
        testID = myClass.Query.get(objectId=wordIDs[k])
        testID = testID.testID
        IDsandTest[wordIDs[k]] = testID
    print('testID and wordIDs ASSIGNED')
    print(time.asctime(time.localtime(time.time())))
    testID = myClass.Query.get(objectId=wordIDs[length - 1])
    testID = testID.testID
    IDsandTest[wordIDs[length - 1]] = testID[:-2]
    # One fetch per word for its definition; last entry trims repr junk.
    for k in range(0, length - 1):
        definition = myClass.Query.get(objectId=wordIDs[k])
        definition = definition.definition
        IDsandDefinitions[wordIDs[k]] = definition
    definition = myClass.Query.get(objectId=wordIDs[length - 1])
    definition = definition.definition
    print('wordIDs and Definitions ASSIGNED')
    print(time.asctime(time.localtime(time.time())))
    IDsandDefinitions[wordIDs[length - 1]] = definition[:-2]
    print('FUNCTION: getWordDetails COMPLETE')
    print(time.asctime(time.localtime(time.time())))
    return wordIDs, words, nameIDs, IDsandTest, IDsandDefinitions
def cleanup_progress():
    """Roll projects stuck mid-operation back to their last stable state:
    'compiling' -> 'success' and 'uploading' -> 'compiled'."""
    print('cleaning up progresses')
    Project = Object.factory('Project')
    # (stuck progress value left by a crash, state to restore)
    for stuck, restored in (('compiling', 'success'), ('uploading', 'compiled')):
        matched = Project.Query.all().filter(progress=stuck)
        print(matched)
        for proj in matched:
            proj.progress = restored
            proj.save()
def dequeue():
    """Pop the lowest-queuePosition job, fire the 'jobStarted' cloud function,
    and return a freshly-fetched copy of the job.

    Returns False when the queue is empty; returns None (implicitly) after
    reporting an unexpected error to Sentry.
    """
    try:
        Job = Object.factory('Job')
        candidates = Job.Query.filter(
            queuePosition__gte=0).order_by('queuePosition').limit(1)
        if not candidates:
            return False
        head = candidates[0]
        Function('jobStarted')(objectId=head.objectId)
        # Re-fetch so the caller sees any changes made by jobStarted.
        return Job.Query.get(objectId=head.objectId)
    except Exception as e:
        capture_exception(e)
def __init__(self, options, columns):
    """Initialize the FDW: pull mandatory options, register the Parse
    connection, and create the class used for all queries."""
    super(ParseFdw, self).__init__(options, columns)
    self.columns = columns
    try:
        # Missing options surface as KeyError and abort the statement below.
        self.application_id = options['application_id']
        self.rest_api_key = options['rest_api_key']
        self.className = options['class_name']
    except KeyError:
        # ERROR severity causes PostgreSQL to abort here.
        log_to_postgres(
            "You must specify an application_id, rest_api_key and class_name options when creating this FDW.",
            ERROR)
    register(self.application_id, self.rest_api_key)
    self.object = Object.factory(self.className)
def __init__(self, obj, batch_strategy=OnChangeStrategy()):
    """Mirror *obj* into a Parse object and keep its JSON representation updated.

    - obj: the object to mirror; obj.toJSON() must return a dict whose
      "type" field names the Parse class to use.
    - batch_strategy: controls when saves are flushed to Parse.
    """
    # NOTE(review): the OnChangeStrategy() default is evaluated once at
    # definition time, so every updater built without an explicit strategy
    # shares ONE instance — confirm the strategy is stateless before
    # relying on this.
    self.batch_strategy = batch_strategy
    self.obj = obj
    self.type = obj.toJSON()["type"]
    self.ParseClass = Object.factory(self.type)
    self.parse_object = self.ParseClass()
    # Push the initial state immediately.
    self.update()
def _project_sleuth_close(objectId):
    """Stop the shiny/sleuth container attached to the given project.

    Returns JSON: 'stopped' on success, 'not found' when the container no
    longer exists.
    """
    project = Object.factory('Project').Query.get(objectId=objectId)
    shiny = project.shiny
    container_id = shiny.containerId
    # Name is read for parity with the stored record; stop is done by id.
    container_name = shiny.containerName
    docker_client = docker.from_env()
    try:
        docker_client.containers.get(container_id).stop(timeout=1)
    except docker.errors.NotFound:
        return jsonify({'result': 'not found'})
    return jsonify({'result': 'stopped'})
def _sample_citation(objectId):
    """Build the methods-section citation paragraphs for a sample's analysis,
    using the tool versions from config. Returns a JSON list of sentences."""
    # Get project from server.
    Sample = Object.factory('Sample')
    sample = Sample.Query.get(objectId=objectId)
    config = Config.get()
    genus = sample.reference.organism.genus
    species = sample.reference.organism.species
    ref_version = sample.reference.version
    # Kallisto flags actually used; single-end reads need length/stddev too.
    arg = '-b {} --bias'.format(config['kallistoBootstraps'])
    if sample.readType == 'single':
        arg += ' --single -l {} -s {}'.format(sample.readLength, sample.readStd)
    format_dict = {
        'genus': genus,
        'species': species,
        'ref_version': ref_version,
        'arg': arg,
        **config
    }
    # NOTE(review): 'using using' typo is preserved — it is user-visible
    # runtime text; fix upstream where the wording is owned.
    info = [
        'RNA-seq data was analyzed with the Alaska pipeline (alaska.caltech.edu).',
        ('Quality control was performed using using Bowtie2 (v{versionBowtie}), '
         'Samtools (v{versionSamtools}), RSeQC (v{versionRseqc}), '
         'FastQC (v{versionFastqc}), with results aggregated with '
         'MultiQC (v{versionMultiqc}).').format(**format_dict),
        ('Reads were aligned to the {genus} {species} genome version {ref_version} '
         'as provided by Wormbase using Kallisto (v{versionKallisto}) with the following '
         'flags: {arg}').format(**format_dict),
        ('Differential expression analyses with Sleuth (v{versionSleuth}) '
         'were performed using a Wald Test corrected for multiple-testing.'
         ).format(**format_dict)
    ]
    # Enrichment suite only exists for C. elegans.
    if genus == 'caenorhabditis' and species == 'elegans':
        info.append(
            'Enrichment analysis was performed using the Wormbase Enrichment Suite.'
        )
    return jsonify({'result': info})
def historyToTeacher():
    """Show the attempt history (date + score) for the test selected in
    pupilResultsList, for the currently selected pupil, in historyList."""
    global results, testNameIDs, pupilUserName
    user = pupilUserName
    historyIDs = []
    outputList = []
    # Parse the Tk selection repr, e.g. '(3,)' — single-digit index only.
    value = str(pupilResultsList.curselection())
    strip = value.strip(',')
    value = int(strip[1])
    # Recover the test name from the display row, then map it to its id.
    line = results[value]
    strip = line.split(': Average')
    testname = strip[0]
    testID = testNameIDs[testname]
    myClassName = 'History'
    myClass = Object.factory(myClassName)
    output = myClass.Query.all()
    print(output)
    length = len(output)
    print(length)
    # NOTE(review): objectIds are parsed out of each row's repr — brittle
    # against any repr change in parse_rest.
    for k in range(0, length):
        strip = str(output[k]).strip('<>')
        split = strip.split(':')
        historyIDs.append(split[1])
        print(split[1])
    # Keep rows matching both the selected pupil and test.
    for k in range(0, length):
        result = myClass.Query.get(objectId=historyIDs[k])
        historyUsername = result.userName
        historyTestID = result.testID
        if historyUsername == pupilUserName and historyTestID == testID:
            # createdAt stringifies as 'YYYY-MM-DD ...' — keep the date part.
            date = result.createdAt
            date = str(date)[:10]
            score = result.score
            string = (str(date) + ': scored ' + str(score))
            outputList.append(string)
        else:
            pass
    historyList.delete(0, END)
    length = len(outputList)
    for k in range(0, length):
        historyList.insert(k + 1, outputList[k])
def classDetails(userIDs):
    """Map each user objectId to their class/group name.

    userIDs: list of User objectIds to look up (one Parse round-trip each).
    Returns (classNames, userIDClass): the distinct group names in
    first-seen order, and a dict of objectId -> group name. Also publishes
    userIDClass as a module global for the UI handlers.
    """
    global userIDClass
    classNames = []
    userIDClass = {}
    myClass = Object.factory('User')
    for Id in userIDs:
        data = myClass.Query.get(objectId=Id)
        group = data.group
        userIDClass[Id] = group
        # Idiom fix: replaces the original inverted 'if in: pass / else:
        # append' branch; also dropped the unused ClassUseriD local.
        if group not in classNames:
            classNames.append(group)
    return classNames, userIDClass
def check_images():
    """Verify every docker image needed by active analyses (plus the index
    image) exists locally; report to Sentry and exit(1) if any is missing."""
    index_image = Config.get()['indexImage']
    Analysis = Object.factory('Analysis')
    # Images for all active analyses, with the index image appended.
    images = list(
        analysis.image for analysis in Analysis.Query.filter(active=True)) + [index_image]
    client = docker.from_env()
    for image in images:
        print(image, flush=True)
        try:
            # Raises if the image is not present locally.
            client.images.get(image)
        except Exception as e:
            capture_exception(e)
            print('error while checking image {}'.format(image))
            sys.exit(1)
def referenceBuild():
    '''
    Kick off an asynchronous build of every reference that is not yet ready.

    Only one build thread runs at a time: returns {'status': 'running'} if
    a previous build is still alive, otherwise starts a daemon thread over
    all Reference rows with ready=False and returns {'status': 'started'}.
    '''
    global indexThread
    # Get all non-ready references.
    if indexThread is not None and indexThread.is_alive():
        return jsonify({'status': 'running'})
    Reference = Object.factory('Reference')
    references = Reference.Query.filter(ready=False)
    print(references, file=sys.stderr)
    print('found {} unbuilt reference'.format(len(references)), file=sys.stderr)
    # Daemon thread so a hung build never blocks process shutdown.
    indexThread = Thread(target=_referencesBuild, args=(references, ))
    indexThread.daemon = True
    indexThread.start()
    return jsonify({'status': 'started'})
def project_compile(objectId):
    """HTTP endpoint: start an asynchronous compile of the given project.

    Requires a valid sessionToken query argument; rejects a project that is
    already compiling. NOTE(review): the in-flight check and the thread
    registration are not atomic — concurrent requests could race.
    """
    try:
        token = request.args.get('sessionToken')
        with SessionToken(token):
            if objectId in compiling and compiling[objectId].is_alive():
                raise Exception(
                    '{} is already being compiled'.format(objectId))
            Project = Object.factory('Project')
            project = Project.Query.get(objectId=objectId)
            # Persist progress before spawning so the UI updates immediately.
            project.progress = 'compiling'
            project.save()
            t = Thread(target=_project_compile, args=(project, ))
            t.daemon = True
            compiling[objectId] = t
            t.start()
            return jsonify({'result': 'compiling'})
    except Exception as e:
        # All failures surface as a JSON error payload, not a 500.
        print(traceback.format_exc(), file=sys.stderr)
        return jsonify({'error': str(e)})
def updateResults():
    """Fold the latest quiz score into the user's stored average/attempts.

    Uses module globals: resultIDs (objectIds of Results rows; index 0 is a
    parsing artifact and skipped), username, score, testID. Finds the
    Results row for this user and test, recomputes the running average,
    increments attempts and saves.

    BUG FIX: the old implementation wrapped the search in `while run == 1`
    and only cleared the flag after a successful save, so it looped forever
    (re-querying Parse) whenever no matching Results row existed.
    """
    global resultIDs, username, score, testID
    myClass = Object.factory('Results')
    for k in range(1, len(resultIDs)):
        obj = myClass.Query.get(objectId=resultIDs[k])
        if obj.userName == username and obj.testID == testID:
            attempts = obj.attempts
            # Reconstruct the running total, then fold in the new score.
            total = attempts * obj.average
            attempts += 1
            average = (total + score) / attempts
            print('Attempts:' + str(attempts), 'Average:' + str(average))
            obj.average = average
            obj.attempts = attempts
            obj.save()
            print('Saved')
            break
def project_upload(objectId):
    """HTTP endpoint: start an asynchronous GEO upload of the project.

    JSON body must supply host, username, password and geo_username; the
    sessionToken query argument must be valid. Rejects a project that is
    already uploading. NOTE(review): progress is saved BEFORE the in-flight
    check, so a rejected duplicate request still flips the progress field.
    """
    try:
        token = request.args.get('sessionToken')
        with SessionToken(token):
            data = request.get_json()
            host = data['host']
            username = data['username']
            password = data['password']
            geo_username = data['geo_username']
            Project = Object.factory('Project')
            project = Project.Query.get(objectId=objectId)
            project.progress = 'uploading'
            project.save()
            if objectId in uploading and uploading[objectId].is_alive():
                raise Exception(
                    '{} is already being uploaded'.format(objectId))
            t = Thread(target=_project_upload, args=(
                project,
                host,
                username,
                password,
                geo_username,
            ))
            t.daemon = True
            uploading[objectId] = t
            t.start()
            return jsonify({'result': 'uploading'})
    except Exception as e:
        # All failures surface as a JSON error payload, not a 500.
        print(traceback.format_exc(), file=sys.stderr)
        return jsonify({'error': str(e)})
#parse stuff from parse_rest.connection import register, ParseBatcher from parse_rest.datatypes import Object from parse_rest.user import User import time #parse initialization register("XEPryFHrd5Tztu45du5Z3kpqxDsweaP1Q0lt8JOb", "PE8FNw0hDdlvcHYYgxEnbUyxPkP9TAsPqKvdB4L0") ClientsTest = Object.factory("Clients") pc = ClientsTest.Query.get(objectId="mCcWlsoRUh") ''' if pc.Telephone[0] == chr(10): print "SI" pc.Telephone = pc.Telephone.splitlines()[0] print len(pc.Telephone) for character in pc.Telephone: print pc.Telephone.index(character) print character.encode('hex') exit() '''
def save_to_parse():
    """Persist the current touch coordinates as a new Parse object.

    NOTE(review): relies on a ``touch`` object from an enclosing scope and
    on ``Object`` being instantiable directly — confirm both at call time.
    """
    Object(x=touch.x, y=touch.y).save()
#parse stuff from parse_rest.connection import register, ParseBatcher from parse_rest.datatypes import Object from parse_rest.user import User import time #parse initialization register("XEPryFHrd5Tztu45du5Z3kpqxDsweaP1Q0lt8JOb", "PE8FNw0hDdlvcHYYgxEnbUyxPkP9TAsPqKvdB4L0") Agencies = Object.factory("Agencies") n = 100 counter = 0 clients = [] while n == 100: print "Fetching: " + str(counter) res = Agencies.Query.all().skip(counter) cc = 0 batchclients = [] for i in res: clients.append(i) batchclients.append(i) accountID = "AV" + str(counter+cc).zfill(4)
def run_compile(objectId):
    """Fetch the Project with *objectId* from Parse and compile it."""
    proj = Object.factory('Project').Query.get(objectId=objectId)
    # NOTE(review): ``compile`` shadows the builtin — presumably a
    # module-level helper defined elsewhere in this file.
    compile(proj)
def testFactory(self):
    """Object.factory must map class names to the registered classes."""
    expectations = (("_User", User), ("GameScore", GameScore))
    for class_name, expected_class in expectations:
        self.assertEqual(Object.factory(class_name), expected_class)
def __init__(self, channel_name, update_strategy=None):
    """Create a new publisher and publish messages on a channel.

    :param channel_name: suffix appended to CHANNEL_CLASS_PREFIX to select
        the Parse class that holds this channel's messages.
    :param update_strategy: strategy object controlling updates; defaults
        to a fresh OnChangeStrategy per publisher.
    """
    # BUG FIX: the original default was ``update_strategy=OnChangeStrategy()``,
    # a default evaluated once at import time and therefore shared by every
    # publisher created without an explicit strategy.  Use a None sentinel
    # so each instance gets its own strategy.
    if update_strategy is None:
        update_strategy = OnChangeStrategy()
    self.message_holder_class = Object.factory(CHANNEL_CLASS_PREFIX + channel_name)
    self.update_strategy = update_strategy
def testFactory(self):
    """Object.factory must map class names to the registered classes."""
    cases = [('_User', User), ('GameScore', GameScore)]
    for factory_name, expected in cases:
        self.assertEqual(Object.factory(factory_name), expected)
class OpenBookScanner:
    """This is the main class working on the scanner.

    Wires together Parse-backed message channels, state machines for the
    scanner hardware and USB sticks, a converter, and a storage location,
    then pumps messages in a polling loop (see run()).
    """

    # Parse model class used to expose scanner state to clients.
    ModelClass = Object.factory(PUBLIC_MODEL_CLASS_NAME)
    # Names of the two public Parse message channels.
    public_channel_name_outgoing = "OpenBookScannerOutgoing"
    public_channel_name_incoming = "OpenBookScannerIncoming"

    def __init__(self):
        """Create a new book scanner.

        Message flow:
        status(StatusStateMachine) --message-->
        public_message_buffer(BufferingBroker) --message-->
        public_message_broker(ParseBroker)
        """
        self.create_communication_channels()
        self.create_model()

    def create_communication_channels(self):
        """This creates the communication channels to the client."""
        self.update_strategy = BatchStrategy()
        # Outgoing messages are buffered, then flushed to Parse.
        self.outgoing_messages = BufferingBroker()
        self.outgoing_messages_publisher = ParsePublisher(self.public_channel_name_outgoing, self.update_strategy)
        self.incoming_messages = ParseSubscriber(self.public_channel_name_incoming)
        # Broker for messages that stay inside this process.
        self.internal_messages = LocalBroker()

    def create_model(self):
        """This creates the model which is observable by the client."""
        self.model = self.ModelClass()
        self.model.save()
        # messaging: announce this server instance to clients.
        self.outgoing_messages.subscribe(self.outgoing_messages_publisher)
        self.outgoing_messages.deliver_message(message.new_book_scanner_server(id=self.model.objectId))
        # status
        self.status = self.public_state_machine("status", StatusStateMachine())
        self.incoming_messages.subscribe(self.status)
        # scanner
        self.scanner_listener = self.public_state_machine("listener", ScannerListener())
        self.scanner_listener.register_hardware_observer(self)
        # usb sticks
        self.usb_stick_listener = self.public_state_machine("usb_stick_listener", USBStickListener())
        self.usb_stick_listener.register_hardware_observer(self)
        # conversion: converter and internal broker subscribe to each other.
        self.converter = Converter()
        self.internal_messages.subscribe(self.converter)
        self.converter.subscribe(self.internal_messages)
        # storage: mirrored to Parse via a ParseUpdater and run in parallel.
        self.storage_location = UserDefinedStorageLocation()
        self.parse_storage_location = ParseUpdater(self.storage_location, self.update_strategy)
        self.storage_location.register_state_observer(self.parse_storage_location)
        self.incoming_messages.subscribe(self.storage_location)
        self.internal_messages.subscribe(self.storage_location)
        self.storage_location.subscribe(self.internal_messages)
        self.storage_location.run_in_parallel()
        self.relate_to("storage", self.parse_storage_location)

    def relate_to(self, relation, updater):
        """Relate to an updater over a defined relation on the Parse model."""
        self.model.relation(relation).add([updater.get_parse_object()])

    def new_hardware_detected(self, hardware):
        """Add new hardware to myself.

        Dispatches to the scanner or USB-stick handler; raises ValueError
        for hardware of any other kind.
        """
        print("model -> new hardware", hardware)
        if hardware.is_scanner():
            self.new_scanner_detected(hardware)
        elif hardware.is_usb_stick():
            self.new_usb_stick_detected(hardware)
        else:
            raise ValueError("Could not use the hardware {}.".format(hardware))
        # self.model.save() # ERROR!!

    def new_scanner_detected(self, scanner):
        """A new scanner has been detected: publish it and route messages to it."""
        self.public_state_machine("scanner", scanner)
        self.incoming_messages.subscribe(scanner)

    def new_usb_stick_detected(self, usb_stick):
        """A new USB stick has been detected: publish it and route messages to it."""
        self.public_state_machine("usb_stick", usb_stick)
        self.incoming_messages.subscribe(usb_stick)

    def public_state_machine(self, relation, state_machine):
        """Make the state machine public.

        Mirrors its state to Parse, connects it to the internal broker and
        relates it to the model; returns the same state machine.
        """
        updater = ParseUpdater(state_machine, self.update_strategy)
        state_machine.register_state_observer(updater)
        state_machine.subscribe(self.internal_messages)
        state_machine.register_state_observer(StateChangeToMessageReceiveAdapter(self.internal_messages))
        self.relate_to(relation, updater)
        return state_machine

    def run(self):
        """Run the update in a loop, polling every half second."""
        while 1:
            self.update()
            time.sleep(0.5)

    def update(self):
        """Update the book scanner, send and receive messages."""
        self.incoming_messages.flush()
        self.update_state_machines()
        self.outgoing_messages.flush()
        self.update_strategy.batch()

    def update_state_machines(self):
        """Send an update message to the state machines."""
        self.scanner_listener.update()
        self.scanner_listener.update_hardware()
        self.usb_stick_listener.update()
        self.usb_stick_listener.update_hardware()

    def print_messages(self):
        """Attach a receiver to each message broker to print its messages."""
        self.outgoing_messages.subscribe(MessagePrintingSubscriber(self.public_channel_name_outgoing))
        self.internal_messages.subscribe(MessagePrintingSubscriber("Internal"))
        self.incoming_messages.subscribe(MessagePrintingSubscriber(self.public_channel_name_incoming))
# Install with pip install git+https://github.com/dgrtwo/ParsePy.git from parse_rest.connection import register, ParseBatcher # Alias the Object type to make clear is not a normal python Object from parse_rest.datatypes import Object as ParseObject printSubTitle("First register the app") register(APPLICATION_ID, REST_API_KEY) printSubTitle("Parse is a NOSQL database.") # https://en.wikipedia.org/wiki/NoSQL printExplain("So, you not need to pre-create the data schema, and can drop/add data & columns at will") anyObject = ParseObject() printExplain("Simple set a value to the object. No need to exist before..") anyObject.title = 'Hello world' anyObject.score = 100 def saveToParse(anyObject): print "Saving..." anyObject.save() print "Done!" saveToParse(anyObject)
""" import json FILENAME = "Courses.json" with open(FILENAME) as data_file: data = json.load(data_file) APPLICATION_ID = "H0NpxZR0eGLP4kgYFvHujDybG43HwI0ktVeWjN8u" REST_API_KEY = "ilhlGks7fj6jppBbW5OYz42jl3RN8Z4EUebQHGZK" from parse_rest.connection import register register(APPLICATION_ID, REST_API_KEY) from parse_rest.datatypes import Object myClassName = "Course" Course = Object.factory(myClassName) subjects = data.keys() for sub in subjects: if (sub == ""): print "FOUND EMPTY SUBJECT" continue numbers = data[sub] nums = numbers.keys() for num in nums: if (num == ""): print "FOUND EMPTY NUMBERSPACE" continue title = numbers[num] if (title == ""): print "FOUND EMPTY TITLE" continue
""" import json FILENAME = "Courses.json"; with open(FILENAME) as data_file: data = json.load(data_file) APPLICATION_ID = "H0NpxZR0eGLP4kgYFvHujDybG43HwI0ktVeWjN8u"; REST_API_KEY = "ilhlGks7fj6jppBbW5OYz42jl3RN8Z4EUebQHGZK"; from parse_rest.connection import register register(APPLICATION_ID, REST_API_KEY); from parse_rest.datatypes import Object myClassName = "Course"; Course = Object.factory(myClassName); subjects = data.keys(); for sub in subjects: if (sub == ""): print "FOUND EMPTY SUBJECT"; continue; numbers = data[sub] nums = numbers.keys(); for num in nums: if (num == ""): print "FOUND EMPTY NUMBERSPACE"; continue; title = numbers[num]; if (title == ""): print "FOUND EMPTY TITLE"; continue;
#reportlab for generate the pdf file from reportlab.pdfgen import canvas point = 1 inch = 72 #parse stuff from parse_rest.connection import register, ParseBatcher from parse_rest.datatypes import Object from parse_rest.user import User #parse initialization register("XEPryFHrd5Tztu45du5Z3kpqxDsweaP1Q0lt8JOb", "PE8FNw0hDdlvcHYYgxEnbUyxPkP9TAsPqKvdB4L0") Receipts = Object.factory("Receipts") Boxes = Object.factory("Boxes") #Inventarios = Object.factory("Inventarios") #Tareas = Object.factory("Tareas") #Notas = Object.factory("Notas") class TaskMenu(Popup): taskItem = ObjectProperty() def selfDelete(self): print self.taskItem.taskID #delete from the database
def get_channel_class(channel_name):
    """Return the Parse Object subclass that backs the given channel."""
    qualified_name = CHANNEL_CLASS_PREFIX + channel_name
    return Object.factory(qualified_name)
REST_API_KEY = "SmoaTb8g7ld84iheL13k568C7pT1ybPJjOJc0set"

from parse_rest.connection import register
# Alias the Object type to make clear is not a normal python Object
from parse_rest.datatypes import Object

# first register the app
# NOTE(review): APPLICATION_ID must be defined earlier in this file.
register(APPLICATION_ID, REST_API_KEY)

# define a Python class that inherts from parse_rest.datatypes.Object
class tastyThangz(Object):
    pass

# creating Object subclass by string name
objectName = "favoriteShit"
myObject = Object.factory(objectName)

# instantiate new class with some parameters
brownSugar = tastyThangz(fuxGiven=1337, they_call_me='Brown Sugar', extraFresh=False)
# change or set new parameters afterwards
brownSugar.extraFresh = True
brownSugar.amountOfSwag = 11

# creating Object subclass by string name
# NOTE(review): duplicates objectName/myObject above — confirm intended.
objectName2 = "favoriteShit"
myObject2 = Object.factory(objectName2)
# instantiate new class with some parameters
bigDaddy = tastyThangz(fuxGiven=0, they_call_me='Big Daddy', extraFresh=True)
def __init__(self, **kwargs):
    """Initialise a sipCall object from keyword arguments.

    NOTE(review): ``self.sipCallID`` is assigned the whole kwargs dict,
    not kwargs["sipCallID"] — confirm this is intentional.
    """
    logging.info("sipCall() New sipCall object created()")
    print 'sipCall() New sipCall object created()'
    self.sipCallID = kwargs
    # Validation via assert is stripped under -O; it also runs only
    # after the assignment above.
    assert "sipCallID" in kwargs
    Object.__init__(self, **kwargs)
from parse_rest.connection import register
from parse_rest.datatypes import Object

# Parse model class declared by subclassing Object.
class GameScore(Object):
    pass

register("XEPryFHrd5Tztu45du5Z3kpqxDsweaP1Q0lt8JOb", "PE8FNw0hDdlvcHYYgxEnbUyxPkP9TAsPqKvdB4L0")

# The same class obtained dynamically by name (Python 2 print statement).
myClassName = "GameScore"
myClass = Object.factory(myClassName)
print myClass

# Create, mutate, and persist one score record.
gameScore = GameScore(score=1337, player_name='John Doe', cheat_mode=False)
gameScore.cheat_mode = True
gameScore.level = 2
gameScore.save()
import os, sys

# NOTE(review): API keys are committed in source — move to configuration.
APPLICATION_ID = "15DQ4FEwTwxxXOTwjDKwDsyyZGLfTTZS9WTQlWaN"
REST_API_KEY = "RIVWH59VFaPUqER09SjeJbxYA8NwDW1yOisuvBuv"

from parse_rest.connection import register, ParseBatcher
from parse_rest.datatypes import Object as ParseObject

register(APPLICATION_ID, REST_API_KEY)

# NOTE(review): anyObject is created here but never populated below.
anyObject = ParseObject()

# Interactive prompts (Python 2 raw_input).
name = raw_input("Enter your name: ")
zipcode = raw_input("Enter your zip code: ")

userObject = ParseObject()
userObject.name = name
userObject.zipcode = int(zipcode)

# Collect free-form dislikes until the user answers "nothing".
hates = []
while True:
    hate = raw_input("Tell me something you hate: ")
    if(hate == "nothing"):
        break
    else:
        hates.append(hate)
userObject.hates = hates

def saveToParse(anyObject):
    # Persist the given object to Parse (network round-trip).
    print "Saving..."
    anyObject.save()
    print "Done!"
def setup_event_users(m, f, mg, fg, ep):
    """
    Create <ep>_User objects by "batch_save"-ing them to Parse using
    ParsePy's ParseBatcher().

    Event User objects are _User objects whose array_eventsRegistered
    contains the eventNum of this current event.

    Parameters:
        m, f, mg, fg -- maximum numbers of male / female / male-ghost /
            female-ghost users to register for the event.
        ep -- event prefix (e.g. "zE0001"); the event class is "<ep>_User".

    Returns the list of event-user objects uploaded to Parse.
    """
    # Start a function timer.
    function_start_time = time.time()

    # Get the correct class name from the ep = Event Prefix (passed in).
    eventUser_ClassName = ep + "_User"
    eventUser_Class = Object.factory(eventUser_ClassName)

    # Pick the first m/f/mg/fg users of each sex, ordered by userNum.
    qset_all_users = User.Query.all().order_by("userNum")
    li_meu = list(qset_all_users.filter(sex = "M").limit(m))
    li_feu = list(qset_all_users.filter(sex = "F").limit(f))
    li_mgeu = list(qset_all_users.filter(sex = "MG").limit(mg))
    li_fgeu = list(qset_all_users.filter(sex = "FG").limit(fg))
    li_users_at_event = li_meu + li_feu + li_mgeu + li_fgeu
    count_eu = len(li_users_at_event)

    # Build the event-user objects, numbering them from 1.
    li_eu_obj_to_upload = []
    for n, eu_obj in enumerate(li_users_at_event):
        new_EU_object = eventUser_Class(
            user_objectId = eu_obj.objectId,
            event_userNum = n + 1,
            username = eu_obj.username,
            sex = eu_obj.sex
        )
        li_eu_obj_to_upload.append(new_EU_object)

    # TODO: ghost users (see determine_ghosts_and_stations) are not yet
    # appended here; the previous draft of that logic was dead code that
    # referenced undefined names and has been removed.

    # Call batcher.batch_save on slices of the list no larger than 50.
    # BUG FIX: the original used count_eu/50, which is float division in
    # Python 3 and makes range() raise TypeError; use floor division.
    batcher = ParseBatcher()
    for k in range(count_eu // 50 + 1):
        lo = 50 * k
        hi = min(50 * (k + 1), count_eu)
        batcher.batch_save(li_eu_obj_to_upload[lo:hi])

    print ("\n{} zE0001_User objects uploaded to Parse in {} seconds.\n"
           .format(count_eu, round(time.time() - function_start_time, 2)))

    return li_eu_obj_to_upload