def update_waiver_order():
    """Re-prioritise (and implicitly delete) the current user's waiver claims.

    Reads a comma-separated list of claim positions from the posted
    ``priorities`` field. A claim whose 1-based rank appears in the list gets
    its ``priority`` set to that rank's index within the list; claims whose
    rank is absent are deleted. Edits are rejected after the waiver deadline.
    """
    # Parse "3,1,2"-style input; any non-integer token voids the whole list.
    try:
        priorities = [int(prio) for prio in request.form.get('priorities', '').split(',')]
    except ValueError:
        priorities = []
    wgw = waiver_gameweek()
    if datetime.now() >= wgw['waiver']:
        flash("The deadline for waivers this week has passed. You can no longer edit your claims.")
    else:
        # This user's claims for the current week, ordered by stored priority.
        current_claims = sorted(db.get('claims', dict(user=current_user.get_id(), week=wgw['week'])), key=lambda claim: claim['priority'])
        deleted_claims = []
        for (n, claim) in enumerate(current_claims):
            try:
                # New priority = position of this claim's old rank in the submitted order.
                claim['priority'] = priorities.index(n + 1)
            except ValueError:
                # Rank not present in the submitted list -> the user removed this claim.
                deleted_claims.append(claim)
        db.save_all([claim for claim in current_claims if claim not in deleted_claims])
        for claim in deleted_claims:
            db.delete(claim)
    return redirect(url_for('waiver_claims'))
def brew():
    """Start or stop brewing based on the JSON body's ``brewing`` flag.

    Cancels any previously armed auto-stop timer, then (when starting) arms a
    new one-shot timer and records the start time. Returns the new state plus
    a time value: 360.0 while brewing, None otherwise.
    """
    j = request.get_json(True)
    if 'brewing' not in j:
        abort(400)
    pin.set_state(j['brewing'])
    t = None
    # Always cancel a stale timer before (re)arming or stopping.
    if app.BREW_TIMER is not None:
        app.BREW_TIMER.cancel()
    if j['brewing']:
        app.BREW_TIMER = Timer(TIME, stop_brew)
        app.BREW_TIMER.start()
        t = datetime.datetime.now().strftime(_TIME_FMT)
        db.save('brew_time', {'time': t})
        # NOTE(review): t is overwritten here, so the formatted start time is
        # only persisted, never returned. 360.0 is presumably the brew
        # duration in seconds -- confirm it should match TIME.
        t = 360.0
    else:
        db.delete('brew_time')
    return jsonify({'brewing': j['brewing'], 'time': t})
def update_waiver_order():
    """Apply the user's submitted claim ordering; claims left out are removed.

    The posted ``priorities`` field holds comma-separated 1-based ranks. Each
    claim keeps the index of its old rank within that list as its new
    priority; a claim whose rank is missing is deleted. No changes are
    allowed once the week's waiver deadline has passed.
    """
    raw = request.form.get('priorities', '')
    try:
        requested_order = [int(token) for token in raw.split(',')]
    except ValueError:
        requested_order = []

    gameweek = waiver_gameweek()
    if datetime.now() >= gameweek['waiver']:
        flash("The deadline for waivers this week has passed. You can no longer edit your claims.")
        return redirect(url_for('waiver_claims'))

    claims = db.get('claims', dict(user=current_user.get_id(), week=gameweek['week']))
    ordered_claims = sorted(claims, key=lambda c: c['priority'])

    kept, dropped = [], []
    for rank, claim in enumerate(ordered_claims, start=1):
        if rank in requested_order:
            claim['priority'] = requested_order.index(rank)
            kept.append(claim)
        else:
            dropped.append(claim)

    db.save_all(kept)
    for claim in dropped:
        db.delete(claim)
    return redirect(url_for('waiver_claims'))
def delete(self):
    """Remove this post's row from the posts table; always returns 1."""
    params = {'id': self.id, 'board': self.board.name}
    db.delete(table='posts', params=params)
    return 1
def deleteItem(self, id, user):
    """Delete item ``id`` if ``user`` is its creator.

    Returns True on success, False when the caller is not the creator.
    """
    # Only the creator of an item may delete it.
    if not self.isCreator(id, user):
        return False
    doomed = session.query(Item).filter_by(id=id).first()
    session.delete(doomed)
    session.commit()
    return True
def delete_department(department_id):
    """Delete the Department with the given id.

    Returns 204 No Content on success; raises a 404 HTTPException when no
    such department exists.
    """
    department = db.query(models.Department).get(department_id)
    if department is None:
        raise HTTPException(status_code=404, detail=f"Could not find Department with ID {department_id}")
    db.delete(department)
    db.commit()
    return Response(status_code=status.HTTP_204_NO_CONTENT)
def delete_user(user_id):
    """Delete the User with the given id.

    Returns 204 No Content on success; raises a 404 HTTPException when no
    such user exists.
    """
    user = db.query(models.User).get(user_id)
    if user is None:
        raise HTTPException(status_code=404, detail=f"Could not find User with ID {user_id}")
    db.delete(user)
    db.commit()
    return Response(status_code=status.HTTP_204_NO_CONTENT)
def drop_index(task_id):
    """Drop the RediSearch index for ``task_id`` and its cached Redis keys.

    Intended to be scheduled (e.g. as a Celery task).
    """
    print("Remove key/index for %s from redis" % task_id)
    redisearch.Client(task_id).drop_index()
    # Remove both the result key and its companion columns key.
    for key in (task_id, "%s:cols" % task_id):
        db.delete(key)
def run_inceptionV3():
    """
    Run the pre-trained base Inception V3 model and send image to queue

    Listening user submitted images and stack them in a Redis queue
    """
    data = {"success": False}
    # load model name
    model_name = request.form.get('model_name')
    # load and pre-processing image (Inception V3 expects 299x299 input)
    img = request.files['image']
    img = image.load_img(img, target_size = (299, 299))
    x = np.expand_dims(image.img_to_array(img), axis=0)
    x = preprocess_input(x)
    # C-contiguous copy so the base64 round-trip sees a flat buffer
    x = x.copy(order="C")
    # encode
    x = API_helpers.base64_encode_image(x)
    # create a image id used as both the queue payload id and the result key
    this_id = str(uuid.uuid4())
    d = {"id": this_id, "image": x, "model_name": model_name}
    # push to the redis queue
    db.rpush(INCEPTIONV3_IMAGE_QUEUE, json.dumps(d))
    # Poll Redis until the inference worker writes a result under our id.
    while True:
        # check if the response has been returned
        output = db.get(this_id)
        if output is not None:
            output = output.decode('utf-8')
            data["prediction"] = json.loads(output)
            # one-shot result key: remove it once consumed
            db.delete(this_id)
            break
        else:
            # print "* Waiting for the Inference Server..."
            time.sleep(CLIENT_SLEEP)
    data['success'] = True
    return jsonify({"data": data}), 200
def delete_account():
    """Delete the logged-in user's record, e-mail a confirmation, and clear
    all session state; anonymous visitors are just redirected home."""
    username = session.get('username', None)
    if username is not None:
        db.delete(username)
        email.send_email_del(session.get("email"), session.get("username"))
        for key in ('username', 'battles', 'email', 'wins', 'token'):
            session.pop(key, None)
    return redirect(url_for('index'))
def drop_club(usr_id, club_id):
    """Remove a user's membership in a club and decrement its enrollment count.

    :param usr_id: id of the user leaving the club
    :param club_id: id of the club being left
    """
    delete(DB.CLUBS,
           'DELETE FROM club_user_xref WHERE usr_id=%s AND club_id=%s',
           [usr_id, club_id])
    # BUG FIX: the old SELECT-then-UPDATE read-modify-write could lose updates
    # under concurrent requests; decrement atomically in SQL instead.
    update(DB.CLUBS,
           "UPDATE club SET enrollment_count = enrollment_count - 1 WHERE club_id = %s",
           [club_id])
def delete_club(club_id):
    """Delete a club: unenroll every student, then purge the club and any
    remaining cross-reference rows. Always returns False."""
    # remove all students from a club first
    for student in get_club_students(club_id):
        remove_student(student.usr_id, club_id)
    # remove the club from the club & xref tables
    delete(DB.CLUBS, 'DELETE FROM club WHERE club_id=%s', [club_id])
    delete(DB.CLUBS, 'DELETE FROM club_user_xref WHERE club_id=%s', [club_id])
    return False
def remove_student(student_id, club_id):
    """Remove a student from a club and decrement its enrollment count.

    :param student_id: id of the student being removed
    :param club_id: id of the club
    :return: False (kept for caller compatibility)
    """
    delete(DB.CLUBS,
           'DELETE FROM club_user_xref WHERE usr_id=%s AND club_id=%s',
           [student_id, club_id])
    # BUG FIX: the old SELECT-then-UPDATE read-modify-write could lose updates
    # under concurrent requests; decrement atomically in SQL instead
    # (consistent with drop_club).
    update(DB.CLUBS,
           "UPDATE club SET enrollment_count = enrollment_count - 1 WHERE club_id = %s",
           [club_id])
    return False
def delete(self):
    """Delete a post (and all of its comments) owned by the authorized user.

    Aborts 404 when the post does not exist and 403 when the requester is
    not the post's owner. Returns a success message dict.
    """
    user = authorize(request)
    post_id = get_request_arg('id', int, required=True)
    if not db.exists('POST').where(id=post_id):
        abort(404, 'Post Not Found')
    post = db.select('POST').where(id=post_id).execute()
    # column 1 holds the owner; it must match the authorized user
    if post[1] != user[1]:
        abort(403, 'You Are Unauthorized To Make That Request')
    # column 7 holds the comment-id list; delete each comment first
    for comment_id in text_list_to_set(post[7]):
        db.delete('COMMENT').where(id=comment_id).execute()
    db.delete('POST').where(id=post_id).execute()
    return {'message': 'success'}
def delete(self):
    """
    Delete file action

    Removes the stored file from disk (public or private repo) and the
    matching database record(s), flashing a confirmation message.

    :return: Profile page of current user
    """
    # Only act on recognised file types.
    if any(self.file.type == type_check for type_check in self.type_list):
        # Stored filename is the record id plus the original file's extension.
        self.id = self.id + self.file_path.get_ext(self.file.filename)
        db = Database()
        if self.file.repo == "public":
            if self.file_path.isfile(filename=self.id, type=self.file.type):
                flash('{} is deleted.'.format(self.file.filename))
                self.file_path.delfile(type=self.file.type, filename=self.id)
                if self.file.type == "misc":
                    db.delete(self.file)
                else:
                    # Non-misc files carry a companion open_file record;
                    # delete both rows together.
                    db.delete(self.file.open_file.first(), self.file)
        elif self.file.repo == "private":
            if self.file_path.isfile(filename=self.id, repo="private", type=self.file.type):
                flash('{} is deleted.'.format(self.file.filename))
                self.file_path.delfile(type=self.file.type, filename=self.id, repo="private")
                if self.file.type == "misc":
                    db.delete(self.file)
                else:
                    db.delete(self.file.open_file.first(), self.file)
    return url_for('user', username=current_user.username)
def show_raw_drop(urlstring):
    """Return the drop with the given urlstring (if it exists) as raw
    text/plain, without the normal datadrop page chrome (headings,
    navigation, etc). Returns 404 when no such drop exists."""
    drop = db.Drop.filter_by(urlstring=urlstring).one_or_none()
    if drop is None:
        return render_template('drop_not_found.html'), 404
    drop.views += 1
    # BUG FIX: capture the payload before a potential delete so we never read
    # an attribute of an already-deleted row.
    payload = drop.data
    if drop.self_destructs and drop.views >= drop.self_destructs_in:
        db.delete(drop)
        db.commit()
    else:
        db.save(drop)
    # explicitly create the response object here, because the mimetype needs
    # to be text/plain
    return Response(payload, mimetype='text/plain')
def show_drop(urlstring):
    """Return a page containing the given drop (if the given urlstring
    exists); 404 otherwise."""
    # an edge case to handle URLs that have a period on the end (some people
    # don't clean up urls in sentences, imagine that)
    if urlstring.endswith('.'):
        urlstring = urlstring[:-1]
    drop = db.Drop.filter_by(urlstring=urlstring).one_or_none()
    if drop is None:
        # no drop
        return render_template('drop_not_found.html'), 404
    drop.views += 1
    if drop.self_destructs and drop.views >= drop.self_destructs_in:
        # BUG FIX: render while the drop is still live, then delete --
        # previously the template read attributes of a deleted row.
        page = render_template('drop.html', drop=drop)
        db.delete(drop)
        db.commit()
        return page
    db.save(drop)
    return render_template('drop.html', drop=drop)
def delete(self):
    """Delete a post (and its comments) on behalf of the dummy user.

    Aborts 400 for a missing/unknown id and 403 when the dummy user does not
    own the post. Returns a success message dict.
    """
    user = get_dummy_user()
    raw_id = request.args.get('id', None)
    if not raw_id:
        abort(400, 'Malformed Request')
    post_id = int(raw_id)
    if not db.exists('POST').where(id=post_id):
        abort(400, 'Malformed Request')
    post = db.select('POST').where(id=post_id).execute()
    if post[1] != user[1]:
        abort(403, 'You Are Unauthorized To Make That Request')
    # delete every comment attached to the post, then the post itself
    for comment_id in text_list_to_set(post[7]):
        db.delete('COMMENT').where(id=comment_id).execute()
    db.delete('POST').where(id=post_id).execute()
    return {'message': 'success'}
def delete(self):
    """Delete a post and its comments as the dummy user.

    400 on a missing or unknown id, 403 when the dummy user is not the
    owner; otherwise removes the comments and the post.
    """
    requester = get_dummy_user()
    arg = request.args.get('id', None)
    if not arg:
        abort(400, 'Malformed Request')
    target = int(arg)
    if not db.exists('POST').where(id=target):
        abort(400, 'Malformed Request')
    record = db.select('POST').where(id=target).execute()
    if record[1] != requester[1]:
        abort(403, 'You Are Unauthorized To Make That Request')
    comment_ids = text_list_to_set(record[7])
    for cid in comment_ids:
        db.delete('COMMENT').where(id=cid).execute()
    db.delete('POST').where(id=target).execute()
    return {'message': 'success'}
def delete(self):
    """Delete a section: remove its rows from Section and Entity, then
    decrement the section counters on the owning Topic and Course."""
    sectionId = request.args.get('sectionId')
    # BUG FIX: the query selects topic_id, so the row must be indexed with
    # 'topic_id' (was ['section_id'], a KeyError / wrong value).
    topicId = db.getOne(
        """
        select topic_id from Section where section_id=%s;
        """,
        (sectionId,)
    )['topic_id']
    # BUG FIX: extract the scalar course_id; the raw row dict was previously
    # passed straight into the UPDATE params below.
    courseId = db.getOne(
        """
        select course_id from Topic where topic_id=%s;
        """,
        (topicId,)
    )['course_id']
    db.delete(
        """
        delete from Section where section_id=%s;
        """,
        (sectionId,)
    )
    db.delete(
        """
        delete from Entity where section_id=%s;
        """,
        (sectionId,)
    )
    db.modify(
        """
        update Topic set section_count=section_count-1 where topic_id=%s;
        """,
        (topicId,)
    )
    db.modify(
        """
        update Course set section_count=section_count-1 where course_id=%s;
        """,
        (courseId,)
    )
    return {"state": "success"}
def branch_delete(id):
    """Delete bank branch ``id`` unless staff or accounts still reference it."""
    b = branches.query.get(id)
    if not b:
        flash('没有该支行的信息,无法删除。')
    else:
        # Refuse deletion while staff, deposit accounts or checking accounts
        # are still attached to this branch.
        s = staffs.query.filter(staffs.branchId == id).all()
        uda = user_depo_accounts.query.filter(
            user_depo_accounts.branchId == id).all()
        uca = user_check_accounts.query.filter(
            user_check_accounts.branchId == id).all()
        if s or uda or uca:
            flash('存在该支行的关联信息,无法删除。')
            return redirect(url_for('branch_page'))
        # Remove the branch's record row first (if any), then the branch.
        br = branch_records.query.filter(branch_records.branchId == id).first()
        if br:
            # NOTE(review): this uses db.delete(br) while the branch below
            # uses db.session.delete(b) -- confirm db.delete is the intended
            # helper and not a typo for db.session.delete.
            db.delete(br)
        db.session.delete(b)
        db.session.commit()
    return redirect(url_for('branch_page'))
def delete(self):
    """Delete a topic plus its sections and entities, then fix the owning
    course's topic/section counters."""
    topicId = request.args.get("topicId")
    courseId = request.args.get("courseId")
    res = db.delete(
        """
        delete from Topic where topic_id=%s;
        """,
        (topicId,)
    )
    if res:
        # Collect the topic's sections before deleting them so their
        # entities can be removed afterwards.
        sectionIds = db.getAll(
            """
            select section_id from Section where topic_id=%s;
            """,
            (topicId,)
        )
        cnt = len(sectionIds)
        db.delete(
            """
            delete from Section where topic_id=%s;
            """,
            (topicId,)
        )
        for row in sectionIds:
            db.delete(
                """
                delete from Entity where section_id=%s;
                """,
                (row['section_id'],)
            )
        # BUG FIX: the counter update had no WHERE clause and no params, so it
        # decremented every course and left the %s placeholder unbound; scope
        # it to the owning course and bind (cnt, courseId).
        db.modify(
            """
            update Course set topic_count=topic_count-1, section_count=section_count-%s where course_id=%s;
            """,
            (cnt, courseId)
        )
    return {"state": "success"}
def pred_sentiment():
    """
    Run the pre-trained base Sentiment analysis model and send sentence to queue

    Listening user submitted sentences and stack them in a Redis queue
    """
    logging.info("Inside pred_Sentence")
    data = {"success": False}
    model_name = 'base'
    message = request.form.get('textv')
    logging.info("Received message:%s", message)

    # Queue the job under a fresh id; the inference worker writes its
    # result back to Redis under the same id.
    job_id = str(uuid.uuid4())
    payload = {"id": job_id, "text": message, "model_name": model_name}
    db.rpush(SENTIMENT_TEXT_QUEUE, json.dumps(payload))

    # Poll until the worker has produced a prediction.
    while True:
        raw = db.get(job_id)
        if raw is None:
            time.sleep(CLIENT_SLEEP)
            continue
        data["prediction"] = json.loads(raw.decode('utf-8'))
        db.delete(job_id)
        break

    data['success'] = True
    return jsonify({"data": data}), 200
def submit_pred_upload():
    """Accept an uploaded prediction file, cache its contents in Redis under a
    fresh job id, and return 202 with a Location pointing at the processing
    endpoint for that id."""
    status, message = prepare_predfile(request)
    if status == "error":
        return jsonify({'Message': message}), 500
    df = pd.DataFrame(message)  # if success then message is the dataframe
    rand_id = str(uuid.uuid4())
    cols = list(df.columns.values)
    # The presence of a z_score column selects the filter mode downstream.
    if "z_score" in cols:
        filteropt = 1
    else:
        filteropt = 2
    filterval = "-"
    # TF_gene cells may themselves be comma separated; flatten and dedupe.
    genes_str = ",".join(list(df.TF_gene))
    genes_selected = list(set(genes_str.split(",")))
    datavalues = df.to_dict('records')
    celerytask.savetoredis(rand_id, cols, datavalues, app.config['UPLOAD_PRED_EXPIRY'])
    session_info = {
        "parent_id": "uploadpred",
        "task_id": rand_id,
        "filename": "-",
        "genes_selected": genes_selected,
        "filteropt": filteropt,
        "filterval": filterval,
        "chrver": "-",
        "spec_escore_thres": "-",
        "nonspec_escore_thres": "-"
    }
    # Replace any stale hash under this id, then store with an expiry.
    if db.exists(rand_id):
        db.delete(rand_id)
    db.hmset(rand_id, session_info)
    db.expire(rand_id, app.config['UPLOAD_PRED_EXPIRY'])
    resp = make_response(jsonify({}), 202,
                         {'Location': url_for('process_request', job_id=rand_id)})
    return resp
def brew():
    """Toggle the brewing pin from a JSON body and manage the auto-stop timer.

    Any previously armed timer is cancelled. Starting a brew arms a new
    one-shot timer and records the start time; stopping clears it. The
    response echoes the state with a time of 360.0 while brewing, else None.
    """
    body = request.get_json(True)
    if 'brewing' not in body:
        abort(400)
    brewing = body['brewing']
    pin.set_state(brewing)
    reported_time = None
    if app.BREW_TIMER is not None:
        app.BREW_TIMER.cancel()
    if brewing:
        app.BREW_TIMER = Timer(TIME, stop_brew)
        app.BREW_TIMER.start()
        started_at = datetime.datetime.now().strftime(_TIME_FMT)
        db.save('brew_time', {'time': started_at})
        reported_time = 360.0
    else:
        db.delete('brew_time')
    return jsonify({'brewing': brewing, 'time': reported_time})
def delete_reminder(row_id) -> object:
    """
    Returns reminder which was deleted by user.

    Deletes row ``row_id`` from the reminder table and rebuilds the reminder
    object from the deleted row; returns the error text when the row is not
    present in the database.
    """
    try:
        deleted_row = db.delete('reminder', row_id)
    except exceptions.NotConsistInDB as e:
        return str(e)
    row_pk, title, category, date, is_done, frequency = deleted_row
    return _recognize_category(id=row_pk,
                               title=title,
                               date=date,
                               category=category,
                               frequency=frequency,
                               is_done=is_done)
def _set_tags(self, tags):
    """Setter backing the tags property: re-associate this post with the Tag
    rows named in *tags*, creating any tags that do not yet exist."""
    self._tags = tags
    if self.id:
        # Drop stale tag associations before re-adding the current set.
        stale = db.delete(post_tags, post_tags.c.post_id == self.id)
        db.engine.execute(stale)
    for name in set(self.taglist):
        slug = slugify(name)
        existing = Tag.query.filter(Tag.slug == slug).first()
        if existing is None:
            # First use of this tag anywhere: create the row.
            existing = Tag(name=name, slug=slug)
            db.session.add(existing)
        if self not in existing.posts:
            existing.posts.append(self)
def _set_tags(self, tags):
    """Setter backing the tags property: re-associate this post with the Tag
    rows named in *tags*, creating any tags that do not yet exist."""
    self._tags = tags
    if self.id:
        # ensure existing tag references are removed
        d = db.delete(post_tags, post_tags.c.post_id==self.id)
        db.engine.execute(d)
    for tag in set(self.taglist):
        slug = slugify(tag)
        tag_obj = Tag.query.filter(Tag.slug==slug).first()
        if tag_obj is None:
            # first use of this tag anywhere -> create the row
            tag_obj = Tag(name=tag, slug=slug)
            db.session.add(tag_obj)
        if self not in tag_obj.posts:
            tag_obj.posts.append(self)
def delete_section(teacher_id, section_id):
    """Delete an elective section and its cross-reference rows, but only when
    ``teacher_id`` owns the section. Returns True on success, else False."""
    print("Im boutta delete something")
    owner_row = query_one(
        DB.ELECTIVE,
        "SELECT * FROM elective_section WHERE section_id=%s AND teacher_id=%s",
        [section_id, teacher_id])
    can_delete = owner_row is not None
    print(can_delete)
    print(teacher_id)
    print(section_id)
    if not can_delete:
        return False
    # Remove dependent xref rows before the section itself.
    for statement in (
            "DELETE FROM elective_user_xref WHERE section_id=%s",
            "DELETE FROM elective_section_time_xref WHERE section_id=%s",
            "DELETE FROM elective_section WHERE section_id=%s"):
        delete(DB.ELECTIVE, statement, [section_id])
    return True
def delete(self, likeRecipeModel):
    """Remove the like row linking a user and a recipe.

    Raises ValueError when the argument is not a LikeRecipeModel.
    """
    if not isinstance(likeRecipeModel, LikeRecipeModel):
        raise ValueError("likeRecipeModel should be of type LikeRecipeModel")
    query = 'DELETE FROM LikeRecipe WHERE LikeRecipe.id_Recipe = %(id_Recipe)s AND LikeRecipe.id_User = %(id_User)s'
    params = {
        'id_Recipe': likeRecipeModel.id_Recipe,
        'id_User': likeRecipeModel.id_User,
    }
    db.delete(query, params)
def main(org_name=None, org_sources=None):
    ''' Run update over all organizations. Optionally, update just one.
    '''
    # set org_sources
    org_sources = org_sources or ORG_SOURCES_FILENAME

    # Collect a set of fresh organization names.
    organization_names = set()

    # Retrieve all organizations and shuffle the list in place.
    orgs_info = get_organizations(org_sources)
    shuffle(orgs_info)

    if org_name:
        orgs_info = [org for org in orgs_info if org['name'] == org_name]

    # Iterate over organizations and projects, saving them to db.session.
    for org_info in orgs_info:
        if not is_safe_name(org_info['name']):
            # Record the bad name as an Error row rather than aborting the run.
            error_dict = {
                "error": unicode('ValueError: Bad organization name: "%s"' % org_info['name']),
                "time": datetime.now()
            }
            new_error = Error(**error_dict)
            db.session.add(new_error)
            # commit the error
            db.session.commit()
            continue

        try:
            filter = Organization.name == org_info['name']
            existing_org = db.session.query(Organization).filter(filter).first()
            organization_names.add(org_info['name'])

            # Mark everything associated with this organization for deletion
            # at first; anything refreshed below gets keep=True again, and the
            # leftovers are purged at the end of the iteration.
            # :::here (event/false, story/false, project/false, organization/false)
            db.session.execute(db.update(Event, values={'keep': False}).where(Event.organization_name == org_info['name']))
            db.session.execute(db.update(Story, values={'keep': False}).where(Story.organization_name == org_info['name']))
            db.session.execute(db.update(Project, values={'keep': False}).where(Project.organization_name == org_info['name']))
            db.session.execute(db.update(Organization, values={'keep': False}).where(Organization.name == org_info['name']))

            # commit the false keeps
            db.session.commit()

            # Empty lat longs are okay.
            if 'latitude' in org_info:
                if not org_info['latitude']:
                    org_info['latitude'] = None
            if 'longitude' in org_info:
                if not org_info['longitude']:
                    org_info['longitude'] = None

            organization = save_organization_info(db.session, org_info)
            organization_names.add(organization.name)
            # flush the organization
            db.session.flush()

            if organization.rss or organization.website:
                logging.info("Gathering all of %s's stories." % organization.name)
                stories = get_stories(organization)
                if stories:
                    for story_info in stories:
                        save_story_info(db.session, story_info)
                    # flush the stories
                    db.session.flush()

            if organization.projects_list_url:
                logging.info("Gathering all of %s's projects." % organization.name)
                projects = get_projects(organization)
                for proj_dict in projects:
                    save_project_info(db.session, proj_dict)
                # flush the projects
                db.session.flush()

            if organization.events_url:
                if not meetup_key:
                    logging.error("No Meetup.com key set.")
                if 'meetup.com' not in organization.events_url:
                    logging.error("Only Meetup.com events work right now.")
                else:
                    logging.info("Gathering all of %s's events." % organization.name)
                    identifier = get_event_group_identifier(organization.events_url)
                    if identifier:
                        for event in get_meetup_events(organization, identifier):
                            save_event_info(db.session, event)
                        # flush the events
                        db.session.flush()
                    else:
                        logging.error("%s does not have a valid events url" % organization.name)

            # Get issues for all of the projects
            logging.info("Gathering all of %s's open GitHub issues." % organization.name)
            issues = get_issues(organization.name)
            for issue in issues:
                save_issue(db.session, issue)
            # flush the issues
            db.session.flush()
            # labels are saved in a second pass after all issues exist
            for issue in issues:
                save_labels(db.session, issue)

            # commit everything
            db.session.commit()

            # Remove everything marked for deletion.
            # :::here (event/delete, story/delete, project/delete, issue/delete, organization/delete)
            db.session.query(Event).filter(Event.keep == False).delete()
            db.session.query(Story).filter(Story.keep == False).delete()
            db.session.query(Issue).filter(Issue.keep == False).delete()
            db.session.query(Project).filter(Project.keep == False).delete()
            db.session.query(Organization).filter(Organization.keep == False).delete()

            # commit objects deleted for keep=False
            db.session.commit()

        except:
            # Raise the error, get out of main(), and don't commit the transaction.
            raise
        else:
            # Commit and move on to the next organization.
            # final commit before moving on to the next organization
            db.session.commit()

    # prune orphaned organizations if no organization name was passed
    if not org_name:
        for bad_org in db.session.query(Organization):
            if bad_org.name in organization_names:
                continue
            # delete orphaned organizations, all other deletions will cascade
            db.session.execute(db.delete(Organization).where(Organization.name == bad_org.name))
            # commit for deleting orphaned organizations
            db.session.commit()
def main(org_name=None):
    ''' Run update over all organizations. Optionally, update just one.
    '''
    # Keep a set of fresh organization names.
    organization_names = set()

    # Retrieve all organizations and shuffle the list in place.
    orgs_info = get_organizations()
    shuffle(orgs_info)

    if org_name:
        orgs_info = [org for org in orgs_info if org['name'] == org_name]

    # Iterate over organizations and projects, saving them to db.session.
    for org_info in orgs_info:
        try:
            # Mark everything in this organization for deletion at first;
            # items refreshed below regain keep=True, leftovers are purged.
            db.session.execute(db.update(Event, values={'keep': False}).where(Event.organization_name == org_info['name']))
            db.session.execute(db.update(Story, values={'keep': False}).where(Story.organization_name == org_info['name']))
            db.session.execute(db.update(Project, values={'keep': False}).where(Project.organization_name == org_info['name']))
            db.session.execute(db.update(Organization, values={'keep': False}).where(Organization.name == org_info['name']))

            organization = save_organization_info(db.session, org_info)
            organization_names.add(organization.name)

            if organization.rss or organization.website:
                logging.info("Gathering all of %s's stories." % unidecode(organization.name))
                stories = get_stories(organization)
                if stories:
                    for story_info in stories:
                        save_story_info(db.session, story_info)

            if organization.projects_list_url:
                logging.info("Gathering all of %s's projects." % unidecode(organization.name))
                projects = get_projects(organization)
                for proj_info in projects:
                    save_project_info(db.session, proj_info)

            if organization.events_url:
                logging.info("Gathering all of %s's events." % unidecode(organization.name))
                identifier = get_event_group_identifier(organization.events_url)
                if identifier:
                    for event in get_meetup_events(organization, identifier):
                        save_event_info(db.session, event)
                else:
                    logging.error("%s does not have a valid events url" % unidecode(organization.name))

            # Remove everything marked for deletion.
            db.session.execute(db.delete(Event).where(Event.keep == False))
            db.session.execute(db.delete(Story).where(Story.keep == False))
            db.session.execute(db.delete(Project).where(Project.keep == False))
            db.session.execute(db.delete(Organization).where(Organization.keep == False))

        except:
            # Raise the error, get out of main(), and don't commit the transaction.
            raise
        else:
            # Commit and move on to the next organization.
            db.session.commit()

    # Stop right here if an org name was specified.
    if org_name:
        return

    # Delete any organization not found on this round.
    for bad_org in db.session.query(Organization):
        if bad_org.name in organization_names:
            continue
        db.session.execute(db.delete(Event).where(Event.organization_name == bad_org.name))
        db.session.execute(db.delete(Story).where(Story.organization_name == bad_org.name))
        db.session.execute(db.delete(Project).where(Project.organization_name == bad_org.name))
        db.session.execute(db.delete(Organization).where(Organization.name == bad_org.name))
        db.session.commit()
def main(org_name=None, minimum_age=3*3600):
    ''' Run update over all organizations. Optionally, update just one.
        Also optionally, reset minimum age to trigger org update, in seconds.
    '''
    # Set a single cutoff timestamp for orgs we'll look at.
    maximum_updated = time() - minimum_age

    # Keep a set of fresh organization names.
    organization_names = set()

    # Retrieve all organizations and shuffle the list in place.
    orgs_info = get_organizations()
    shuffle(orgs_info)

    if org_name:
        orgs_info = [org for org in orgs_info if org['name'] == org_name]

    # Iterate over organizations and projects, saving them to db.session.
    for org_info in orgs_info:
        if not is_safe_name(org_info['name']):
            # Record the bad name as an Error row rather than aborting the run.
            error_dict = {
                "error": 'ValueError: Bad organization name: "%s"' % org_info['name'],
                "time": datetime.now()
            }
            new_error = Error(**error_dict)
            db.session.add(new_error)
            db.session.commit()
            continue

        try:
            filter = Organization.name == org_info['name']
            existing_org = db.session.query(Organization).filter(filter).first()
            organization_names.add(org_info['name'])

            if existing_org and not org_name:
                if existing_org.last_updated > maximum_updated:
                    # Skip this organization, it's been updated too recently.
                    logging.info("Skipping update for {0}".format(org_info['name'].encode('utf8')))
                    continue

            # Mark everything in this organization for deletion at first;
            # whatever gets refreshed below regains keep=True.
            db.session.execute(db.update(Event, values={'keep': False}).where(Event.organization_name == org_info['name']))
            db.session.execute(db.update(Story, values={'keep': False}).where(Story.organization_name == org_info['name']))
            db.session.execute(db.update(Project, values={'keep': False}).where(Project.organization_name == org_info['name']))
            db.session.execute(db.update(Organization, values={'keep': False}).where(Organization.name == org_info['name']))

            organization = save_organization_info(db.session, org_info)
            organization_names.add(organization.name)

            if organization.rss or organization.website:
                logging.info("Gathering all of %s's stories." % organization.name)
                stories = get_stories(organization)
                if stories:
                    for story_info in stories:
                        save_story_info(db.session, story_info)

            if organization.projects_list_url:
                logging.info("Gathering all of %s's projects." % organization.name)
                projects = get_projects(organization)
                for proj_info in projects:
                    save_project_info(db.session, proj_info)

            if organization.events_url:
                if not meetup_key:
                    logging.error("No Meetup.com key set.")
                else:
                    logging.info("Gathering all of %s's events." % organization.name)
                    identifier = get_event_group_identifier(organization.events_url)
                    if identifier:
                        for event in get_meetup_events(organization, identifier):
                            save_event_info(db.session, event)
                    else:
                        logging.error("%s does not have a valid events url" % organization.name)

            # Get issues for all of the projects
            logging.info("Gathering all of %s's project's issues." % organization.name)
            issues = get_issues(organization.name)
            for issue_info in issues:
                save_issue_info(db.session, issue_info)

            # Remove everything marked for deletion.
            # BUG FIX: `filter(not Model.keep)` applied Python's `not` to a
            # SQLAlchemy column attribute, which does not build a SQL
            # expression (bool() on a clause raises / yields a constant).
            # Use an explicit SQL boolean comparison instead.
            db.session.query(Event).filter(Event.keep == False).delete()
            db.session.query(Story).filter(Story.keep == False).delete()
            db.session.query(Project).filter(Project.keep == False).delete()
            db.session.query(Issue).filter(Issue.keep == False).delete()
            db.session.query(Organization).filter(Organization.keep == False).delete()

        except:
            # Raise the error, get out of main(), and don't commit the transaction.
            raise
        else:
            # Commit and move on to the next organization.
            db.session.commit()

    # Stop right here if an org name was specified.
    if org_name:
        return

    # Delete any organization not found on this round.
    for bad_org in db.session.query(Organization):
        if bad_org.name in organization_names:
            continue
        db.session.execute(db.delete(Event).where(Event.organization_name == bad_org.name))
        db.session.execute(db.delete(Story).where(Story.organization_name == bad_org.name))
        db.session.execute(db.delete(Project).where(Project.organization_name == bad_org.name))
        db.session.execute(db.delete(Organization).where(Organization.name == bad_org.name))
        db.session.commit()
def label():
    """
    given a folder in S3 bucket label all images in it.

    Downloads the folder locally, queues each image for inference through
    Redis, collects the predictions in submission order, and cleans up the
    temporary folder before returning the results.
    """
    s3_bucket_name = request.form.get('s3_bucket_name')
    s3_bucket_prefix = request.form.get('s3_bucket_prefix')
    model_name = request.form.get('model_name')
    # load image from s3
    image_data_path = API_helpers.download_a_dir_from_s3(s3_bucket_name, s3_bucket_prefix, local_path = TEMP_FOLDER)
    # for each images in the folder
    # supports .png and .jpg
    all_image_ids = []
    all_pred = []
    for each_image in glob.glob(image_data_path + "/*.*"):
        # NOTE(review): "iamge_name" is a typo for image_name; kept as-is here.
        iamge_name = each_image.split('/')[-1]
        this_img = image.load_img(each_image, target_size = (299, 299))
        # image pre-processing (C-contiguous so base64 round-trip is safe)
        x = np.expand_dims(image.img_to_array(this_img), axis=0)
        x = preprocess_input(x)
        x = x.copy(order="C")
        # encode
        x = API_helpers.base64_encode_image(x)
        # create a image id
        this_id = str(uuid.uuid4())
        all_image_ids.append((this_id, iamge_name))
        d = {"id": this_id, "image": x, "model_name": model_name}
        # push to the redis queue
        db.rpush(INCEPTIONV3_IMAGE_QUEUE, json.dumps(d))
    all_pred = []
    # Collect results in submission order, polling Redis per image id.
    while all_image_ids:
        # pop the first one from the queue
        this_id, this_image_name = all_image_ids.pop(0)
        this_pred = {}
        while True:
            # check if the response has been returned
            output = db.get(this_id)
            if output is not None:
                this_pred["image name"] = this_image_name
                output = output.decode('utf-8')
                this_pred["prediction"] = json.loads(output)
                # one-shot result key: remove it once consumed
                db.delete(this_id)
                break
            else:
                time.sleep(CLIENT_SLEEP)
        all_pred.append(this_pred)
    # remove the temp folder
    shutil.rmtree(image_data_path, ignore_errors=True)
    return jsonify({"data": all_pred})
def delete_gallery(uuid):
    """Delete a gallery and every item that belongs to it."""
    gallery = get(Gallery, uuid=uuid)
    # Items first, then the gallery itself.
    items = get_list(GalleryItem, gallery_uuid=uuid)
    for entry in items:
        delete(entry)
    delete(gallery)
def chestXray_api():
    """JSON API endpoint: authenticate the caller against Firebase, queue the
    uploaded chest X-ray for inference through Redis, and return the model's
    predictions as JSON."""
    data = {"success": False}
    if request.method == 'POST':
        # Verify the user is authorized ################################
        email = request.authorization["username"]
        password = request.authorization["password"]
        try:
            response = auth.sign_in_with_email_and_password(email, password)
            if response['registered']:
                session["USERNAME"] = email
        except requests.exceptions.HTTPError:
            data["response"] = "Usuario no Autorizado"
            return jsonify(data)
        ##################################################################
        # API image prediction ###########################################
        if request.files:
            # Variable initialisation ###################################
            image = request.files['image']
            root = 'app'
            # fresh id: names the temp image and keys the Redis round-trip
            k = str(uuid.uuid4())
            print(f'ID de la imagen:{k}')
            ############################################################
            # Delete old images ########################################
            print(f'\n1.) Borrando Imagenes Antiguas ...')
            files = glob.glob('app/static/uploads/chestXray/*.jpg')
            for f in files:
                os.remove(f)
            print(f'\nImagenes Borradas!')
            ############################################################
            # Store photo ids in Redis #################################
            print(f'\n2.) Guardando Imagenes en Redis ...')
            path, filename = check_image(image, k)
            input_file = open_image(path).resize(256)
            img_original = open_image(path)
            img_original.save(
                os.path.join(root, app.config['IMAGE_TEMP'], 'chestXray', f'{k}.jpg'))
            # C-contiguous copy so the base64 round-trip is safe
            input_file_db = input_file.data.numpy()
            input_file_db = input_file_db.copy(order="C")
            input_file_db = base64_encode_image(input_file_db)
            d = {"id": k, "image": input_file_db}
            db.rpush(settings.IMAGE_QUEUE, json.dumps(d))
            print(f'\nImagenes Guardadas!')
            ############################################################
            # Poll for predictions #####################################
            print(f'\n3.) Buscando predicciones ...')
            while True:
                output = db.get(k)
                if output is not None:
                    output = output.decode("utf-8")
                    data["predictions"] = json.loads(output)
                    # one-shot result key: remove once consumed
                    db.delete(k)
                    break
                time.sleep(settings.CLIENT_SLEEP)
            print(f'\nPredicciones encontradas!')
            ############################################################
            # Process predictions ######################################
            print(f'\n4.) Procesando las predicciones ...\n')
            data["success"] = True
            json_file = jsonify(data)
            outputs_list = []
            for i in range(len(data['predictions'])):
                outputs_list.append(data['predictions'][i]['probability'])
            print(outputs_list)
            pat_preds = patologies_preds(classes, tresholds, outputs_list)
            if len(pat_preds) == 0:
                pat_preds = 'No se encontro nada.'
            else:
                pat_preds = ', '.join(pat_preds) + '.'
            print(f'\nPredicciones procesadas!\n')
            ############################################################
    return jsonify(data)
def delete_asana_attribute(uuid):
    """Delete the AsanaAttribute identified by ``uuid``."""
    record = get(AsanaAttribute, uuid=uuid)
    delete(record)
def chestXray_pred():
    """Dashboard view: on POST, queue the uploaded chest X-ray on Redis for
    the model server, poll for predictions, post-process them into ranked
    pathology scores, and render the dashboard template with the results.
    On GET (or POST without files), render the dashboard for a logged-in
    session or fall back to the login page.
    """
    upload = True
    data = {"success": False}
    if request.method == 'POST':
        if request.files:
            # Initialize working variables; `k` is a fresh UUID used both as
            # the Redis key and the saved image's filename.
            image = request.files['image']
            root = 'app'
            k = str(uuid.uuid4())
            print(f'ID de la imagen:{k}')

            # Delete previously uploaded images so the uploads folder only
            # ever holds the current request's file.
            print(f'\n1.) Borrando Imagenes Antiguas ...')
            files = glob.glob('app/static/uploads/chestXray/*.jpg')
            for f in files:
                os.remove(f)
            print(f'\nImagenes Borradas!')

            # Save the image id/payload into Redis for the model server.
            print(f'\n2.) Guardando Imagenes en Redis ...')
            path, filename = check_image(image, k)
            input_file = open_image(path).resize(256)  # resized copy for Redis
            img_original = open_image(path)  # full-size copy for the model server
            img_original.save(
                os.path.join(root, app.config['IMAGE_TEMP'], 'chestXray',
                             f'{k}.jpg'))
            input_file_db = input_file.data.numpy()
            # Redis needs a C-contiguous buffer before base64 encoding.
            input_file_db = input_file_db.copy(order="C")
            input_file_db = base64_encode_image(input_file_db)
            d = {"id": k, "image": input_file_db}
            db.rpush(settings.IMAGE_QUEUE, json.dumps(d))
            print(f'\nImagenes Guardadas!')

            # Poll Redis until the model server publishes predictions under
            # our UUID key, then consume and delete them.
            # NOTE(review): no timeout — this loops forever if the model
            # server never answers; confirm whether that is acceptable.
            print(f'\n3.) Buscando predicciones ...')
            while True:
                output = db.get(k)  # predictions written by the model server
                if output is not None:
                    output = output.decode("utf-8")
                    data["predictions"] = json.loads(output)
                    db.delete(k)
                    break
                time.sleep(settings.CLIENT_SLEEP)
            print(f'\nPredicciones encontradas!')

            # --- Post-process predictions for display -----------------------
            print(f'\n4.) Procesando las predicciones ...\n')
            data["success"] = True
            json_file = jsonify(data)
            outputs_list = []
            for i in range(len(data['predictions'])):
                outputs_list.append(data['predictions'][i]['probability'])
            print(outputs_list)
            print(tresholds)
            # Normalize each probability by twice its class threshold so 0.5
            # marks the decision boundary for every class — TODO confirm.
            pred_list = []
            for index, elem in enumerate(classes):
                pred_list.append(outputs_list[index] / (2 * tresholds[elem]))
            print(pred_list)

            def filterByKey(keys, pred_dict):
                # Return the sub-dict of pred_dict restricted to `keys`.
                return {x: pred_dict[x] for x in keys}

            # Split scores into pathology classes vs. "other" classes.
            pred_dict = dict(zip(classes, pred_list))
            pat_dict = filterByKey(pat_only, pred_dict)
            other_dict = filterByKey(other_only, pred_dict)
            # Rank pathologies by normalized score, highest first.
            pred_tuple = sorted(pat_dict.items(), key=lambda x: x[1],
                                reverse=True)
            print(pred_tuple)
            pat_preds, war_preds = patologies_preds(classes, tresholds,
                                                    outputs_list)
            print(pat_preds)
            #if len(pat_preds) == 0:
            #    pat_preds = 'Sin Hallazgos'
            #else:
            #    pat_preds = ', '.join(pat_preds)+'.'
            # Drop the "no findings" label from warnings, then flatten the
            # remaining warnings into a display string.
            if 'Sin Hallazgo' in war_preds:
                war_preds.remove('Sin Hallazgo')
            if len(war_preds) == 0:
                war_preds = 'Sin Hallazgos'
            else:
                war_preds = ', '.join(war_preds) + '.'
            print(f'\nPredicciones procesadas!\n')
            data_user = session["USERDATA"]
            return render_template('/dashboard/chestXray_dashboard.html',
                                   outputs=outputs_list,
                                   filename=filename,
                                   classes=classes,
                                   pat_preds=pat_preds,
                                   war_preds=war_preds,
                                   tresholds=tresholds,
                                   pred_tuple=pred_tuple,
                                   other_dict=other_dict,
                                   data_user=data_user)

    # Non-POST (or POST without files): show the dashboard landing view if a
    # user is logged in, otherwise the login page.
    main = True
    if not session.get("USERNAME") is None:
        print("Username found in session")
        data_user = session["USERDATA"]
        return render_template('/dashboard/chestXray_dashboard.html',
                               main=main,
                               data_user=data_user)
    else:
        print("No username found in session")
        return render_template('/home/login_home.html', entrar=entrar)
def delete_asana(uuid): asana = get(Asana, uuid=uuid) delete(asana)
print 'Gathering all of ' + organization['name']+ "'s projects." projects = get_projects(organization['name'], organization['projects_list_url']) for project in projects: # Mark this project for safe-keeping project['keep'] = True # Select the current project, filtering on name AND brigade # filter = Project.name == project['name'], Project.brigade == project['brigade'] existing_project = db.session.query(Project).filter(Project.name == project['name'], Project.brigade == project['brigade']).first() # If this is a new project if not existing_project: project = Project(**project) db.session.add(project) continue # Update exisiting project details for (field, value) in project.items(): setattr(existing_project, field, value) # Save each project to db db.session.commit() # Remove everything marked for deletion. db.session.execute(db.delete(Project).where(Project.keep == False)) db.session.commit()
def delete_talk(uuid): talk = get(Talk, uuid=uuid) delete(talk)
def delete(uuid=None): gallery = Gallery.get(uuid=uuid) GalleryItem.delete_list(uuid) delete(gallery)
def GET(self, id): db.delete('client', int(id)) raise web.seeother('/clients')
def main(org_name=None, org_sources=None):
    ''' Run update over all organizations. Optionally, update just one.

    org_name: restrict the run to the organization with this exact name.
    org_sources: path/source list of organizations; falls back to
        ORG_SOURCES_FILENAME when not given.
    '''
    # set org_sources
    org_sources = org_sources or ORG_SOURCES_FILENAME

    # Collect a set of fresh organization names.
    organization_names = set()

    # Retrieve all organizations and shuffle the list in place.
    orgs_info = get_organizations(org_sources)
    shuffle(orgs_info)

    if org_name:
        orgs_info = [org for org in orgs_info if org['name'] == org_name]

    # Iterate over organizations and projects, saving them to db.session.
    for org_info in orgs_info:
        if not is_safe_name(org_info['name']):
            # Record the bad name as an Error row instead of aborting the run.
            error_dict = {
                "error": unicode('ValueError: Bad organization name: "%s"' % org_info['name']),
                "time": datetime.now()
            }
            new_error = Error(**error_dict)
            db.session.add(new_error)
            # commit the error
            db.session.commit()
            continue

        try:
            filter = Organization.name == org_info['name']
            existing_org = db.session.query(Organization).filter(
                filter).first()
            organization_names.add(org_info['name'])

            # Mark everything associated with this organization for deletion at first.
            # Anything re-saved below flips keep back to True; whatever stays
            # False is pruned at the end of the try block.
            # :::here (event/false, story/false, project/false, organization/false)
            db.session.execute(
                db.update(Event, values={
                    'keep': False
                }).where(Event.organization_name == org_info['name']))
            db.session.execute(
                db.update(Story, values={
                    'keep': False
                }).where(Story.organization_name == org_info['name']))
            db.session.execute(
                db.update(Project, values={
                    'keep': False
                }).where(Project.organization_name == org_info['name']))
            db.session.execute(
                db.update(Organization, values={
                    'keep': False
                }).where(Organization.name == org_info['name']))

            # commit the false keeps
            db.session.commit()

            # Empty lat longs are okay: normalize falsy values to None.
            if 'latitude' in org_info:
                if not org_info['latitude']:
                    org_info['latitude'] = None
            if 'longitude' in org_info:
                if not org_info['longitude']:
                    org_info['longitude'] = None

            organization = save_organization_info(db.session, org_info)
            organization_names.add(organization.name)

            # flush the organization
            db.session.flush()

            if organization.rss or organization.website:
                logging.info("Gathering all of %s's stories." %
                             organization.name)
                stories = get_stories(organization)
                if stories:
                    for story_info in stories:
                        save_story_info(db.session, story_info)

                    # flush the stories
                    db.session.flush()

            if organization.projects_list_url:
                logging.info("Gathering all of %s's projects." %
                             organization.name)
                projects = get_projects(organization)
                for proj_dict in projects:
                    save_project_info(db.session, proj_dict)

                # flush the projects
                db.session.flush()

            if organization.events_url:
                # Only Meetup.com event feeds are supported, and they require
                # an API key.
                if not meetup_key:
                    logging.error("No Meetup.com key set.")
                if 'meetup.com' not in organization.events_url:
                    logging.error("Only Meetup.com events work right now.")
                else:
                    logging.info("Gathering all of %s's events." %
                                 organization.name)
                    identifier = get_event_group_identifier(
                        organization.events_url)
                    if identifier:
                        for event in get_meetup_events(organization,
                                                       identifier):
                            save_event_info(db.session, event)

                        # flush the events
                        db.session.flush()
                    else:
                        logging.error("%s does not have a valid events url" %
                                      organization.name)

            # Get issues for all of the projects
            logging.info("Gathering all of %s's open GitHub issues." %
                         organization.name)
            issues = get_issues(organization.name)
            for issue in issues:
                save_issue(db.session, issue)

            # flush the issues
            db.session.flush()

            # Labels are saved in a second pass once all issues are flushed.
            for issue in issues:
                save_labels(db.session, issue)

            # commit everything
            db.session.commit()

            # Remove everything marked for deletion.
            # :::here (event/delete, story/delete, project/delete, issue/delete, organization/delete)
            db.session.query(Event).filter(Event.keep == False).delete()
            db.session.query(Story).filter(Story.keep == False).delete()
            db.session.query(Issue).filter(Issue.keep == False).delete()
            db.session.query(Project).filter(Project.keep == False).delete()
            db.session.query(Organization).filter(
                Organization.keep == False).delete()

            # commit objects deleted for keep=False
            db.session.commit()
        except:
            # Raise the error, get out of main(), and don't commit the transaction.
            raise
        else:
            # Commit and move on to the next organization.
            # final commit before moving on to the next organization
            db.session.commit()

    # prune orphaned organizations if no organization name was passed
    if not org_name:
        for bad_org in db.session.query(Organization):
            if bad_org.name in organization_names:
                continue

            # delete orphaned organizations, all other deletions will cascade
            db.session.execute(
                db.delete(Organization).where(
                    Organization.name == bad_org.name))

            # commit for deleting orphaned organizations
            db.session.commit()
def main(org_name=None, org_sources=None):
    ''' Run update over all organizations. Optionally, update just one.

    org_name: restrict the run to the organization with this exact name.
    org_sources: source list of organizations, passed to get_organizations().
    '''
    # Keep a set of fresh organization names.
    organization_names = set()

    # Retrieve all organizations and shuffle the list in place.
    orgs_info = get_organizations(org_sources)
    shuffle(orgs_info)

    if org_name:
        orgs_info = [org for org in orgs_info if org['name'] == org_name]

    # Iterate over organizations and projects, saving them to db.session.
    for org_info in orgs_info:
        if not is_safe_name(org_info['name']):
            # Record the bad name as an Error row instead of aborting the run.
            error_dict = {
                "error": 'ValueError: Bad organization name: "%s"' % org_info['name'],
                "time": datetime.now()
            }
            new_error = Error(**error_dict)
            db.session.add(new_error)
            db.session.commit()
            continue

        try:
            filter = Organization.name == org_info['name']
            existing_org = db.session.query(Organization).filter(filter).first()
            organization_names.add(org_info['name'])

            # Mark everything in this organization for deletion at first.
            # Anything re-saved below flips keep back to True; whatever stays
            # False is pruned at the end of the try block.
            db.session.execute(db.update(Event, values={'keep': False}).where(Event.organization_name == org_info['name']))
            db.session.execute(db.update(Story, values={'keep': False}).where(Story.organization_name == org_info['name']))
            db.session.execute(db.update(Project, values={'keep': False}).where(Project.organization_name == org_info['name']))
            db.session.execute(db.update(Organization, values={'keep': False}).where(Organization.name == org_info['name']))

            # Empty lat longs are okay: normalize falsy values to None.
            if 'latitude' in org_info:
                if not org_info['latitude']:
                    org_info['latitude'] = None
            if 'longitude' in org_info:
                if not org_info['longitude']:
                    org_info['longitude'] = None

            organization = save_organization_info(db.session, org_info)
            organization_names.add(organization.name)

            if organization.rss or organization.website:
                logging.info("Gathering all of %s's stories." % organization.name)
                stories = get_stories(organization)
                if stories:
                    for story_info in stories:
                        save_story_info(db.session, story_info)

            if organization.projects_list_url:
                logging.info("Gathering all of %s's projects." % organization.name)
                projects = get_projects(organization)
                for proj_info in projects:
                    save_project_info(db.session, proj_info)

            if organization.events_url:
                # Only Meetup.com event feeds are supported, and they require
                # an API key.
                if not meetup_key:
                    logging.error("No Meetup.com key set.")
                if 'meetup.com' not in organization.events_url:
                    logging.error("Only Meetup.com events work right now.")
                else:
                    logging.info("Gathering all of %s's events." % organization.name)
                    identifier = get_event_group_identifier(organization.events_url)
                    if identifier:
                        for event in get_meetup_events(organization, identifier):
                            save_event_info(db.session, event)
                    else:
                        logging.error("%s does not have a valid events url" % organization.name)

            # Get issues for all of the projects.
            logging.info("Gathering all of %s's open GitHub issues." % organization.name)
            issues, labels = get_issues(organization.name)
            # zip pairs each issue with its labels (was an index loop).
            for issue, issue_labels in zip(issues, labels):
                save_issue_info(db.session, issue, issue_labels)

            # Remove everything still marked for deletion.
            # BUG FIX: the original filtered with `not Event.keep` (likewise
            # Story/Project/Organization), which evaluates Python truthiness of
            # the column object instead of building a SQL expression, so the
            # prune never matched the intended rows. Use an explicit
            # comparison, matching the Issue line below.
            db.session.query(Event).filter(Event.keep == False).delete()
            db.session.query(Story).filter(Story.keep == False).delete()
            db.session.query(Project).filter(Project.keep == False).delete()
            db.session.query(Issue).filter(Issue.keep == False).delete()
            db.session.query(Organization).filter(Organization.keep == False).delete()

        except:
            # Raise the error, get out of main(), and don't commit the transaction.
            raise
        else:
            # Commit and move on to the next organization.
            db.session.commit()

    # Stop right here if an org name was specified.
    if org_name:
        return

    # Delete any organization not found on this round, along with its children.
    for bad_org in db.session.query(Organization):
        if bad_org.name in organization_names:
            continue

        db.session.execute(db.delete(Event).where(Event.organization_name == bad_org.name))
        db.session.execute(db.delete(Story).where(Story.organization_name == bad_org.name))
        db.session.execute(db.delete(Project).where(Project.organization_name == bad_org.name))
        db.session.execute(db.delete(Organization).where(Organization.name == bad_org.name))
        db.session.commit()
def delete_candidate(name): """Create new candidate.""" if not db.keys(name): return jsonify({'error': 'candidate does not exists.'}), 404 db.delete(name) return "", 204
def GET(self, id): db.delete('item', int(id)) raise web.seeother('/invoices/' + id)
def delete_asana_image(uuid): asana_image = get(AsanaImage, uuid=uuid) delete(asana_image)
def delete(uuid): talk = get(GalleryItem, uuid=uuid) delete(talk)