def mark_oa(open_access): # load oa paper set pubmed_to_pmc = dict() with open(open_access) as f: f.readline() for line in f: fields = line.strip().split('\t') if len(fields) < 5: continue if not fields[3] or not fields[2]: continue pmid = int(fields[3][5:]) pmc = fields[2] pubmed_to_pmc[pmid] = pmc # download papers that are open-access papers = db_session.query(Paper).all() open_access_papers = list() for paper in papers: if paper.pubmed_id in pubmed_to_pmc: # open-access! open_access_papers.append(paper) paper.open_access = True db_session.commit() assocs = [ assoc for paper in open_access_papers for assoc in paper.associations ] print 'open access papers:', len(open_access_papers) print 'open associaitons:', len(assocs) print 'all papers:', db_session.query(Paper).count() print 'all associaitons:', db_session.query(Association).count()
def get_groups_users_map(friends): # creating a map from groups to users result = dict() print friends for friend_id in friends: friend = db_session.query(models.User).filter(models.User.fb_id == friend_id.left_id).first() print friend if friend: if friend.group_id in result.keys(): result[friend.group_id].append(utils.obj_to_json(None, friend, False)) else: result[friend.group_id] = [utils.obj_to_json(None, friend, False)] # now that we have a dictionary mapping from # groups -> users # we need the information about the groups group_array = [] for group_id in result.keys(): group = db_session.query(models.Group).filter(models.Group.id == group_id).first() group_array.append(group) print group_array return utils.list_to_json("groups", group_array)
def getCards(page=1, type=None):
    # Return one page of Card rows plus paging metadata
    # ({'cards', 'allPage', 'page'}), optionally filtered by card type.
    # NOTE: 'type' shadows the builtin; kept for caller compatibility.
    result = {'cards': None, 'allPage': 0, 'page': page}
    count = 0
    if type is None:
        count = db_session.query(func.count(Card.id)).first()[0]
    else:
        count = db_session.query(func.count(
            Card.id)).filter(Card.type == type).first()[0]
    # curr: zero-based offset of the first card on the requested page
    curr = (page - 1) * PER_PAGE
    allPage = 1
    if count % PER_PAGE == 0:
        # NOTE(review): when count == 0 this yields allPage == 0 -- confirm
        # callers tolerate a zero page count
        allPage = count / PER_PAGE
    else:
        allPage = int(count / PER_PAGE) + 1
    # clamp out-of-range page requests to the last / first page
    if curr > count:
        curr = count
        page = allPage
    if curr < 0:
        curr = 1
        page = 1
    cards = None
    if type is None:
        cards = Card.query.order_by(Card.cost, Card.atk, Card.health)[curr:curr + PER_PAGE]
    else:
        # typed listing also joins the lookup tables for type/class/rarity
        cards = Card.query.filter(Card.type == type).\
            join(CardType, CardType.typeId==Card.type).\
            join(CardClass, CardClass.classId == Card.cclass).\
            join(CardRarity, CardRarity.rarityId == Card.rarity).\
            order_by(Card.cost, Card.atk, Card.health)[curr : curr + PER_PAGE]
    result['cards'] = cards
    result['allPage'] = allPage
    result['page'] = page
    return result
def notifyMove():
    # Apply a voter's reported mouse-move delta to their stored position
    # for an active (started, not completed) poll; returns the new point.
    # must also store the actual mouse value and check always that it
    # hasn't been voted completely
    offsetX = request.form.get("offsetX")
    offsetY = request.form.get("offsetY")
    x = request.form.get("x")
    y = request.form.get("y")
    pid = request.form.get("pid")
    vid = request.form.get("vid")
    poll = db_session.query(Poll).filter_by(pid=pid).first()
    voter = db_session.query(Voter).filter_by(id=vid).first()
    if (poll.started and not poll.completed):
        # accumulate the client-reported deltas into the voter's position
        voter.x = voter.x + int(offsetX)
        voter.y = voter.y + int(offsetY)
        db_session.commit()
        # push notification
        #if (math.sqrt((poll.x - int(x))**2 + (poll.y - int(y))**2) > 50):
        #pusher.trigger(pid, "notify-move", {"x": poll.x, "y": poll.y})
        # get rid of this in response?
        resp = jsonify(new_point={"x": voter.x, "y": voter.y})
        # allow cross-origin voting clients
        resp.headers['Access-Control-Allow-Origin'] = '*'
        return resp, 200
    else:
        return "Unable to vote at this time.", 400
def get_phenotype_candidates(self, source='efo', mod_fn=lambda x: x.lower(), peek=False):
    """Return a de-duplicated list of candidate phenotype names.

    Collects names and '|'-separated synonyms of phenotypes -- either every
    phenotype equivalent reachable from the stored associations (peek=True)
    or all phenotypes of the given source -- passing each through mod_fn.
    """
    if peek:
        all_assocs = db_session.query(Association).all()
        candidates = [
            eq for assoc in all_assocs for eq in assoc.phenotype.equivalents
        ]
    else:
        candidates = db_session.query(Phenotype).filter(
            Phenotype.source == source).all()

    names = set()
    for cand in candidates:
        if cand.name:
            names.add(mod_fn(cand.name))
        if cand.synonyms:
            names.update(mod_fn(syn) for syn in cand.synonyms.split('|'))
    return list(names)
def download_oa(folder, wait=1, supplementary=False): open_papers = db_session.query(Paper).filter( Paper.open_access == True).all() n_open_papers = db_session.query(Paper).filter( Paper.open_access == True).count() for i, paper in enumerate(open_papers): print '%d/%d' % (i, n_open_papers) if not paper.pmc_id: paper.pmc_id = _get_pmc_id(paper.pubmed_id) if not paper.abstract: paper.abstract = _get_abstract(paper.pubmed_id) if not paper.files: # download xml body filename = str(paper.pubmed_id) + '.xml' if _get_oa_body(paper.pubmed_id, paper.pmc_id, folder): file = File(paper=paper, format='xml', filename=filename) db_session.add(file) # download pdf and supplementary files = _get_oa_pdf(paper.pubmed_id, paper.pmc_id, folder) for filename in files: if filename.endswith('.pdf'): format = 'pdf' elif filename.endswith('.tgz'): format = 'tgz' else: continue file = File(paper=paper, format='html', filename=filename) db_session.add(file) if supplementary: _get_supplementary(paper.pubmed_id, paper.pmc_id, folder) time.sleep(wait) db_session.commit()
def parse_transactions(company, jres):
    """Upsert Transaction rows from a QuickBooks-style report response.

    company: owning Company row (its id is stored on each transaction).
    jres: parsed JSON with rows under jres['Rows']['Row']; each row's
    ColData columns are (date, txn type, doc num, no-post flag, name,
    memo, credit account, debit account, amount).
    """
    transaction_rows = jres['Rows']['Row']
    for tr in transaction_rows:
        data = tr['ColData']
        tx_date = data[0]['value']
        txn_type = data[1]['value']
        intuit_id = int(data[1]['id']) if data[1]['value'] else None
        doc_num = data[2]['value'] if data[2]['value'] else None
        is_no_post = data[3]['value'] == 'Yes'  # already a bool; no ternary needed
        name = data[4]['value']
        memo = data[5]['value']
        credit_account_intuit_id = int(data[6]['id']) if data[6]['id'] else None
        credit_account = db_session.query(Account).filter(
            Account.intuit_id == credit_account_intuit_id).first()
        # BUG FIX: guard on data[7]['id'] -- the original tested data[6]['id']
        # and could crash (or mis-assign) when only the credit column had an id
        debit_account_intuit_id = int(data[7]['id']) if data[7]['id'] else None
        debit_account = db_session.query(Account).filter(
            Account.intuit_id == debit_account_intuit_id).first()
        account_name = data[7]['value']
        amount = float(data[8]['value']) if data[8]['value'] else 0
        transaction = db_session.query(Transaction).filter(
            Transaction.intuit_id == intuit_id).first()
        if not transaction:
            print('No transaction with intuit id', intuit_id)
            transaction = Transaction(intuit_id=intuit_id,
                                      created_at=datetime.datetime.now(),
                                      updated_at=datetime.datetime.now())
        else:
            print('Already a transaction with intuit id', intuit_id,
                  '. Still updating.')
        values = dict(company_id=company.id, tx_date=tx_date,
                      txn_type=txn_type, doc_num=doc_num,
                      is_no_post=is_no_post, name=name, memo=memo,
                      credit_account=credit_account,
                      debit_account=debit_account, amount=amount)
        print(values)
        for key, value in values.items():
            setattr(transaction, key, value)
        print(transaction)
        db_session.add(transaction)
    db_session.commit()
def slug_exists(cls, uid, slug):
    """Return True when the account identified by uid already has a page
    with this slug."""
    from acct import Account
    joined = (db_session.query(Account)
              .join(Account.pages)
              .filter(Account.uid == uid)
              .filter(cls.slug == slug))
    return db_session.query(joined.exists()).scalar()
def title_exists(cls, uid, title):
    """Return True when the account identified by uid already has a page
    with this title."""
    from acct import Account
    joined = (db_session.query(Account)
              .join(Account.pages)
              .filter(Account.uid == uid)
              .filter(cls.title == title))
    return db_session.query(joined.exists()).scalar()
def count_ip():
    """Return (#distinct IPs, #distinct status codes, #distinct methods)
    from the Log table.

    Resolves the old TODO: all three aggregates are computed in a single
    query/round-trip instead of three separate scalar queries.
    """
    distinct_ip_num, distinct_code_num, distinct_method = db_session.query(
        func.count(distinct(Log.IP)),
        func.count(distinct(Log.CODE)),
        func.count(distinct(Log.METHOD))).first()
    return distinct_ip_num, distinct_code_num, distinct_method
def update_report_local_name(rep_id, lang_id, local_name):
    """Set the localized name (and bump 'updated') for one report string."""
    (db_session.query(report_strings)
     .filter(report_strings.report_id == rep_id,
             report_strings.lang_id == lang_id)
     .update({'local_name': local_name, 'updated': datetime.now()}))
    db_session.commit()
def update_template_default(rep_id, lang_id, local_template):
    """Set the default template (and bump 'updated') for one report string."""
    (db_session.query(report_strings)
     .filter(report_strings.report_id == rep_id,
             report_strings.lang_id == lang_id)
     .update({'default_template': local_template, 'updated': datetime.now()}))
    db_session.commit()
def login(token):
    """Log a user in via magic-link token and bump their login counter.

    Returns an apology page when the token matches no user; otherwise
    stores the token in the session and redirects home.
    """
    # look the token up directly -- the original loaded every token in the
    # User table into memory just to test membership
    user = db_session.query(User).filter_by(token=token).first()
    if user is None:
        return apology("Your magic link is incorrect")
    user.counter += 1
    db_session.commit()
    session["user_id"] = token
    return redirect("/")
def save_ads_to_database(ads):
    """Insert each ad dict, or update the existing row with the same id,
    committing once at the end."""
    for ad in ads:
        ad_id = ad['id']
        if is_existing_ad(ad_id):
            db_session.query(Ad).filter(Ad.id == ad_id).update(ad)
        else:
            db_session.add(Ad(**ad))
    db_session.commit()
def pulseNotify():
    # Periodic tick for all active polls: fold every voter's offset into the
    # poll position, broadcast the move, and complete the poll (declaring a
    # winner by angle sector) once it leaves the 240px radius around (300,300).
    # Re-schedules itself every 0.3s via threading.Timer.
    # notify each active survey to update
    r = db_session.query(Poll).filter_by(started=True).filter_by(
        completed=False).all()
    for p in r:
        # voters
        voters = db_session.query(Voter).filter_by(poll_id=p.id).all()
        # get sum of offsets
        offsetX = 0
        offsetY = 0
        for v in voters:
            offsetX = offsetX + (v.x - p.x)
            offsetY = offsetY + (v.y - p.y)
        # reset poll to new location
        p.x = p.x + offsetX
        p.y = p.y + offsetY
        # snap every voter to the poll's new position
        for v in voters:
            v.x = p.x
            v.y = p.y
        db_session.commit()
        pusher.trigger(str(p.pid), "notify-move", {"x": p.x, "y": p.y})
        # calculate a win
        # complete if distance is greater than small radius
        dist = math.sqrt((p.y - 300)**2 + (p.x - 300)**2)
        if (dist > 240):
            p.completed = True
            # determine winner and add to list of winners
            options = p.options.split("|")
            # angle of the poll point around the (300,300) center, in [0, 2*pi)
            angle = float(math.atan2(p.y - 300, p.x - 300))
            if (angle < 0):
                angle = math.pi * 2 + angle
            # each option owns an equal angular sector of width a
            a = float(math.pi * 2) / len(options)
            winner = "__UNKNOWN__"
            for index, o in enumerate(options):
                begin = a * index
                end = begin + a
                if (angle >= begin and angle < end):
                    winner = o
                    break
            # winners is a '|'-separated accumulator string
            p.winners = p.winners + winner + "|"
            db_session.commit()
            pusher.trigger(str(p.pid), "notify-status", {
                "status": "COMPLETED",
                "winners": p.winners.split("|")
            })
    threading.Timer(0.3, pulseNotify).start()
def get_user_group(group):
    '''accepts string denoting MCME group

    returns list containing names of desired group or empty list;
    'staff' selects users flagged as staff, anything else matches
    the User.group column'''
    query = ses.query(User)
    if group == 'staff':
        query = query.filter(User.staff == True)
    else:
        query = query.filter(User.group == group)
    return [member.name for member in query.all()]
def delete(self):
    """Delete this user and their session.

    Returns 1 on success; logs, rolls back, and returns 0 on any error.
    """
    try:
        self._delete_session()
        db_session.query(User).filter(User.id == self.id).delete()
        db_session.commit()
    except Exception as exc:
        log(exc)
        db_session.rollback()
        return 0
    return 1
def slug_exists(cls, uid, slug):
    """True when a page with this slug already belongs to the uid's account."""
    from acct import Account
    candidates = db_session.query(Account).\
        join(Account.pages).\
        filter(Account.uid == uid).\
        filter(cls.slug == slug)
    found = db_session.query(candidates.exists()).scalar()
    return found
def edit_task(task_id, group_id, new_body, new_due_date):
    """Update a task's body (and optionally its due date), then return the
    group's tasks as JSON ordered by creation date.

    Returns an error JSON message when the task id matches nothing.
    """
    task = db_session.query(models.Task).filter(
        models.Task.id == int(task_id)).first()
    if not task:
        return utils.error_json_message("invalid task id")
    task.body = new_body
    if new_due_date:
        task.due_date = new_due_date
    db_session.commit()
    group_tasks = db_session.query(models.Task).filter(
        models.Task.group_id == int(group_id)).order_by(
            models.Task.date).all()
    return utils.list_to_json('tasks', group_tasks)
def title_exists(cls, uid, title):
    """True when a page with this title already belongs to the uid's account."""
    from acct import Account
    candidates = db_session.query(Account).\
        join(Account.pages).\
        filter(Account.uid == uid).\
        filter(cls.title == title)
    found = db_session.query(candidates.exists()).scalar()
    return found
def delete_task(task_id, group_id): print "taskid = " + task_id print "group_id = " + group_id query_result = db_session.query(models.Task).filter(models.Task.id==int(task_id)).first() if query_result: db_session.delete(query_result) db_session.commit() updated_query = db_session.query(models.Task).filter(models.Task.group_id==int(group_id)).order_by(models.Task.date).all() return utils.list_to_json('tasks', updated_query) return utils.error_json_message("invalid task id")
def put(self, process_id):
    # Update a Process row from the parsed request args.
    # If a process_performer_id is supplied it must refer to an existing
    # ProcessPerformer; otherwise the update proceeds without that check.
    dict_to_update = arg_parse()
    process_performer_id = dict_to_update.get('process_performer_id')
    user_query = db_session.query(ProcessPerformer).\
        filter(ProcessPerformer.process_performer_id == process_performer_id).first()
    if user_query is not None or process_performer_id is None:
        db_session.query(Process).filter(Process.process_id == process_id).update(dict_to_update)
        db_session.commit()
    else:
        # NOTE(review): aborting with 204 (No Content) plus a message body
        # is unusual -- confirm the intended status code
        return abort(204, message="process performer {} doesn't exist".format(process_performer_id))
    return dict_to_update, 201
def subscription_confirmation(email: str) -> bool:
    """Mark the subscriber with this email as subscribed.

    Returns True on success, False (with a logged error) on failure.
    """
    try:
        # BUG FIX: the original passed 'Subscriber.is_subscribed is True'
        # (a Python identity test, always False) instead of a values
        # mapping, so the UPDATE never set the flag.
        db_session.query(Subscriber).filter(Subscriber.email == email).update(
            {Subscriber.is_subscribed: True})
        db_session.commit()
        return True
    except Exception as e:
        logging.error(
            f"Unable to create subscription for: {email} | Error {e}")
        return False
def get_track(id=None, provider_id=None):
    """Fetch a Track by primary id or by provider id.

    When looked up by provider_id and absent locally, the track is pulled
    from MusicProvider and persisted. Returns None when neither id is
    given or no track is found by primary id.
    """
    ret = None
    if id:
        ret = db_session.query(Track).filter_by(id=id).first()
    elif provider_id:
        # might already be in there
        ret = db_session.query(Track).filter_by(provider_id=provider_id).first()
        if ret is None:  # idiom fix: compare to None with 'is', not '=='
            ret = MusicProvider.get_track(provider_id)
            db_session.add(ret)
            db_session.commit()
    return ret
def test_delete_collection_updates_db(self):
    """Verify that when a source is deleted, their Source identity
    record, as well as Reply & Submission records associated with
    that record are purged from the database."""
    self._delete_collection_setup()
    journalist.delete_collection(self.source.filesystem_id)
    results = Source.query.filter(Source.id == self.source.id).all()
    self.assertEqual(results, [])
    # BUG FIX: the original called query(Submission.source_id == id),
    # which SELECTs the boolean expression as a column for every row
    # instead of filtering Submission rows -- the assertion could never
    # catch leftover records.
    results = db_session.query(Submission).filter(
        Submission.source_id == self.source.id).all()
    self.assertEqual(results, [])
    results = db_session.query(Reply).filter(
        Reply.source_id == self.source.id).all()
    self.assertEqual(results, [])
def del_entry(view, id):
    """Delete a Book or Author by id, flash the outcome, and redirect back
    to the admin listing."""
    model = Book if view == 'books' else Author
    entry = db_session.query(model).filter(model.id == id).first()
    if entry:
        db_session.delete(entry)
        db_session.commit()
        flash('%s successfuly removed from database' % entry.name)
    else:
        flash('No entry with id = %d' % id)
    return redirect(url_for('admin_show_entries', pagin=app.config['PAGIN'],
                            page=1, view=view))
def show_entries(view, pagin, page):
    """Render a paginated listing of books or authors ('view' chooses which)."""
    model = Book if view == 'books' else Author
    all_rows = db_session.query(model).all()
    paginator = Paginator(all_rows, pagin)
    try:
        entries = paginator.page(page)
    except PageNotAnInteger:
        # non-numeric page -> show the first page
        entries = paginator.page(1)
    except EmptyPage:
        # past the end -> show the last page
        entries = paginator.page(paginator.num_pages)
    return render_template('show_entries.html', entries=entries,
                           entries_per_page=pagin, view_mode=view,
                           config=app.config)
def delete_user():
    """Delete a user identified by 'id', 'name', or 'email' query arg.

    Always responds 200 'User deleted!' (the original behavior), even when
    no matching user exists.
    """
    _args = request.args.to_dict()
    # BUG FIX: usr was unbound (NameError) when none of the recognized
    # query args were present
    usr = None
    if 'id' in _args:
        usr = db_session.query(User).filter_by(id=_args['id']).first()
    elif 'name' in _args:
        usr = db_session.query(User).filter_by(name=_args['name']).first()
    elif 'email' in _args:
        usr = db_session.query(User).filter_by(email=_args['email']).first()
    if usr:
        db_session.delete(usr)
        db_session.commit()
    return make_response('User deleted!', 200)
def logout(cls, token):
    # Invalidate the given session token and clear it from the Flask
    # session. Returns True on success, False (after rollback) on error.
    try:
        # NOTE(review): deleting from query(User) filtered on Session.token
        # relies on implicit join criteria -- confirm this targets the
        # intended rows rather than a cross join.
        db_session.query(User).filter(Session.token == token).delete()
        db_session.commit()
        del session['token']
        return True
    except Exception as Error:
        print 'logout error: %s' % Error
        traceback.print_exc(file=sys.stdout)
        db_session.rollback()
        return False
def authors_edit():
    """Rename a registered-model entry from an inline-edit POST.

    Rejects the rename with HTTP 500 when another entry of the same model
    already has the requested name.
    """
    for model in app.config['REGISTERED_MODELS']:
        if model.__name__ != request.form['name']:
            continue
        entry = db_session.query(model).filter(
            model.id == request.form['id']).first()
        duplicate = db_session.query(model).filter(
            model.name == request.form['value']).first()
        if entry:
            # refuse when a different entry already owns the requested name
            if duplicate and entry.id != duplicate.id:
                return 'already exists in database', 500
            entry.name = request.form['value']
            db_session.commit()
            return entry.name, 200
    return 'failed', 500
def stop_subscription(email: str) -> bool:
    """Cancel a subscription.

    Args:
        email (str): [email associated with subscriber]

    Returns True on success, False (with a logged error) on failure.
    """
    try:
        # BUG FIX: the original passed 'Subscriber.is_subscribed is False'
        # (a Python identity test) instead of a values mapping, so the
        # UPDATE never cleared the flag.
        db_session.query(Subscriber).filter(Subscriber.email == email).update(
            {Subscriber.is_subscribed: False})
        db_session.commit()
        return True
    except Exception as e:
        logging.error(
            f"Unable to cancel subscription for: {email} | Error {e}")
        return False
def filter_by_user_and_store():
    """Render purchases filtered by the user and store chosen in the form."""
    records = []
    if request.method == 'POST':
        user_id = request.form['user']
        store_id = request.form['store']
        # BUG FIX: Python's 'and' does not combine SQLAlchemy expressions;
        # the original silently dropped the buyer_id condition. Passing
        # both criteria to filter() ANDs them in SQL.
        records = db_session.query(Purchase, Product).filter(
            Purchase.buyer_id == user_id,
            Purchase.store_id == store_id).join(Product)
    render_params = {
        'records': records,
        'users': db_session.query(User).order_by(User.id),
        'stores': db_session.query(Store).order_by(Store.id)
    }
    return render_template('purchase/filter.html', **render_params)
def list(page=0):
    """Render a page of stores; page 0 lists everything unpaginated.

    NOTE: the name shadows the builtin 'list' but is kept for routing
    compatibility.
    """
    total_count = db_session.query(Store).count()
    ordered = db_session.query(Store).order_by(Store.id)
    if page == 0:
        stores = ordered
    else:
        start = (page - 1) * PAGE_SIZE
        stores = ordered.offset(start).limit(PAGE_SIZE)
    render_params = {
        'stores_list': stores,
        'stores_count': total_count,
        'page': page,
        'total_pages': (total_count + PAGE_SIZE - 1) / PAGE_SIZE
    }
    return render_template('store/list.html', **render_params)
def get_user_friend_groups(user_id):
    """Return the groups map for everyone befriended with this user, or an
    error JSON message when the user has no friends."""
    friend_links = db_session.query(models.Friend).filter(
        models.Friend.right_id == user_id).all()
    if not friend_links:
        return utils.error_json_message("You have no friends")
    return get_groups_users_map(friend_links)
def update_broadcasting_status(group_id):
    """Mark every user in the group as not broadcasting. Returns 1."""
    # BUG FIX: .all() materializes the rows -- a bare Query object is
    # always truthy, so the original 'if users:' check never skipped the
    # empty case.
    users = db_session.query(models.User).filter(
        models.User.group_id == group_id).all()
    if users:
        for user in users:
            user.is_near_dorm = NOT_BROADCASTING
        db_session.commit()
    return 1
def create_document_folder_on_drive(self, document_redmine_id, document_name):
    """Create a Drive folder for a Redmine document.

    Requires the owning project's documents folder mapping to exist on
    Drive first; schedules its creation and retries the task otherwise.
    """
    if not document_redmine_id:
        raise Exception("document_redmine_id is required")
    if not document_name:
        raise Exception("document_name is required")
    if not self.try_acquire_lock():
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    document = db_session.query(Document).filter_by(
        id=document_redmine_id).first()
    if not document:
        logger.error("No document with id %s", document_redmine_id)
        # BUG FIX: 'raise "..." % id' raised a TypeError (string exceptions
        # are illegal and the format string had no placeholder)
        raise Exception("Bad document id passed: %s" % document_redmine_id)
    if len(document.project.drive_documents
           ) == 0 or not document.project.drive_documents[0].drive_id:
        logger.info(
            "Project %s has no drive documents mapping, calling creation, will retry",
            document.project.name)
        create_project_documents_folder_on_drive.delay(
            project_redmine_id=document.project.id,
            project_name=document.project.name)
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    return create_folder_on_drive(self,
                                  document.project.drive_documents[0].drive_id,
                                  'document', document_redmine_id,
                                  document.title)
def save_book_word(book: Book, word: str):
    """Record one occurrence of `word` in `book`.

    Inserts the word with count 1, or bumps its counter when it already
    exists; then links it to the book, ignoring an already-existing link.
    """
    record = Word(word=word, cnt=1)
    try:
        db_session.add(record)
        db_session.commit()
    except IntegrityError:
        # word already present: increment its counter and re-fetch the row
        db_session.rollback()
        db_session.query(Word).filter(Word.word == word).update(
            {Word.cnt: Word.cnt + 1})
        record = db_session.query(Word).filter(Word.word == word).first()
    link = BookWord(book_id=book.id, word_id=record.id)
    try:
        db_session.add(link)
        db_session.commit()
    except IntegrityError:
        # the book/word pair already exists -- nothing to do
        db_session.rollback()
def create_project_folder_on_drive(self, project_redmine_id, project_name):
    """Create a Drive folder for a Redmine project.

    Parents the folder under the parent project's folder when one exists,
    otherwise under the base directory; schedules creation of any missing
    parent mapping (or basedir) and retries the task.
    """
    if not project_redmine_id:
        raise Exception("project_redmine_id is required")
    if not project_name:
        raise Exception("folder_name is required")
    if not self.try_acquire_lock():
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    project = db_session.query(Project).filter_by(id=project_redmine_id).first()
    if not project:
        logger.error("No project with id %s", project_redmine_id)
        # BUG FIX: 'raise "..." % id' raised a TypeError (string exceptions
        # are illegal and the format string had no placeholder)
        raise Exception("Bad project id passed: %s" % project_redmine_id)
    if project.parent:
        if len(project.parent.drive_project) == 0 or not project.parent.drive_project[0].drive_id:
            logger.info("Parent Project %s of %s has no drive mapping, calling creation, will retry",
                        project.parent.name, project.name)
            create_project_folder_on_drive.delay(project_redmine_id=project.parent_id,
                                                 project_name=project.parent.name)
            self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
        return create_folder_on_drive(self, project.parent.drive_project[0].drive_id,
                                      'project', project_redmine_id, project.name)
    else:
        basedir_id = get_basedir()
        if not basedir_id:
            logger.info("Project %s has no parent and basedir is missing, calling creation, will retry",
                        project.name)
            create_basedir.delay()
            self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
        return create_folder_on_drive(self, basedir_id, 'project',
                                      project_redmine_id, project.name)
def get_search_words(p_session_id: int) -> Iterable[str]:
    """Return the words attached to the given search session.

    BUG FIX: the return annotation said Iterable[int], but the query
    selects Word.word, which is a string column.
    """
    rows = db_session.query(Word.word).join(
        SearchWords, Word.id == SearchWords.word_id).filter(
            SearchWords.session_id == p_session_id).all()
    # each row is a one-element tuple from query(Word.word)
    return [row[0] for row in rows]
def create_document_attachment_on_drive(self, attachment_redmine_id, attachment_name):
    """Upload a Redmine document attachment to Drive under its document's
    folder, creating the folder mapping first when it is missing."""
    if not attachment_redmine_id:
        raise Exception("attachment_redmine_id is required")
    if not attachment_name:
        raise Exception("attachment_name is required")
    if not self.try_acquire_lock():
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    attachment = db_session.query(DocumentAttachment).filter_by(
        id=attachment_redmine_id).first()
    if not attachment:
        logger.error("No document attachment with id %s", attachment_redmine_id)
        # BUG FIX: 'raise "..." % id' raised a TypeError (string exceptions
        # are illegal and the format string had no placeholder)
        raise Exception("Bad attachment id passed: %s" % attachment_redmine_id)
    if len(attachment.document.drive) == 0 or not attachment.document.drive[0].drive_id:
        logger.info("Document %s has no drive mapping, calling creation, will retry",
                    attachment.document.title)
        create_document_folder_on_drive.delay(document_redmine_id=attachment.document.id,
                                              document_name=attachment.document.title)
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    # attachments may live in an optional sub-directory on disk
    if attachment.disk_directory:
        local_path = os.path.join(celeryconfig.REDMINE_TO_DRIVE_FILES_FOLDER,
                                  attachment.disk_directory,
                                  attachment.disk_filename)
    else:
        local_path = os.path.join(celeryconfig.REDMINE_TO_DRIVE_FILES_FOLDER,
                                  attachment.disk_filename)
    return create_single_version_file_on_drive(
        self,
        parent_drive_id=attachment.document.drive[0].drive_id,
        redmine_type="document_attachment",
        redmine_id=attachment_redmine_id,
        file_name=attachment.filename,
        local_path=local_path,
        description=attachment.description,
        mime_type=attachment.content_type,
        version=1,
        modified_date=attachment.created_on)
def create_single_version_file_on_drive(task, parent_drive_id, redmine_type, redmine_id, file_name, local_path, description, mime_type, version, modified_date):
    # Ensure a RedmineToDriveMapping row exists for this file before upload.
    # No-ops when the file is already mapped; retries the task when a
    # concurrent worker wins the race to create the mapping.
    # NOTE(review): nothing after mapping creation is visible here -- confirm
    # the actual upload step is not missing from this function.
    if not parent_drive_id:
        raise Exception("parent_drive_id is required")
    if not redmine_type:
        raise Exception("redmine_type is required")
    if redmine_id is None:
        raise Exception("redmine_id is required")
    if not file_name:
        raise Exception("folder_name is required")
    if not local_path:
        raise Exception("local_path is required")
    if not os.path.isfile(local_path):
        raise Exception("local_path %s is missing" % local_path)
    db_mapping = db_session.query(RedmineToDriveMapping).filter_by(redmine_id=redmine_id).filter_by(
        mapping_type=redmine_type).first()
    if db_mapping and db_mapping.drive_id:
        # already uploaded and mapped -- nothing to do
        logger.info("File %s already mapped to %s", file_name, db_mapping.drive_id)
        return
    if not db_mapping:
        try:
            db_mapping = RedmineToDriveMapping(redmine_id=redmine_id, mapping_type=redmine_type,
                                               last_update=datetime.datetime.utcnow())
            db_session.add(db_mapping)
            db_session.commit()
            logger.info("Created mapping for %s %s id:%s", redmine_type, file_name, redmine_id)
        except IntegrityError, e:
            # another worker created the mapping first; back off and retry
            logger.info("Cannot create mapping due to duplicate, will retry: %s", e)
            db_session.rollback()
            task.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
def resolve_organization(self, info):
    """GraphQL resolver: all organizations when access is allowed,
    None (implicit deny) otherwise."""
    import utils
    if not utils.isAllowAccess():
        return None
    return db_session.query(OrganizationDBModel).all()
def create_dmsf_folder_on_drive(self, folder_redmine_id, folder_name):
    """Create a Drive folder for a Redmine DMSF folder.

    Parents it under the parent DMSF folder's Drive folder when one
    exists, otherwise under the project's DMSF root; schedules creation
    of any missing parent mapping and retries the task.
    """
    if not folder_redmine_id:
        raise Exception("folder_redmine_id is required")
    if not folder_name:
        raise Exception("folder_name is required")
    if not self.try_acquire_lock():
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    folder = db_session.query(DmsfFolder).filter_by(id=folder_redmine_id).first()
    if not folder:
        logger.error("No DMSF Folder with id %s", folder_redmine_id)
        # BUG FIX: 'raise "..." % id' raised a TypeError (string exceptions
        # are illegal and the format string had no placeholder)
        raise Exception("Bad DMSF id passed: %s" % folder_redmine_id)
    if folder.parent:
        if len(folder.parent.drive) == 0 or not folder.parent.drive[0].drive_id:
            logger.info("Parent DMSF Folder %s of %s has no drive mapping, calling creation, will retry",
                        folder.parent.title, folder.title)
            create_dmsf_folder_on_drive.delay(folder_redmine_id=folder.parent.id,
                                              folder_name=folder.parent.title)
            self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
        return create_folder_on_drive(self, folder.parent.drive[0].drive_id,
                                      'dmsf_folder', folder_redmine_id, folder.title)
    else:
        if len(folder.project.drive_dmsf) == 0 or not folder.project.drive_dmsf[0].drive_id:
            logger.info("Project DMSF Folder %s has no drive mapping, calling creation, will retry",
                        folder.project.name)
            create_project_dmsf_folder_on_drive.delay(project_redmine_id=folder.project.id,
                                                      project_name=folder.project.name)
            self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
        return create_folder_on_drive(self, folder.project.drive_dmsf[0].drive_id,
                                      'dmsf_folder', folder_redmine_id, folder.title)
def create_folder_on_drive(task, parent_drive_id, redmine_type, redmine_id, folder_name):
    # Ensure a RedmineToDriveMapping row exists for this folder.
    # No-ops when the folder is already mapped; retries the task when a
    # concurrent worker wins the race to create the mapping.
    # NOTE(review): nothing after mapping creation is visible here -- confirm
    # the actual Drive folder creation step is not missing.
    if not parent_drive_id:
        raise Exception("parent_drive_id is required")
    if not redmine_type:
        raise Exception("redmine_type is required")
    if redmine_id is None:
        raise Exception("redmine_id is required")
    if not folder_name:
        raise Exception("folder_name is required")
    db_mapping = db_session.query(RedmineToDriveMapping).filter_by(
        redmine_id=redmine_id).filter_by(mapping_type=redmine_type).first()
    if db_mapping and db_mapping.drive_id:
        # already created and mapped -- nothing to do
        logger.info("Folder %s already mapped to %s", folder_name,
                    db_mapping.drive_id)
        return
    if not db_mapping:
        try:
            db_mapping = RedmineToDriveMapping(
                redmine_id=redmine_id,
                mapping_type=redmine_type,
                last_update=datetime.datetime.utcnow())
            db_session.add(db_mapping)
            db_session.commit()
            logger.info("Created mapping for %s %s id:%s", redmine_type,
                        folder_name, redmine_id)
        except IntegrityError, e:
            # another worker created the mapping first; back off and retry
            logger.info(
                "Cannot create mapping due to duplicate, will retry: %s", e)
            db_session.rollback()
            task.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
def create_project_documents_folder_on_drive(self, project_redmine_id, project_name):
    """Create the 'Documents' Drive folder for a Redmine project, ensuring
    the project's own folder mapping exists first (scheduling it and
    retrying otherwise)."""
    if not project_redmine_id:
        raise Exception("project_redmine_id is required")
    if not project_name:
        raise Exception("folder_name is required")
    if not self.try_acquire_lock():
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    project = db_session.query(Project).filter_by(
        id=project_redmine_id).first()
    if not project:
        logger.error("No project with id %s", project_redmine_id)
        # BUG FIX: 'raise "..." % id' raised a TypeError (string exceptions
        # are illegal and the format string had no placeholder)
        raise Exception("Bad project id passed: %s" % project_redmine_id)
    if len(project.drive_project) == 0 or not project.drive_project[0].drive_id:
        logger.info(
            "Project %s has no drive mapping, calling creation, will retry",
            project.name)
        create_project_folder_on_drive.delay(project_redmine_id=project.id,
                                             project_name=project.name)
        self.retry(countdown=min(2 + (2 * current_task.request.retries), 128))
    return create_folder_on_drive(self, project.drive_project[0].drive_id,
                                  'project_docs', project_redmine_id,
                                  "Documents")
def post_delete(id):
    """Delete the post with the given id and go back to the index.

    Aborts with 404 when no such post exists.
    """
    target = db_session.query(Post).get(id)
    if target is None:
        abort(404)
    db_session.delete(target)
    db_session.commit()
    return redirect(url_for('index'))
def get_basedir():
    """Return the Drive id mapped to the Redmine base directory (redmine_id
    0), or None when the mapping or its drive_id is absent."""
    mapping = db_session.query(RedmineBasedirToDriveMapping).filter_by(
        redmine_id=0).first()
    if mapping is None:
        return None
    return mapping.drive_id or None
def servers():
    """Return at most 100 servers as JSON [{'ip': ..., 'name': ...}, ...]."""
    # Let the database enforce the cap instead of fetching every row and
    # breaking client-side -- the original check ran after append, so it
    # actually returned up to 101 rows (off-by-one).
    ret = [{'ip': server.ip, 'name': server.name}
           for server in db_session.query(Server).limit(100)]
    return Response(json.dumps(ret, indent=2), mimetype='application/json')
def cpu():
    """Return CPU samples newer than the posted 'id' as JSON column arrays.

    Responds only to POST (implicitly returns None otherwise, preserving
    the original behavior).
    """
    if request.method == "POST":
        # Materialize once: iterating the bare Query in each of the five
        # list comprehensions re-executed the same SELECT five times.
        rows = db_session.query(Cpu).filter(
            Cpu.id > int(request.form['id'])).all()
        return jsonify(insert_time=[x.time for x in rows],
                       cpu1=[x.cpu1 for x in rows],
                       cpu2=[x.cpu2 for x in rows],
                       cpu3=[x.cpu3 for x in rows],
                       cpu4=[x.cpu4 for x in rows])
def verify_token(token):
    """Return the User encoded in a signed token, or None when the token
    is expired or malformed."""
    serializer = Serializer(SECRET_KEY)
    try:
        data = serializer.loads(token)
    except (SignatureExpired, BadSignature):
        return None
    return db_session.query(User).filter_by(id=data['id']).first()
def playlist(self):
    """Build the Playlist of not-yet-played, un-bumped items at this
    location, preserving the query's row order."""
    result = Playlist(loc_id=self.id, cur_pli_id=self.currently_playing)
    # '== False' is required: these are SQLAlchemy column comparisons
    rows = db_session.query(PlaylistItem, Track, User).\
        filter(PlaylistItem.location_id == self.id).\
        filter(Track.id == PlaylistItem.track_id).\
        filter(PlaylistItem.user_id == User.id).\
        filter(PlaylistItem.done_playing == False).\
        filter(PlaylistItem.bumped == False)
    for item, track, owner in rows:
        result.queue.append((track, item, owner))
    return result
def handle_event(body, fb_id):
    """Store a message from the user with this fb_id in their group,
    push-notify the group, and return the group's full message history
    as JSON (None when the sender is unknown)."""
    sender = db_session.query(models.User).filter(
        models.User.fb_id == fb_id).first()
    if sender is None:
        return
    group = db_session.query(models.Group).filter(
        models.Group.id == sender.group_id).first()
    message = models.Message(
        body=body,
        time_stamp=datetime.datetime.utcnow(),
        offset=group.offset,
        user_id='0',
        group_id=sender.group_id,
        color_id=sender.color_id
    )
    db_session.add(message)
    db_session.commit()
    push_notification.push_notify_group(sender.group_id, body)
    history = db_session.query(models.Message).filter(
        models.Message.group_id == sender.group_id).order_by(
            models.Message.time_stamp).all()
    return utils.list_to_json('messages', history)
def add_task(group_id, body, due_date):
    """Create a task for the group and return the group's tasks as JSON,
    ordered by creation date."""
    task = models.Task(group_id=group_id, body=body,
                       date=datetime.datetime.utcnow(), due_date=due_date)
    db_session.add(task)
    db_session.commit()
    tasks = db_session.query(models.Task).filter(
        models.Task.group_id == group_id).order_by(models.Task.date).all()
    return utils.list_to_json('tasks', tasks)
def check_broadcast(): all_users = db_session.query(models.User).all() curr_time = datetime.datetime.utcnow() for user in all_users: last_broadcast = user.last_broadcast diff = curr_time - last_broadcast print diff.total_seconds() if diff.total_seconds() > FOUR_HOURS and user.is_near_dorm != NOT_BROADCASTING: user.is_near_dorm = NOT_BROADCASTING db_session.commit() return "OK"
def change_group_id(fb_id, new_group, passcode):
    # Move a user into another group, authorized either by the target
    # group's passcode or by the special "newgroup" code (which creates a
    # fresh group). Returns the updated user as JSON, a 401 Response for a
    # wrong code, or an error JSON message when user/group lookup fails.
    result = db_session.query(models.User).filter(models.User.fb_id == fb_id).first()
    new_grp = db_session.query(models.Group).filter(models.Group.id == new_group).first()
    if result and new_grp:
        print new_grp.passcode
        print passcode
        if str(new_grp.passcode) == str(passcode):
            # Changing a group_id to the same group_id will cause a server error
            if int(result.group_id) == int(new_group):
                return utils.obj_to_json('user', result, True)
            # Modifying the user's color_id
            result.group_id = new_group
            new_group = db_session.query(models.Group).filter(models.Group.id == new_group).first()
            new_group.users.append(result)
            # changing the color_id
            result.color_id = get_new_color(result.group_id)
            temp = result
            db_session.commit()
            return utils.obj_to_json('user', temp, True)
        elif result and passcode == "newgroup":
            print "entered correct code"
            # create new group
            new_g = models.Group(offset=0, passcode=0, latitude=0.0, longitude=0.0)
            # reflect that in the user info
            result.group_id = new_g.id
            new_g.users.append(result)
            result.color_id = 0
            temp = result
            db_session.add(new_g)
            db_session.commit()
            groups.assign_passcode(result.fb_id)
            return utils.obj_to_json('user', temp, True)
        # neither the passcode nor the "newgroup" code matched
        return Response('Wrong access code', 401)
    return utils.json_message("Wrong group access token")
def get_new_color(group_id):
    """Return a color id in [0, NUM_COLORS) not yet used by the group's
    members, or 0 when every color is taken."""
    # BUG FIX: the original filtered query(models.User) on models.Group.id,
    # which cross-joins Group instead of restricting by the users' own
    # group_id column.
    users = db_session.query(models.User).filter(
        models.User.group_id == group_id).all()
    used_colors = set(user.color_id for user in users)
    for color in range(NUM_COLORS):
        if color not in used_colors:
            return color
    return 0
def assign_passcode(fb_id):
    """Generate a new 6-digit passcode for the user's group and post the
    welcome + passcode announcement messages to the group chat.

    Silently returns when the user's group cannot be found.
    """
    user = db_session.query(models.User).filter(
        models.User.fb_id == fb_id).first()
    group_id = user.group_id
    group = db_session.query(models.Group).filter(
        models.Group.id == group_id).first()
    if not group:
        return
    new_passcode = random.randint(100000, 999999)
    group.passcode = new_passcode
    welcome_text = "Welcome to your Flat!"
    passcode_text = "Your Flat's access token is " + str(new_passcode)
    passcode_text += ". Send your friends this number so they can join your Flat!"
    # user_id=1 / color_id=9 mark these as system announcements
    db_session.add(models.Message(
        body=welcome_text,
        time_stamp=datetime.datetime.utcnow(),
        offset=0,
        user_id=1,
        group_id=group_id,
        color_id=9))
    db_session.add(models.Message(
        body=passcode_text,
        time_stamp=datetime.datetime.utcnow(),
        offset=0,
        user_id=1,
        group_id=group_id,
        color_id=9))
    db_session.commit()
    return
def getCards(page=1, type=None):
    # Return one page of Card rows plus paging metadata
    # ({'cards', 'allPage', 'page'}), optionally filtered by card type.
    # NOTE: 'type' shadows the builtin; kept for caller compatibility.
    # NOTE(review): this duplicates another getCards definition in the file.
    result = {
        'cards': None,
        'allPage': 0,
        'page': page
    }
    count = 0
    if type is None:
        count = db_session.query(func.count(Card.id)).first()[0]
    else:
        count = db_session.query(func.count(Card.id)).filter(Card.type == type).first()[0]
    # curr: zero-based offset of the first card on the requested page
    curr = (page - 1) * PER_PAGE
    allPage = 1
    if count % PER_PAGE == 0 :
        # NOTE(review): when count == 0 this yields allPage == 0 -- confirm
        # callers tolerate a zero page count
        allPage = count / PER_PAGE
    else:
        allPage = int(count / PER_PAGE) + 1
    # clamp out-of-range page requests to the last / first page
    if curr > count:
        curr = count
        page = allPage
    if curr < 0:
        curr = 1
        page = 1
    cards = None
    if type is None:
        cards = Card.query.order_by(Card.cost, Card.atk, Card.health)[curr : curr + PER_PAGE]
    else:
        # typed listing also joins the lookup tables for type/class/rarity
        cards = Card.query.filter(Card.type == type).\
            join(CardType, CardType.typeId==Card.type).\
            join(CardClass, CardClass.classId == Card.cclass).\
            join(CardRarity, CardRarity.rarityId == Card.rarity).\
            order_by(Card.cost, Card.atk, Card.health)[curr : curr + PER_PAGE]
    result['cards'] = cards
    result['allPage'] = allPage
    result['page'] = page
    return result
def edit_entry(view, id):
    # Edit an entry through BookForm and redirect back to the admin listing.
    # NOTE(review): the entry is always fetched from Book, even when
    # view != 'books' -- confirm whether Author edits are expected here.
    entry = db_session.query(Book).filter(Book.id==id).first()
    form = BookForm(request.form,obj=entry)
    if request.method == 'POST' and form.validate():
        if view=='books':
            # replace the author list with freshly-resolved Author rows
            entry.authors = []
            for author in form.authors.data :
                entry.authors.append(get_or_create(Author,name=author))
        entry.name = form.name.data
        entry.description = form.description.data
        db_session.commit()
        flash('%s successfuly have edited' % entry.name)
        return redirect(url_for('admin_show_entries',pagin = app.config['PAGIN'], page=1, view=view))
    flash('edited entry to %s database' % view)
    return render_template('add_entry.html', form=form, view=view,entry=entry, mode="edit")
def check():
    """Return "true"/"false" depending on whether a Clients row matches
    every field of the posted JSON payload.

    Responds 400 on malformed JSON, 500 on any other error, each with a
    JSON error body.
    """
    try:
        payload = json.loads(request.data, cls=APIDecoder)
        payload = standart_oms_hook(payload)
        # AND together one equality criterion per payload field
        criteria = [getattr(Clients, field) == value
                    for field, value in payload.iteritems()]
        matches = db_session.query(Clients).filter(and_(*criteria)).count()
        if matches > 0:
            return "true"
        return "false"
    except ValueError as e:
        logging.exception(e)
        res = dict(code=400, message=e.message, exc_name=e.__class__.__name__)
        return json.dumps(res, cls=APIEncoder), 400
    except Exception as e:
        logging.exception(e)
        res = dict(code=500, message=e.message, exc_name=e.__class__.__name__)
        return json.dumps(res, cls=APIEncoder), 500