def show_today_cleaning_list(message):
    """Post today's cleaning-duty roster to the channel.

    :param message: slackbot.dispatcher.Message
    """
    weekday = datetime.datetime.today().weekday()
    session = Session()
    assignments = session.query(Cleaning).filter(Cleaning.day_of_week == weekday)
    names = [get_user_display_name(duty.slack_id) for duty in assignments]
    botsend(message, '今日の掃除当番は{}です'.format('、'.join(names)))
def updata_recruit():
    """Normalise stored recruit rows: upper-case every salary value.

    :return: None
    """
    session = Session()
    for recruit in session.query(Recruit).all():
        recruit.salary = recruit.salary.upper()
    session.commit()
def count_redbull_stock(message):
    """Report the current Red Bull stock count to the channel.

    :param message: class carrying the slackbot request parameters
    """
    session = Session()
    row = session.query(
        func.sum(RedbullHistory.delta).label('stock_number')).one()
    total = row.stock_number
    if total is None:
        # SUM over zero rows yields NULL — treat that as empty stock.
        total = 0
    botsend(message, 'レッドブル残り {} 本'.format(total))
def hn_fetch_item(self, hn_id, item_info): self.STAT_ITEM_FROM_HN += 1 # Recurse to get the top_parent parent_id = item_info.get('parent') if parent_id is None: logging.info('Failed to find the top parent for ' + str(hn_id)) self.STAT_ITEM_NO_TOP_PARENT += 1 top_parent = None parent_item = None else: parent_item = self.fetch_item_or_article_by_id(parent_id) # When an article is returned, we know we've hit the top if isinstance(parent_item, Article): # logging.info('Found top parent' + str(parent_id)) top_parent = parent_item parent_item = None elif parent_item is None: top_parent = None elif parent_item.top_parent is None: top_parent = None else: # the top_parent is at least 1 grandparent away, or not found top_parent = parent_item.top_parent item = Session().query(Item).filter(Item.hn_id == hn_id).first() if not item: submitter_id = item_info.get('by') submitter = self.query_or_create_user(submitter_id) item = Item( hn_id=hn_id, submitter=submitter, type=item_info.get('type'), parent=parent_item, top_parent=top_parent ) Session().add(item) self.STAT_ITEM_CREATED += 1 return item
def test_sql(self):
    """Load the Pyexcel table from a SQL session and compare the
    sheet's rendered text against the expected table fixture."""
    sheet = pe.load_from_sql(Session(), Pyexcel)
    content = dedent("""
    pyexcel:
    +------------+----+-------+--------+
    | birth      | id | name  | weight |
    +------------+----+-------+--------+
    | 2014-11-11 | 0  | Adam  | 11.25  |
    +------------+----+-------+--------+
    | 2014-11-12 | 1  | Smith | 12.25  |
    +------------+----+-------+--------+""").strip('\n')
    self.assertEqual(str(sheet), content)
def post(self):
    """Create a product from the JSON request body.

    :return: 'ok' JSON on success, or a 400 'data error' response
             when the payload fails validation
    """
    session = Session()
    payload = json.loads(request.data.decode('utf-8'))
    if not is_product_data_valid(payload):
        response = jsonify('data error')
        response.status_code = 400
        return response
    product = self.model.from_dict(payload)
    session.add(product)
    session.commit()
    return jsonify('ok')
def is_exist_id(user_id) -> int:
    """Check whether a user row with *user_id* exists.

    :param user_id: primary key to look up in user_info
    :return: 0 if the user exists, -1 if not found, -2 on database error
    """
    session_obj = None  # so the except-path guard below can test it
    try:
        session_obj = Session()
        row = session_obj.query(user_info).filter_by(id=user_id).first()
        Session.remove()
        if row is None:  # identity check, not `== None` (PEP 8)
            return -1
        return 0
    except Exception as e:
        Logger().get_logger().error(str(e))
        # Roll back only if the session was created; the original raised
        # NameError here whenever Session() itself failed.
        if session_obj is not None:
            session_obj.rollback()
        return -2
def search_songs(query, limit=20):
    """Search songs whose title, artist, or album contains *query*.

    :param query: substring to match; empty/None yields no results
    :param limit: maximum number of rows returned
    :return: dict with the query, the limit, and the matching song dicts
    """
    matches = []
    if query:
        session = Session()
        pattern = '%' + query + '%'
        rows = (session.query(Song)
                .filter(or_(Song.title.like(pattern),
                            Song.artist.like(pattern),
                            Song.album.like(pattern)))
                .limit(limit)
                .all())
        session.commit()
        matches = [row.dictify() for row in rows]
    return {'query': query, 'limit': limit, 'results': matches}
def query_packages(package_id):
    """Look up a package (with its user eagerly loaded) by id.

    :param package_id: package primary key
    :return: the package_info row (or None), or -1 on database error
    """
    session_obj = None  # so the except-path guard below can test it
    try:
        session_obj = Session()
        ret = session_obj.query(package_info).options(
            subqueryload(
                package_info.user)).filter_by(id=package_id).first()
        Session.remove()
        return ret
    except Exception as e:
        Logger().get_logger().error(str(e))
        # Roll back only if the session was created; the original raised
        # NameError here whenever Session() itself failed.
        if session_obj is not None:
            session_obj.rollback()
        return -1
def showAnswers(q_id):
    """Return the published answers for a question.

    :param q_id: question primary key
    :return: list of [id, answerer, content, numAgree, numCollect, date]
             rows, one per published answer
    """
    sess = Session()
    rows = []  # renamed from `list` — do not shadow the builtin
    for answer in sess.query(Answer).filter_by(questionID=q_id):
        if answer.isPublished:
            rows.append([
                answer.id,
                answer.answerer,
                answer.content,
                answer.numAgree,
                answer.numCollect,
                answer.date,
            ])
    sess.commit()
    return rows
def spider_opened(self, spider):
    """Configure proxy settings for a spider when it opens.

    Looks up the Spider row by name and, when flagged to use proxies
    (and not Tor), attaches the proxy list id and list to the spider.
    Finally refreshes the /etc/hosts-derived rules.

    :param spider: the spider instance that just opened
    """
    db_session = Session()
    # .one() raises if no row matches, so db_spider is always truthy
    # below; the `if db_spider` part of the check is redundant.
    db_spider = db_session.query(Spider).filter(
        Spider.name == spider.name).one()
    if db_spider and db_spider.use_proxies and not db_spider.use_tor:
        spider.proxy_list_id = db_spider.proxy_list_id
        self.use_proxies.add(spider.name)
        # for stats or PhantomJS
        spider._proxies_list = self._get_proxies(spider)
    db_session.close()
    self._etc_hosts_rules, self._etc_hosts_ips = get_etc_hosts_rules()
def delete(p_id):
    """Delete the product with the given id.

    :param p_id: product primary key
    :return: (JSON message, 404) when nothing matched, else (message, 200)
    """
    session = Session()
    removed = session.query(ProductDAO).filter(ProductDAO.id == p_id).delete()
    session.commit()
    session.close()
    if removed:
        return jsonify({'message': 'The product is removed from db'}), 200
    return jsonify(
        {'message': f'There is no product to delete with id {p_id}'}), 404
def register_product(body):
    """Persist a new product and return its generated id.

    :param body: dict carrying the ProductDAO constructor fields
    :return: (JSON {'product_id': id}, 200)
    """
    session = Session()
    field_order = ('title', 'overview', 'release_date', 'runtime', 'adult',
                   'original_language', 'budget', 'revenue',
                   'product_quantity', 'unit_price')
    product = ProductDAO(*(body[name] for name in field_order))
    session.add(product)
    session.commit()
    session.refresh(product)  # pull back the autogenerated primary key
    session.close()
    return jsonify({'product_id': product.id}), 200
def Person_experience(_id):
    """Return the work-experience records for a person (Experience group).

    :param _id: id of the natural person
    """
    session = Session()
    records = session.query(Experience).filter(
        Experience.fk_person == _id).all()
    schema = ExperienceSchema(many=True, exclude=['person'])
    payload = schema.dump(records).data
    session.close()
    return jsonify(payload)
def create_or_update_user(telegram_id):
    """Fetch the User with *telegram_id*, creating the row if absent.

    :param telegram_id: Telegram account id
    :return: the User instance (detached once the session is closed)
    """
    session = Session()
    user = session.query(User).filter_by(telegram_id=telegram_id).first()
    if user is None:
        user = User(telegram_id=telegram_id)
        session.add(user)
        session.commit()
    session.close()
    return user
def create_account(body):
    """Create a customer account and return its generated id.

    :param body: dict with customer name, address, email and password
    :return: (JSON {'customer_id': id}, 200)
    """
    session = Session()
    account = AccountDAO(body['customer_name'],
                         body['customer_address'],
                         body['customer_email'],
                         body['customer_password'],
                         datetime.now())
    # The ORM handles the INSERT — just add the object to the session.
    session.add(account)
    session.commit()
    session.refresh(account)  # pull back the autogenerated id
    session.close()
    # Return a JSON body plus an explicit HTTP status code.
    return jsonify({'customer_id': account.id}), 200
def show_kintai_history_csv(message, time=None):
    """Upload the requesting user's attendance records for a month as CSV.

    :param message: class carrying the slackbot request parameters
    :param str time: year/month separated by '/' (e.g. 2016/1);
                     defaults to the current month
    """
    user_id = message.body['user']
    if time:
        year_str, month_str = time.split('/')
    else:
        now = datetime.datetime.now()
        year_str, month_str = now.strftime('%Y'), now.strftime('%m')
    year, month = int(year_str), int(month_str)
    if not 1 <= month <= 12:
        # "The specified month does not exist"
        message.send('指定した対象月は存在しません')
        return
    s = Session()
    # All of this user's records whose registered_at falls in (year, month).
    qs = (s.query(KintaiHistory).filter(
        KintaiHistory.user_id == user_id).filter(
            func.extract('year', KintaiHistory.registered_at) == year).filter(
                func.extract('month', KintaiHistory.registered_at) == month))
    # Group (is_workon, time) pairs by calendar day.
    kintai = defaultdict(list)
    for q in qs:
        registered_at = q.registered_at.strftime('%Y-%m-%d')
        # NOTE(review): '%I' is the 12-hour clock without AM/PM — confirm
        # this is intended; '%H' would be unambiguous.
        kintai[registered_at].append(
            (q.is_workon, '{:%I:%M:%S}'.format(q.registered_at)))
    # Emit one row per day of the month, even days with no records.
    rows = []
    for day in range(1, monthrange(year, month)[1] + 1):
        aligin_date = '{}-{:02d}-{:02d}'.format(year, month, day)
        workon, workoff = '', ''
        # Later records of the same kind overwrite earlier ones.
        for d in sorted(kintai[aligin_date]):
            if d[0]:
                workon = d[1]
            else:
                workoff = d[1]
        rows.append([aligin_date, workon, workoff])
    output = StringIO()
    w = csv.writer(output)
    w.writerows(rows)
    # Upload the CSV to the channel via the files-upload endpoint.
    param = {
        'token': settings.API_TOKEN,
        'channels': message.body['channel'],
        'title': '勤怠記録'
    }
    requests.post(settings.FILE_UPLOAD_URL,
                  params=param,
                  files={'file': output.getvalue()})
def vacation_release(_id, date):
    """Compute the remaining vacation balance at dismissal.

    Accrual appears to be 2 days per full month (24/year), with partial
    months rounded by day-of-month thresholds — TODO confirm against the
    HR policy.

    :param _id: employee (person) id
    :param date: dismissal date, ISO 'YYYY-MM-DD'
    :return: earned vacation minus days already used
    """
    session = Session()
    date1 = datetime.strptime(str(date), "%Y-%m-%d").date()
    id_w = session.query(
        Worker.id, Worker.started_to_work).filter_by(fk_person=_id).first()
    dd = datetime.strptime(str(id_w[1]), "%Y-%m-%d").date()
    records = session.query(Vacation).filter_by(fk_worker=id_w[0]).all()
    converter = VacationSchema(many=True, only=['count', 'used'])
    vacations = converter.dump(records).data
    vac_count = 0
    used = 0
    for arg in vacations:
        vac_count += arg['count']
        used += arg['used']
    # Hired and dismissed within the same month
    if dd.month == date1.month and dd.year == date1.year:
        if date1.day - dd.day > 20:
            count = 2
        elif date1.day - dd.day > 10:
            count = 1
        else:
            count = 0
        response = count
    else:
        # Vacation accrued during the final (partial) month
        if date1.day > 20:
            s2 = 2
        elif date1.day > 9:
            s2 = 1
        else:
            s2 = 0
        # If the person was hired this calendar year
        if dd.year == date1.year:
            if dd.day < 10:
                s1 = 2
            elif dd.day < 21:
                s1 = 1
            else:
                s1 = 0
            # Vacation earned during the current year
            count = s1 + s2 + 2 * (date1.month - dd.month - 1)
            response = count
        else:
            # Hired in a previous year: full years already settled at
            # 24 days/year, plus the current year's accrual.
            count = 2 * (date1.month - 1) + s2
            response = vac_count - 24 + count
    session.close()
    return response - used
def list_entries():
    """Create an entry from JSON (POST) or list all entries (GET)."""
    if request.method == 'POST':
        payload = json.loads(request.data)
        session = Session()
        session.add(Entry(title=payload.get('title'),
                          content=payload.get('content')))
        session.commit()
        return 'ok'
    session = Session()
    body = json.dumps([{
        "title": entry.title,
        "content": entry.content
    } for entry in session.query(Entry).all()])
    return Response(body, mimetype='application/json')
def anketa_statistic(_id):
    """Return the questionnaire answers for a person.

    :param _id: id of the natural person
    """
    session = Session()
    records = session.query(Answer).join(
        Question, Question.id == Answer.fk_question).filter(
            Answer.fk_person == _id).all()
    schema = AnswerSchema(
        many=True, only=['answer', 'question.question', 'question.id'])
    payload = schema.dump(records).data
    session.close()
    return jsonify(payload)
def test_sql(self):
    """Load the Pyexcel table from a SQL session and compare the
    sheet's rendered text against the expected table fixture."""
    sheet = pe.load_from_sql(Session(), Pyexcel)
    content = dedent("""
    Sheet Name: pyexcel
    +------------+----+-------+--------+
    | birth      | id | name  | weight |
    +------------+----+-------+--------+
    | 2014-11-11 | 0  | Adam  | 11.250 |
    +------------+----+-------+--------+
    | 2014-11-12 | 1  | Smith | 12.250 |
    +------------+----+-------+--------+""").strip('\n')
    assert str(sheet) == content
def api_catdidate_type_info(_id):
    """Return general information about a candidate.

    :param _id: candidate id
    """
    session = Session()
    candidate = session.query(Person).filter(Person.id == _id).all()
    converter = PersonSchema(many=True, exclude=['poll'])
    result = converter.dump(candidate).data
    session.close()
    # Removed a stray debug print(result) left over from development.
    return jsonify(result)
def get_history(limit=20):
    """Return the most recently played songs for PLAYER_NAME.

    :param limit: maximum number of history rows returned
    :return: dict with the limit and the played songs, newest first
    """
    session = Session()
    history = (session.query(PlayHistory)
               .filter_by(player_name=PLAYER_NAME)
               .order_by(PlayHistory.id.desc())
               .limit(limit)
               .all())
    session.commit()
    results = []
    for entry in history:
        song = session.query(Song).get(entry.song_id).dictify()
        song['played_at'] = str(entry.played_at)
        results.append(song)
    return {'limit': limit, 'results': results}
def credential_check(
        username, password):  # returns 'no username' if username doesnt exist
    """Validate a username/password pair.

    :param username: account name to look up
    :param password: candidate password
    :return: "no username" when the user does not exist, True on a
             password match, False otherwise
    """
    session = Session()
    # Query once and reuse the row — the original ran the identical
    # query twice (existence check + password fetch).
    user = session.query(User).filter_by(username=username).first()
    session.close()
    if user is None:
        return "no username"
    # NOTE(review): passwords are compared in plaintext here — consider
    # storing and checking salted hashes instead.
    return password == user.password
def create(body):
    """Create a delivery in state STATUS_CREATED and return its id.

    :param body: dict with customer_id, provider_id, package_id and a
                 delivery_time string ('%Y-%m-%d %H:%M:%S.%f')
    :return: (JSON {'delivery_id': id}, 200)
    """
    session = Session()
    due = datetime.strptime(body['delivery_time'], '%Y-%m-%d %H:%M:%S.%f')
    delivery = DeliveryDAO(body['customer_id'],
                           body['provider_id'],
                           body['package_id'],
                           datetime.now(),
                           due,
                           StatusDAO(STATUS_CREATED, datetime.now()))
    session.add(delivery)
    session.commit()
    session.refresh(delivery)  # pull back the autogenerated id
    session.close()
    return jsonify({'delivery_id': delivery.id}), 200
def uid_to_stuff(uid):  # returns false if event_name doesnt exist
    """Return [email, phone_number, address] for a user, or False.

    :param uid: user id to look up
    :return: False when no user matches, else the three contact fields
    """
    session = Session()
    # Query once and reuse the row — the original ran the identical
    # query twice (existence check + data fetch).
    user = session.query(User).filter_by(uid=uid).first()
    if user is None:
        session.close()
        return False
    contact = [user.email, user.phone_number, user.address]
    session.close()
    return contact  # email, list, address
def test_sql(self):
    """Load the Pyexcel table through get_sheet's session/table kwargs
    and compare the sheet's rendered text against the fixture."""
    sheet = pe.get_sheet(session=Session(), table=Pyexcel)
    content = dedent("""
    pyexcel:
    +------------+----+-------+--------+
    | birth      | id | name  | weight |
    +------------+----+-------+--------+
    | 2014-11-11 | 0  | Adam  | 11.25  |
    +------------+----+-------+--------+
    | 2014-11-12 | 1  | Smith | 12.25  |
    +------------+----+-------+--------+""").strip("\n")
    self.assertEqual(str(sheet), content)
def usernames_to_rid(receiving_id, requesting_id):
    """Find the request id linking *requesting_id* to *receiving_id*.

    :param receiving_id: int id of the receiving user
    :param requesting_id: id of the requesting user
    :return: the matching rid, or the sentinel string "none thring"
             (kept as-is for callers that compare against it)
    """
    session = Session()
    # Removed two stray debug print() calls left over from development.
    candidates = session.query(Request).filter_by(
        requesting_id=requesting_id).all()
    for req in candidates:
        if int(req.receiving_id) == receiving_id:
            rid = req.rid
            session.close()
            return rid
    session.close()
    return "none thring"
def test_save_book_as_file_from_sql(self):
    """Save two SQL tables as an .xls book and verify the round trip."""
    test_file = "book_from_sql.xls"
    pe.save_book_as(dest_file_name=test_file,
                    session=Session(),
                    tables=[Signature, Signature2])
    book_dict = pe.get_book_dict(file_name=test_file)
    expected = OrderedDict([
        ('signature', [['X', 'Y', 'Z'], [1, 2, 3], [4, 5, 6]]),
        ('signature2', [['A', 'B', 'C'], [1, 2, 3], [4, 5, 6]]),
    ])
    assert book_dict == expected
    os.unlink(test_file)
def by_hour(request, guild_id):
    """Aggregate message counts per hour of day for a guild.

    :param request: incoming request carrying the filter query params
    :param guild_id: guild to aggregate over (coerced to int)
    :return: JSON response of {'hour': ..., 'count': ...} rows
    """
    session = Session()
    query = (session.query(Messages)
             .with_entities(
                 func.strftime('%H', Messages.hour, 'unixepoch')
                     .label('agg_hour'),
                 func.sum(Messages.count).label('count'))
             .group_by('agg_hour')
             .order_by('agg_hour'))
    query = _filter(query, int(guild_id), request.query)
    data = [{'hour': row.agg_hour, 'count': row.count} for row in query]
    return Response.json(data)