def post(self, topic_id):
    """Create a new option on a topic that has not started voting.

    Allowed for the topic owner, members of the 'admin' group, or any
    caller holding a RoleGrant for the topic.  Returns the option (API)
    or a redirect payload (browser).
    """
    session = Session()
    roles = oidc.user_getfield('cognito:groups') if oidc.user_getfield(
        'cognito:groups') else []
    username = oidc.user_getfield('username')
    # get_json(silent=True) yields None for a missing/invalid JSON body;
    # guard so .get() below cannot raise AttributeError (HTTP 500).
    kargs = request.get_json(silent=True) or {}
    if not kargs.get('desc'):
        json_abort(400, "desc missing")
    topic = session.query(Topic).get(topic_id)
    if not topic:
        json_abort(404)
    now = datetime.datetime.now()
    if topic.start_time <= now:
        json_abort(403, "Voting already started. No changes allowed")
    grant = session.query(RoleGrant).filter(
        RoleGrant.topic_id == topic_id).filter(
            RoleGrant.role.in_(roles)).all()
    logger.debug("{}, {}, {}".format(topic.user, username,
                                     topic.user != username))
    if topic.user != username and 'admin' not in roles and not grant:
        json_abort(403)
    option = TopicOption(**kargs)
    option.topic_id = topic_id
    session.add(option)
    session.commit()
    logger.debug(option.to_dict())
    # (removed a dead `session = Session()` reassignment that was never used)
    if not oidc.is_api_request():
        url = url_for('api_topic', id=topic_id)
        data = {"url": url, "message": "Success. Redirecting to %s" % url}
        return jsonify(data)
    return jsonify(option)
def put(self, topic_id, id):
    """Update the role of an invite on a topic whose voting has not begun.

    Only admins or holders of a matching RoleGrant may change invites.
    """
    session = Session()
    roles = oidc.user_getfield('cognito:groups') if oidc.user_getfield(
        'cognito:groups') else []
    username = oidc.user_getfield('username')
    # silent=True returns None on an invalid body; default to {} so the
    # validation below produces a 400 instead of an AttributeError.
    kargs = request.get_json(silent=True) or {}
    if not kargs.get('role'):
        json_abort(400, "role missing")
    topic = session.query(Topic).get(topic_id)
    if not topic:
        json_abort(404, "Topic doesn't exist")
    now = datetime.datetime.now()
    if topic.start_time <= now:
        json_abort(403, "Voting already started. No changes allowed")
    grant = session.query(RoleGrant).filter(
        RoleGrant.topic_id == topic_id).filter(
            RoleGrant.role.in_(roles)).all()
    if 'admin' not in roles and not grant:
        json_abort(403)
    invite = session.query(Invite).get(id)
    if not invite:
        # previously crashed with AttributeError (500) on an unknown id
        json_abort(404)
    invite.role = kargs['role']  # presence validated above
    session.add(invite)
    session.commit()
    logger.debug(invite.to_dict())
    # (removed a dead `session = Session()` reassignment that was never used)
    if not oidc.is_api_request():
        url = url_for('api_topic', id=topic_id)
        data = {"url": url, "message": "Success. Redirecting to %s" % url}
        return jsonify(data)
    return jsonify(invite)
def put(self, id):
    """Edit a topic's form fields; only the owner may edit, and not
    while voting is in progress or within 5 minutes of the start."""
    session = Session()
    username = oidc.user_getfield('username')
    topic = session.query(Topic).get(id)
    if not topic:
        # previously fell through and crashed on topic.user (500)
        json_abort(404)
    now = datetime.datetime.now()
    # Reject edits while the voting window is open.
    if topic.start_time <= now <= topic.end_time:
        json_abort(403)
    if topic.user != username:
        json_abort(403)
    # None on invalid/missing JSON body; treat as "no fields supplied".
    kargs = request.get_json(silent=True) or {}
    logger.debug(kargs)
    fields = Topic.get_form_fields()
    for field in fields:
        if kargs.get(field):
            setattr(topic, field, kargs[field])
    now = datetime.datetime.now()
    minimum = datetime.timedelta(minutes=5)
    # A new start time must still be at least 5 minutes in the future.
    if kargs.get('start_time') and (parse(kargs.get('start_time'),
                                          yearfirst=True) - now) < minimum:
        json_abort(400, "You can't edit a topic 5 minutes before start.")
    if topic.start_time >= topic.end_time:
        json_abort(400, "End time can not be less than Start time.")
    session.commit()
    if not oidc.is_api_request():
        url = url_for('api_topic', id=int(id))
        data = {
            "url": url,
            "message": "Success. Redirecting to %s" % url
        }
        return jsonify(data)
    return jsonify(topic)
def SAP_process_table(table, fields, options, columns_dict, model):
    """Fetch a SAP table, rename its columns, and bulk-insert into *model*.

    Parameters:
        table, fields, options: passed through to SAP_get_table.
        columns_dict: SAP column name -> model column name mapping.
        model: SQLAlchemy declarative model receiving the rows.

    Returns the renamed DataFrame so callers can reuse the data.
    """
    session = Session()
    try:
        df = SAP_get_table(table, fields, options)
        df.rename(columns=columns_dict, inplace=True)
        session.execute(model.__table__.insert(),
                        df.to_dict(orient="records"))
        session.commit()
    finally:
        # close even when the fetch or insert raises (was leaked on error)
        session.close()
    return df
def save(self, data):
    """Persist *data* as an Item when database saving is enabled."""
    if not SAVE_TO_DB:
        return
    session = Session()
    # if not session.query(Item).filter_by(title=data['title']).first():
    item = Item(**data)
    session.add(item)
    session.commit()
async def course_create(token: str = Form(...), name: str = Form(...)):
    """Create a course owned by the professor identified by *token*."""
    session = Session()
    owner = validate_user(session, token, True)  # must be a professor
    course = Course(name=name, responsible=owner)
    session.add(course)
    session.commit()
    return {'message': 'Course created', 'id': course.id}
def save(self, data):
    """Store *data* as an Item unless one with the same title exists."""
    if not SAVE_TO_DB:
        return
    session = Session()
    duplicate = session.query(Item).filter_by(title=data['title']).first()
    if duplicate:
        return
    session.add(Item(**data))
    session.commit()
async def user_create(token: str = Form(...), lms_user: str = Form(...),
                      is_prof: bool = Form(...)):
    """Create a user account; only a professor's *token* may do this."""
    session = Session()
    validate_user(session, token, True)  # authorization check only
    user = User(lms_user=lms_user, is_prof=is_prof)
    session.add(user)
    session.commit()
    return {'message': 'ok', 'token': user.token}
def delete(self, id):
    """Delete a role grant; only the owning topic's creator may do so."""
    session = Session()
    username = oidc.user_getfield('username')
    grant = session.query(RoleGrant).get(id)
    if not grant:
        json_abort(404)
    if grant.topic.user != username:
        json_abort(403)
    session.delete(grant)
    session.commit()
    logger.debug(grant)
    return jsonify(grant)
def post(self):
    """Create or update a Workspace from the JSON request body, then
    sync its labels and URLs.

    Body keys: optional 'id' (update an existing workspace), 'name',
    'labels', 'urls'.  Responds with the workspace id.
    """
    session = Session()
    data = json.loads(self.request.body.decode('utf8'))
    if data.get('id'):
        # NOTE(review): an unknown id makes .get() return None and the
        # attribute assignment below crash — confirm ids are pre-validated.
        workspace = session.query(Workspace).get(data['id'])
    else:
        workspace = Workspace()
    workspace.name = data.get('name', '')
    session.add(workspace)
    # First commit assigns workspace.id, which update_labels/update_urls need.
    session.commit()
    workspace.update_labels(session, data['labels'])
    workspace.update_urls(session, data['urls'])
    session.commit()
    self.write({'id': workspace.id})
def register_submit(session):
    """Register a new user from the JSON body and log them in.

    Stores the user with a slug derived from the username and records
    the new user's id in the web session.
    """
    payload = bottle.request.json
    sa_session = Session()
    slug = sa_helper.generate_slug(sa_session, User,
                                   slugify.slugify(payload["username"]))
    # NOTE(review): the password is stored exactly as received — confirm
    # that hashing happens inside the User model.
    user = User(username=payload["username"],
                password=payload["password"],
                email=payload.get("email"),
                slug=slug)
    sa_session.add(user)
    sa_session.commit()
    session["user_id"] = user.id
    return {"success": True}
def put(self, id):
    """Update a role grant's topic and/or role.

    Re-pointing the grant at a topic is only allowed for that topic's
    owner.
    """
    session = Session()
    username = oidc.user_getfield('username')
    grant = session.query(RoleGrant).get(id)
    if not grant:
        # previously an AttributeError (500) for an unknown grant id
        json_abort(404)
    # None on invalid/missing JSON body.
    kargs = request.get_json(silent=True) or {}
    # BUG FIX: the original tested kargs.get('id') but then read
    # kargs['topic_id'], raising KeyError whenever 'id' was present
    # without 'topic_id'.  Key on the value actually used.
    if kargs.get('topic_id'):
        topic = session.query(Topic).get(kargs['topic_id'])
        if not topic or topic.user != username:
            json_abort(403)
        grant.topic = topic
    if kargs.get('role'):
        grant.role = kargs.get('role')
    session.commit()
    logger.debug(grant)
    return jsonify(grant)
def Add_Stock():
    """Scrape the TWSE listed-securities page and bulk-insert stocks.

    Parses the first HTML table, keeps only the equity rows, splits each
    cell into (id, name) and bulk-inserts them as Stock mappings.

    NOTE(review): bulk_insert_mappings/commit are invoked on ``Session``
    itself rather than an instance — this only works when Session is a
    scoped_session proxy; confirm.
    """
    res = requests.get('http://isin.twse.com.tw/isin/C_public.jsp?strMode=2')
    df = pd.read_html(res.text)[0]
    # Drop the "股票" section-header rows.
    df = df.drop([0, 1]).reset_index()
    df = df.drop('index', axis=1)
    # Row 946 is the last stock (ticker 9958).
    df = df.iloc[:946, :1]
    # NOTE(review): the first replace() argument appears to be a
    # full-width space being normalised to ASCII — confirm against the
    # raw page encoding.
    df[0] = df[0].map(lambda x: x.replace(' ', ' '))
    df['id'] = df[0].map(lambda x: x.split(' ')[0])
    df['name'] = df[0].map(lambda x: x.split(' ')[1])
    objects = list()
    for _, data in df.iterrows():
        objects.append(dict(id=data['id'], name=data['name']))
    Session.bulk_insert_mappings(Stock, objects)
    Session.commit()
def Print():
    """Send every file queued under ./User_Files/To_Print/ to the printer.

    Lists the queue directory into a log file, looks each filename up as
    an Order, prints it with ``lp``, then moves the file to
    ./User_Files/Finished_Print and marks the order printed (status 3).
    Failures are appended to ./log/print_error_log.
    """
    Base.metadata.create_all(engine)
    session2 = Session()
    # Dump the filenames in ./User_Files/To_Print/ into the pre-print log.
    cmd = "ls -t ./User_Files/To_Print/ > ./log/ToPrint_filename"
    subprocess.call(cmd, shell=True)
    ToPrint = open("./log/ToPrint_filename", 'r+')
    direction_option = ""  # print-orientation flag for lp
    # NOTE(review): direction_option is never reset inside the loop, so
    # once one landscape order is seen all later orders print landscape —
    # confirm whether that is intended.
    for line in ToPrint:
        # Look up the order for the current file.
        # NOTE(review): .filter() returns a Query, not an Order — the
        # attribute accesses below (Print_Direction, Print_Copies,
        # Print_Status) look like they need a .first()/.one(); confirm.
        printed_order = session2.query(Order).filter(
            Order.File_Dir == line[:-1])
        if printed_order.Print_Direction == 2:
            direction_option = "-o landscape"
        try:
            # Attempt to print the file.
            print_cmd = 'lp -d {} -n {} -o fitplot {} ./User_Files/To_Print/{}'.format(
                Printer_Name, printed_order.Print_Copies, direction_option,
                line[:-1])
            returnCode = subprocess.call(print_cmd, shell=True)
            if returnCode != 0:
                # NOTE(review): `commands` is a Python-2-only module; under
                # Python 3 this line raises NameError — confirm runtime.
                error = commands.getoutput(print_cmd)
                raise IOError(error)
        except Exception as e:
            # Record the failure in the print error log.
            with open('./log/print_error_log', 'a') as f:
                f.write(
                    str(datetime.datetime.now()) + " " + line[:-1] + " " +
                    str(e) + "\n")
        else:
            # Move the successfully printed file to Finished_Print.
            subprocess.call(
                'mv ./User_Files/To_Print/{} ./User_Files/Finished_Print/'.
                format(line[:-1]), shell=True)
            # Mark the order as printed (status 3) in the database.
            printed_order.Print_Status = 3
            session2.commit()
            # session2.close()
            # Append a success entry to the print access log.
            with open('./log/print_access_log', 'a') as f:
                f.write(
                    str(datetime.datetime.now()) + " " + line[:-1] + " " +
                    "Successfully-Added-To-Printer")
def post(self):
    """Create a RoleGrant; only the target topic's owner may grant roles."""
    session = Session()
    username = oidc.user_getfield('username')
    # silent=True returns None for an invalid body; guard so the
    # validation below yields 400 instead of AttributeError (500).
    kargs = request.get_json(silent=True) or {}
    logger.debug(kargs)
    if not kargs.get('topic_id'):
        json_abort(400)
    topic = session.query(Topic).get(kargs['topic_id'])
    if not topic:
        json_abort(400)
    if topic.user != username:
        json_abort(403)
    grant = RoleGrant(**kargs)
    session.add(grant)
    session.commit()
    logger.debug(grant)
    return jsonify(grant)
def delete(self, id):
    """Delete a topic (admins only, and only before voting starts)."""
    session = Session()
    roles = oidc.user_getfield('cognito:groups') if oidc.user_getfield(
        'cognito:groups') else []
    topic = session.query(Topic).get(id)
    if not topic:
        # previously fell through to session.delete(None) and crashed
        json_abort(404)
    now = datetime.datetime.now()
    if now > topic.start_time:
        json_abort(403, "Voting started. Can't delete")
    if 'admin' not in roles:
        json_abort(403)
    # (removed a second, redundant query for the same topic)
    session.delete(topic)
    session.commit()
    logger.debug(topic.to_dict())
    if not oidc.is_api_request():
        url = url_for('api_topic', id=int(id))
        data = {
            "url": url,
            "message": "Success. Redirecting to %s" % url
        }
        # BUG FIX: the redirect payload was built but never returned —
        # the browser branch fell through to jsonify(topic) like the
        # API branch, unlike every sibling handler.
        return jsonify(data)
    return jsonify(topic)
async def submit_task(course_id: int, activity_id: str,
                      token: str = Form(...), content: str = Form(...),
                      type: Optional[str] = Form(
                          ActivityType.multiple_choice)):
    """Record a submission for (course, activity).

    Creates the Activity on first use, stores the submission, and
    reports the professor's stored answer (if any) as the correct one.

    Raises HTTPException(404) for an unknown course id.
    """
    s = Session()
    sender = validate_user(s, token, False)
    try:
        course = s.query(Course).filter(Course.id == course_id).one()
    except NoResultFound:
        raise HTTPException(status_code=404, detail='Invalid course id')
    try:
        activity = s.query(Activity).filter(
            Activity.course_id == course_id,
            Activity.course_specific_id == activity_id).one()
    except NoResultFound:
        # First submission for this activity: create it on the fly.
        activity = Activity(course_specific_id=activity_id, type=type,
                            course=course)
        s.add(activity)
    sub = Submission(sender=sender, activity=activity, content=content)
    s.add(sub)
    # BUG FIX: Query.first() returns None instead of raising
    # NoResultFound, so the original `first()[0]` raised TypeError
    # whenever no professor submission existed.
    row = s.query(Submission.content).filter(
        Submission.activity == activity,
        Submission.sender.has(is_prof=True)).first()
    correct = row[0] if row else ""
    s.commit()
    return {
        'message': 'OK',
        'id': sub.id,
        "correct_answer": correct,
        "activity_id": activity_id,
        "content": content
    }
def post(self):
    """Cast an anonymous vote carried in a signed JWT.

    The vote token is verified against a secret derived from the
    caller's access token; each user may vote once per topic (a Mapper
    row records participation without linking user to choice).
    """
    session = Session()
    username = oidc.user_getfield('username')
    roles = oidc.user_getfield('cognito:groups') if oidc.user_getfield(
        'cognito:groups') else []
    # None on invalid/missing JSON body.
    kargs = request.get_json(silent=True) or {}
    logger.debug(kargs)
    vote_jwt = kargs.get('vote')
    if not vote_jwt:
        json_abort(400, "Vote missing")
    if not oidc.is_api_request():
        json_abort(403)
    secret = oidc.get_access_token().split('.')[-1]
    payload = jwt.decode(vote_jwt, secret, algorithms=['HS256'])
    fields = ['token', 'topic_id', 'option_id']
    for field in fields:
        if not payload.get(field):
            json_abort(400, "%s missing in token" % field)
    topic_id = payload.get('topic_id')
    topic = session.query(Topic).get(topic_id)
    if not topic:
        json_abort(404, description="Topic not found")
    now = datetime.datetime.now()
    # BUG FIX: the original used `and`, which can never be true
    # (start in the future AND end in the past), so the voting-window
    # check was dead code.  Reject votes outside [start, end].
    if topic.start_time > now or topic.end_time < now:
        json_abort(400, description="Voting not begun yet")
    mapper = session.query(Mapper).filter(
        Mapper.topic_id == topic_id).filter(Mapper.user == username).all()
    if mapper:
        json_abort(409)  # user already voted on this topic
    invite = session.query(Invite).filter(
        Invite.topic_id == topic_id).filter(Invite.role.in_(roles)).all()
    if not invite and topic.user != username:
        json_abort(403)
    vote = Vote(topic_id=payload['topic_id'],
                option_id=payload['option_id'],
                token=payload['token'])
    mapper = Mapper(user=username, topic_id=topic_id)
    session.add(vote)
    session.add(mapper)
    session.commit()
    logger.debug(vote)
    return jsonify(vote)
def process_input(header=True):
    """Import reviews from the downloaded CSV into the database.

    Skips rows already present in the DB and rows duplicated within the
    file, keyed on (product_id, lower-cased review text).

    Parameters:
        header: when True, the first CSV line is skipped as a header.
    """
    try:
        # Context manager closes the handle deterministically (the
        # original left the file open for the process lifetime).
        with open(tmp_file, 'r', encoding='utf-8') as csv_file:
            csv_reader = list(csv.reader(csv_file, delimiter=','))
    except (OSError, UnicodeDecodeError, csv.Error):
        # narrowed from a bare `except:` which hid KeyboardInterrupt too
        print("Error in opening csv file. Please check the format/encoding!!")
        quit()
    line_no = 0
    if header:
        line_no += 1
        csv_reader = csv_reader[1:]
    session = Session()
    all_reviews = session.query(Review).all()
    reviews_set = {(review.product_id, str(review.review_text).lower())
                   for review in all_reviews}
    reviews_in_file = set()
    print("Processing input file..")
    for line in csv_reader:
        line_no += 1
        review_text = line[0]
        product_id = line[4]
        search_key = (product_id, str(review_text).lower())
        if search_key in reviews_set:
            print("Review at line: {} is already in db!!".format(line_no))
        elif search_key in reviews_in_file:
            print("Review at line: {} is duplicate in file!!".format(line_no))
        else:
            session.add(Review(product_id, review_text))
            reviews_in_file.add(search_key)
    print("Committing data...")
    session.commit()
    session.close()
    try:
        # Best-effort removal of the downloaded temp file.
        os.remove(tmp_file)
    except OSError:
        pass
def delete(self, topic_id, id):
    """Delete a topic option before voting starts.

    Allowed for admins, RoleGrant holders on the topic, or the owner.
    """
    session = Session()
    roles = oidc.user_getfield('cognito:groups') if oidc.user_getfield(
        'cognito:groups') else []
    username = oidc.user_getfield('username')
    # (removed an unused `kargs = request.get_json(...)` local)
    topic = session.query(Topic).get(topic_id)
    if not topic:
        json_abort(404, "Topic doesn't exist")
    now = datetime.datetime.now()
    if topic.start_time <= now <= topic.end_time:
        json_abort(403, "Voting already started. No changes allowed")
    grant = session.query(RoleGrant).filter(
        RoleGrant.topic_id == topic_id).filter(
            RoleGrant.role.in_(roles)).all()
    if 'admin' not in roles and not grant and topic.user != username:
        json_abort(403)
    option = session.query(TopicOption).get(id)
    if not option:
        # previously crashed on session.delete(None) for unknown ids
        json_abort(404)
    session.delete(option)
    session.commit()
    logger.debug(option)
    return jsonify(option)
def Query():
    """Download files for auto-queued orders and mark them fetched.

    Selects orders with Time_Way == 1 (automatic queueing); for each one
    still in Print_Status 1 it downloads the referenced file, logs the
    outcome and, on success, advances Print_Status to 2.
    """
    Base.metadata.create_all(engine)
    session = Session()
    # Orders that use the automatic queueing mode.
    All_Order = session.query(Order).filter(Order.Time_Way == 1).all()
    if All_Order:
        for i in range(len(All_Order)):
            if All_Order[i].Print_Status == 1:
                # Try to download the file referenced by the order.
                try:
                    url = "http://rooins.careyou.xin/static/Upload_Files/" + All_Order[
                        i].File_Dir
                    r = requests.get(url)
                    if r.status_code != 200:
                        # Non-200 responses are turned into errors and logged.
                        raise IOError('{} {} {}'.format(
                            r.status_code, r.reason, r.url))
                    else:
                        with open(
                                './User_Files/To_Print/' +
                                All_Order[i].File_Dir, 'wb') as f:
                            f.write(r.content)
                except Exception as e:
                    # Append the failure to the download error log.
                    with open('./log/download_error_log', 'a') as f:
                        f.write(
                            str(datetime.datetime.now()) + " " +
                            All_Order[i].File_Dir + " " + str(e) + "\n")
                else:
                    # Append the success to the download log.
                    with open("./log/download_log", "a") as f:
                        f.write(
                            str(datetime.datetime.now()) + " " +
                            All_Order[i].File_Dir + " " +
                            "success-download" + "\n")
                    # Mark the file as downloaded in the database.
                    All_Order[i].Print_Status = 2
                finally:
                    # NOTE(review): commit runs even after a failed
                    # download, flushing any other pending session state —
                    # confirm this is intended.
                    session.commit()
def process_reviews():
    """Run sentiment analysis over unanalyzed reviews.

    Marks each review analyzed (even when analysis fails, so it is not
    retried forever) and throttles to *throttle_limit* API calls per
    minute to respect the Natural Language API quota.
    """
    session = Session()
    client = language.LanguageServiceClient()
    reviews_to_analyze = session.query(Review).filter_by(
        review_analyzed=False).all()
    total = len(reviews_to_analyze)
    processed = 0
    throttle_limit = 500  # allowed API calls per minute
    print("Processing Reviews...")
    print("Processed {}/{} ".format(processed, total), end='\r')
    start_time = time.time()
    one_minute = 60
    for review in reviews_to_analyze:
        try:
            analyze_review(client, review, session)
        except Exception:
            # Best-effort: one failed analysis must not stop the batch.
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit.
            pass
        review.review_analyzed = True
        session.add(review)
        processed += 1
        print("Processed {}/{} ".format(processed, total), end='\r')
        if processed % throttle_limit == 0:
            # Sleep out the remainder of the minute before continuing.
            end_time = time.time()
            time_taken = end_time - start_time
            if time_taken < one_minute:
                time.sleep(one_minute - time_taken)
            start_time = time.time()
    print("Processed {}/{} ".format(processed, total))
    print("Committing data...")
    session.commit()
    session.close()
    print("Processed data stored successfully!!")
def post(self):
    """Set, change, or clear the label bound to a page element.

    Body keys: 'wsId', 'url', 'selector', and optional 'label' — a null
    label removes any existing binding for the selector.
    """
    session = Session()
    payload = json.loads(self.request.body.decode('utf8'))
    workspace = session.query(Workspace).get(payload['wsId'])
    page = session.query(Page).filter_by(
        workspace=workspace.id, url=payload['url']).one()
    binding = session.query(ElementLabel).filter_by(
        page=page.id, selector=payload['selector']).one_or_none()
    if payload.get('label') is not None:
        label = session.query(Label).filter_by(
            workspace=workspace.id, text=payload['label']).one()
        if binding is None:
            binding = ElementLabel(page=page.id,
                                   selector=payload['selector'],
                                   label=label.id)
        else:
            binding.label = label.id
        session.add(binding)
    elif binding is not None:
        # No label supplied: clear the existing binding.
        session.delete(binding)
    session.commit()
    self.write({'ok': True})
def post(self):
    """Create a topic owned by the caller.

    The topic must start at least five minutes in the future and end
    after it starts.
    """
    session = Session()
    username = oidc.user_getfield('username')
    # silent=True returns None on an invalid body; default to {} so
    # Topic(**kargs) raises a clean validation path, not AttributeError.
    kargs = request.get_json(silent=True) or {}
    logger.debug(kargs)
    topic = Topic(**kargs)
    now = datetime.datetime.now()
    minimum = datetime.timedelta(minutes=5)
    if (parse(topic.start_time, yearfirst=True) - now) < minimum:
        json_abort(400, "You can only create a topic with minimum 5 minute in advance.")
    if topic.start_time >= topic.end_time:
        json_abort(400, "End time can not be less than Start time.")
    topic.user = username
    session.add(topic)
    session.commit()
    logger.debug(topic.to_dict())
    # (removed a dead `session = Session()` reassignment that was never used)
    if not oidc.is_api_request():
        url = url_for('api_topic', id=int(topic.id))
        data = {
            "url": url,
            "message": "Success. Redirecting to %s" % url
        }
        return jsonify(data)
    return jsonify(topic)
from sqlalchemy.exc import IntegrityError
from fact_models import FactArtistByYear, FactGenreByYear, FactSongByYear
from data_processor import DataProcessor
from config import Session, dataSource

# ETL script: process the source data set and load per-year fact tables,
# skipping years that already exist (IntegrityError -> rollback).
# NOTE(review): `os` is used below but no `import os` is visible in this
# span — confirm it is imported elsewhere in the file.
session = Session()
dirname = os.path.dirname(__file__)
ds = DataProcessor(os.path.join(dirname, dataSource))
ds.process()

# Load artist-by-year facts; duplicate years are reported and skipped.
for i, row in ds.artistsByYear().iterrows():
    record = FactArtistByYear(year=row[0], artist=row[1], titles=row[2])
    try:
        session.add(record)
        session.commit()
    except IntegrityError:
        print('FactArtistByYear Record exists for year {}'.format(row[0]))
        session.rollback()
query = session.query(FactArtistByYear)
print('{} records exist in FactArtistByYear'.format(query.count()))

# Load genre-by-year facts the same way.
for i, row in ds.genreByYear().iterrows():
    record = FactGenreByYear(year=row[0], genre=row[1], titles=row[2])
    try:
        session.add(record)
        session.commit()
    except IntegrityError:
        print('FactGenreByYear Record exists for year {}'.format(row[0]))
        session.rollback()
query = session.query(FactGenreByYear)
def SAP_notification_process(date, end_date):
    """Assemble SAP notification status-log tables for the given window.

    Most original SAP extraction steps are currently disabled (left as
    commented-out calls) and replaced by reads of previously persisted
    tables via pandas.  The live portion pulls status-change logs (JCDS)
    for the loaded notifications and inserts them into the user/system
    log tables.

    NOTE(review): `end_date` is only referenced by commented-out code —
    confirm whether the active JCDS filter should also bound on it.
    """
    # Get all notifications from the VIQMEL
    print('Processing notifications')
    #df_notification = SAP_process_table('VIQMEL',['QMNUM','IWERK','QMART','AUFNR','EQUNR','QMTXT','STRMN','LTRMN','ERDAT','PRIOK','ERNAM','TPLNR','ARBPL', 'OBJNR'],\
    # "IWERK EQ 'TR01' AND ERDAT >= '" + date + "' and ERDAT <= '"+end_date+"'",notification_columns, Notification)
    df_notification = pd.read_sql_table("SAP_notifications", con=engine)
    # Get all notifications´ types TQ80
    print('Processing notifications type')
    #SAP_process_table('TQ80', ['QMART','STSMA'], "MANDT EQ '020'",notification_type_columns, Notification_Type)
    df_type = pd.read_sql_table("SAP_notification_type", con=engine)
    # Get all notifications´ additional texts or comments QMFE
    print('Processing notifications text')
    #SAP_process_table('QMFE', ['QMNUM','FETXT'], "FETXT NE '' AND ERDAT >= '" + date + "'",notification_text_columns, Notification_Text)
    # Get all notifications' activities from the QMMA
    print('Processing notifications activities')
    #SAP_process_table('QMMA', ['MANDT','QMNUM','MATXT','MNGRP','MNCOD','MNKAT'], "MANDT EQ '020' AND ERDAT >= '" + date + "'", notification_activity_columns, Notification_Activity)
    # Get all notification´s activities header for the QPCD
    print('Processing notifications activities header')
    #SAP_process_table('QPCT', ['MANDT','CODEGRUPPE','CODE','KURZTEXT','KATALOGART'], "MANDT EQ '020' AND INAKTIV EQ ''", notification_activity_header_columns, Notification_Activity_Header)
    # Get all notification´s catalog types for the TQ15
    print('Processing notifications catalog')
    #SAP_process_table('TQ15T', ['MANDT','KATALOGART','KATALOGTXT'], "MANDT EQ '020' AND SPRACHE EQ 'S'", notification_catalog_columns, Notification_Catalog)
    # Get all notifications' causes from the QMUR
    print('Processing notifications causes')
    #SAP_process_table('QMUR', ['MANDT','QMNUM','URTXT'], "MANDT EQ '020' AND ERDAT >= '" + date + "'", notification_cause_columns, Notification_Cause)
    # Get all TRANSELCA´s equipment (K: Equipos Transelca - L: MAF Transelca)
    print('Processing equipments')
    #SAP_process_table('EQUI', ['MANDT','EQUNR','EQART','HERST','TYPBZ'], "EQTYP EQ 'K' OR EQTYP EQ 'L'", equipment_columns, Equipment)
    # Get all equipments´ descriptions
    print('Processing equipments description')
    #SAP_process_table('EQKT', ['MANDT','EQUNR','EQKTX'], "MANDT EQ '020'", equipment_text_columns, Equipment_Text)
    # Get all TRANSELCA's workplaces
    print('Processing workplaces')
    #df_workplace_id = SAP_get_table('CRHD',['MANDT','OBJID','ARBPL'],"MANDT EQ '020' AND WERKS EQ 'TR01'")
    #df_workplace_text = SAP_get_table('CRTX',['MANDT','OBJID','KTEXT'],"MANDT EQ '020' AND SPRAS EQ 'S'")
    #df_workplace = pd.merge(df_workplace_id, df_workplace_text, how='inner', on=['OBJID']) # filter per values in df_system
    #df_workplace.rename(columns=work_center_columns,inplace=True)
    #session = Session()
    #session.execute(Work_Center.__table__.insert(),df_workplace.to_dict(orient="records"))
    #session.commit()
    #session.close()
    # Get all TRANSELCA's functional location
    print('Processing functional location')
    #session = Session()
    #print(df_notification['func_location'].unique().shape)
    #df_func = SAP_get_table_by_data('IFLOS',['TPLNR','STRNO', 'ACTVS','TPLKZ','ERDAT','VERSN','ERNAM'],"ACTVS EQ 'X' AND TPLNR EQ '#field#'",df_notification['func_location'].unique())
    #df_func = df_func.drop_duplicates()
    #df_func.rename(columns=func_location_columns,inplace=True)
    #session.execute(Functional_Location.__table__.insert(),df_func.to_dict(orient="records"))
    #session.commit()
    #session.close()
    # Get System Status
    print('Processing System Status Header')
    #df_system = SAP_process_table('TJ02T', ['ISTAT','TXT04','TXT30'], "SPRAS EQ 'S'", system_columns, System_Status)
    df_system = pd.read_sql_table("SAP_system_status", con=engine)
    # Get User Status Header
    print('Processing User Status Header')
    #df_user = SAP_process_table('TJ30T', ['STSMA','ESTAT','TXT04','TXT30'], "MANDT EQ '020' AND SPRAS EQ 'S'", user_columns, User_Status)
    df_user = pd.read_sql_table("SAP_user_status", con=engine)
    # Get all statuses
    print(df_notification.shape)
    print('GET STATUSES START')
    #session = Session()
    #df_status = SAP_get_table_by_data('JEST',['MANDT','OBJNR','STAT','INACT'],"MANDT EQ '020' AND OBJNR EQ '#field#'",df_notification['obj_nr'])
    #df_status = df_status.drop_duplicates()
    #df_status.rename(columns=status_columns,inplace=True)
    #df_status_system = pd.merge(df_status, df_system, how='inner', on=['status_id']) # filter per values in df_system
    #df_status_user = pd.merge(df_status, df_user['status_id'].drop_duplicates(), how='inner', on=['status_id']) # filter per values in df_user
    #df_notification_2 = pd.merge(df_notification, df_type, how='inner', on=['type_n']) # filter per values in df_user
    #df_status_user_x = pd.merge(df_status_user, df_notification_2[['obj_nr','status_schema']], how='inner', on=['obj_nr'])
    #session.execute(Notification_System_Status.__table__.insert(),df_status_system.to_dict(orient="records"))
    #session.execute(Notification_User_Status.__table__.insert(),df_status_user_x.to_dict(orient="records"))
    #session.commit()
    #session.close()
    # Get log statuses (the live part: read persisted status tables, pull
    # JCDS change documents since `date`, and insert the matched logs).
    print('GET LOG STATUSES START')
    session = Session()
    df_status_system = pd.read_sql_table('SAP_notification_system_status',
                                         con=engine)
    df_status_user_x = pd.read_sql_table('SAP_notification_user_status',
                                         con=engine)
    df_log_status = SAP_get_table_by_data(
        'JCDS', [
            'MANDT', 'OBJNR', 'STAT', 'USNAM', 'UDATE', 'UTIME', 'TCODE',
            'CDTCODE', 'INACT', 'CHIND'
        ], "MANDT EQ '020' AND UDATE >='" + date + "' AND OBJNR EQ '#field#'",
        df_notification['obj_nr'])
    df_log_status = df_log_status.drop_duplicates()
    df_log_status.rename(columns=log_columns, inplace=True)
    # Keep only log rows whose (object, status) pair exists in the
    # persisted system/user status tables.
    df_log_status_system = pd.merge(df_log_status,
                                    df_status_system[['obj_nr', 'status_id']],
                                    how='inner',
                                    on=['obj_nr', 'status_id'])
    df_log_status_user = pd.merge(
        df_log_status,
        df_status_user_x[['obj_nr', 'status_id', 'status_schema']],
        how='inner',
        on=['obj_nr', 'status_id'])  # filter
    session.execute(Notification_Log_User.__table__.insert(),
                    df_log_status_user.to_dict(orient="records"))
    session.execute(Notification_Log_System.__table__.insert(),
                    df_log_status_system.to_dict(orient="records"))
    session.commit()
    session.close()
#%%
import sqlalchemy
from sqlalchemy.orm import sessionmaker

import models
from config import Session

#%%
# Ad-hoc smoke test: create a user, a course and an activity.
s = Session()

#%%
u = models.User(lms_user='******', is_prof=True, token='alks')
s.add(u)
s.commit()

# %%
# BUG FIX: the original called `sess.query(...)` but `sess` was never
# defined (NameError) — the session variable is `s`.
print(s.query(models.User).all())

# %%
u = s.query(models.User).first()
c = models.Course(name='test', responsible=u)
s.add(c)
s.commit()

# %%
a = models.Activity(course_specific_id='at1', course=c)
s.add(a)
s.commit()