def orderSuccess(self, pay_order_id=0, params=None):
    try:
        pay_order_info = session.query(PayOrder).filter_by(id=pay_order_id).first()
        if not pay_order_info or pay_order_info.status not in [-8, -7]:
            return True
        pay_order_info.pay_sn = params['pay_sn'] if params and 'pay_sn' in params else ''
        pay_order_info.status = 1
        pay_order_info.express_status = -7
        pay_order_info.updated_time = getCurrentDate()
        pay_order_info.pay_time = getCurrentDate()
        session.add(pay_order_info)

        pay_order_items = session.query(PayOrderItem).filter_by(pay_order_id=pay_order_id).all()
        for order_item in pay_order_items:
            tmp_model_sale_log = FoodSaleChangeLog()
            tmp_model_sale_log.food_id = order_item.food_id
            tmp_model_sale_log.quantity = order_item.quantity
            tmp_model_sale_log.price = order_item.price
            tmp_model_sale_log.member_id = order_item.member_id
            tmp_model_sale_log.created_time = getCurrentDate()
            session.add(tmp_model_sale_log)
        session.commit()
    except Exception as e:
        session.rollback()
        print(e)
        return False
    return True
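# Note: nearly every snippet in this collection hand-rolls the same
# try/commit/except/rollback idiom. Below is a minimal sketch of a reusable
# helper that captures it; this is an illustration (an assumption, not part of
# any of the original codebases), and it presumes the same module-level
# SQLAlchemy `session` used above.
from contextlib import contextmanager

@contextmanager
def transactional(session):
    """Yield the session, commit on success, roll back and re-raise on error."""
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise

# Hypothetical usage, mirroring the shape of orderSuccess() above:
#     with transactional(session):
#         session.add(pay_order_info)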
def __commit(self):
    """Commit the accumulated release data, rolling back and exiting on failure."""
    try:
        handles = [i.handle for i in self.motifs]
        if len(handles) != len(set(handles)):
            pdb.set_trace()  # duplicate handles indicate a bug upstream
        r = session.query(NR_release).filter(NR_release.id == self.release.id).first()
        if not r:
            session.add(self.release)
        session.add_all(self.motifs)
        session.add_all(self.loops)
        session.add_all(self.history)
        session.add_all(self.intersection)
        session.add_all(self.release_diff)
        session.commit()
        logging.info("Successful update")
    except sqlalchemy.exc.SQLAlchemyError as e:
        logging.error("Update failed. SQLAlchemy error. Rolling back.")
        logging.error(str(e))
        session.rollback()
        self.remove_release(self.release.id)
        sys.exit()
def set_online(cls, jid, show=None):
    """Mark a member as online.

    Arguments:
        `jid` - member jid
        `show` - stanza.show
    """
    m = cls.get_one(jid)
    if not m:
        return False
    try:
        status = session.query(Status)\
            .filter(and_(Status.resource == jid.resource, Status.member == m)).one()
        status.show = show
    except NoResultFound:
        status = Status(show, jid.resource)
        if m.status:
            m.status.append(status)
        else:
            m.status = [status]
    finally:
        try:
            session.commit()
        except Exception:
            session.rollback()
    return True
def set_info(cls, jid, key, value):
    """Set a member option.

    Arguments:
        `jid` - jid
        `key` - option key
        `value` - option value
    """
    m = cls.get_one(jid)
    try:
        info = session.query(Info).filter(
            and_(Info.key == key, Info.member == m, Info.is_global == 0)).one()
        info.value = value
    except NoResultFound:
        info = Info(key, value)
        if m.infos:
            m.infos.append(info)
        else:
            m.infos = [info]
    finally:
        try:
            session.commit()
        except Exception:
            session.rollback()
    return info
def create_or_update_existing_account(facebook_account):
    user = session \
        .query(User) \
        .filter_by(social_profile_id=facebook_account.id,
                   social_profile_type='facebook') \
        .first()
    if not user:
        try:
            user = User(social_profile_id=facebook_account.id,
                        social_profile_type='facebook',
                        first_name=facebook_account.first_name,
                        last_name=facebook_account.last_name,
                        profile_image=facebook_account.profile_image)
            session.add(user)
            session.flush()
            session.commit()
        except IntegrityError:
            session.rollback()
            logging.warning('User already exists in the system!')
            user = session \
                .query(User) \
                .filter_by(social_profile_id=facebook_account.id,
                           social_profile_type='facebook') \
                .first()
    return {
        'token': savory_token_client.create_savory_token(user)
    }, HTTP_STATUS_OK
async def subscribe(request):
    schema = SubscribeSchema()
    data, error = schema.load(request.form)
    # TODO: `required` does not reject an empty string
    if error:
        for key, value in error.items():
            error[key] = key + ':' + ','.join(value)
        return resp_error(';'.join(error.values()))
    subscriber = Subscriber()
    subscriber.nick_name = data['nick_name']
    subscriber.email = data['email']
    subscriber.resources = data['resources']
    session.add(subscriber)
    try:
        session.commit()
        session.close()
    except Exception as e:
        session.rollback()
        session.close()
        return resp_error(e.args)
    email_ = await init_email(data['nick_name'], data['email'], data['resources'])
    if not email_:
        return resp_ok('You have successfully subscribed')
    else:
        return resp_error(email_)
def modify_nick(cls, jid, nick):
    """Change a member's nickname.

    Arguments:
        `jid` - jid
        `nick` - new nickname
    Returns:
        False - nickname already taken, or member not found
        True  - nickname changed successfully
    """
    m = cls.get_one(jid)
    if not m:
        return False
    if cls.get_with_nick(nick):
        return False
    m.nick = nick
    m.last_change = now()
    cls.set_info(jid, "change_nick_times",
                 int(cls.get_info(jid, "change_nick_times", 0).value) + 1)
    try:
        session.commit()
    except Exception:
        session.rollback()
    return True
def guildwars2_filter_cm(comments, array_anet_names):
    for cm in comments:
        logging.info("comment")
        if cm.author.name in array_anet_names:
            logging.info("comment from anet: " + cm.name)
            row = bot_submissions()
            title = cm.link_title
            if (len(title) + len(cm.author.name) + 3) > 300:
                title = title[:300 - len(cm.author.name) - 3 - 3]
                title += "..."
            row.title = title + " [" + cm.author.name + "]"
            row.type = "link"
            row.subreddit = "gw2devtrack"
            row.submitted = False
            row.content = cm.permalink.replace("//www.reddit.com", "//np.reddit.com") + "?context=1000"
            session.add(row)
            continue  # disallows comments to be parsed for gw2 links
        if re.search(r"http.*?:\/\/.*?guildwars2.com\/", cm.body) is not None:
            logging.info("comment with gw2 link: " + cm.name)
            all_links = re.findall(r"http.*?:\/\/.*?guildwars2.com\/[^ \])\s]*", cm.body)
            for link in all_links:
                if link != "":
                    try:
                        prepare_comment(cm.name, False, guildwars2.locate_origin(link))
                    except Exception as e:
                        logging.error(e)
                        session.rollback()
                    else:
                        session.commit()
def create_or_update_account(updated_account):
    existing_account = session \
        .query(Account) \
        .filter_by(google_id=updated_account.google_id) \
        .first()
    if not existing_account:
        try:
            logging.info("Account does not exist in the system!")
            session.add(updated_account)
            session.flush()
            session.commit()
            existing_account = updated_account
        except IntegrityError:
            session.rollback()
            logging.info('User already exists in the system!')
            existing_account = session \
                .query(Account) \
                .filter_by(google_id=updated_account.google_id) \
                .first()
    return {
        'token': jwt_token_utils.create_jwt_token(existing_account)
    }, HTTPStatus.OK
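# create_or_update_existing_account() and create_or_update_account() above both
# follow the same insert-then-recover upsert pattern: attempt the insert, and on
# IntegrityError (a concurrent insert won the race) roll back and re-query.
# Below is a generic sketch of that pattern, assuming a SQLAlchemy `session`
# and a declarative `model`; the helper itself is an illustration, not part of
# the original codebases.
from sqlalchemy.exc import IntegrityError

def get_or_create(session, model, defaults=None, **filters):
    instance = session.query(model).filter_by(**filters).first()
    if instance:
        return instance
    try:
        instance = model(**{**filters, **(defaults or {})})
        session.add(instance)
        session.commit()
        return instance
    except IntegrityError:
        # Another transaction inserted the same row first; recover by re-querying.
        session.rollback()
        return session.query(model).filter_by(**filters).first()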
def addBasePokemonToUser(self, addRequestDict):
    try:
        userId = addRequestDict.get("userId")
        pokedexId = addRequestDict.get("pokedexId")
        pokemon = self.pokemonService.getPokemonByPokedexId(pokedexId)
        if pokemon is None:
            raise Exception("Pokemon with pokedexId:" + str(pokedexId) + " was not found!")
        userPokemon = UserPokemon()
        userPokemon.userId = userId
        userPokemon.pokedexId = pokedexId
        userPokemon.name = pokemon.name
        userPokemon.nickname = addRequestDict.get("nickname", pokemon.name)
        userPokemon.height = pokemon.height
        userPokemon.sprites = pokemon.sprites
        userPokemon.weight = pokemon.weight
        userPokemon.hunger = sysConst.MAX_HUNGER
        userPokemon.maxHp = pokemon.hp
        userPokemon.currentHp = pokemon.hp
        userPokemon.attack = pokemon.attack
        userPokemon.defense = pokemon.defense
        userPokemon.specialAttack = pokemon.specialAttack
        userPokemon.specialDefense = pokemon.specialDefense
        userPokemon.speed = pokemon.speed
        userPokemon.healthState = sysConst.ALIVE_POKEMON_STATE
        session.add(userPokemon)
        session.flush()
        session.commit()
        return userPokemon.id
    except Exception as e:
        session.rollback()
        raise e
def home(request):
    session.rollback()
    session.commit()
    filter_date = datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
    wemo_device_count = session.query(func.count(distinct(WemoTimeSeries.device_name))).first()[0]
    wemo = session.query(WemoTimeSeries).order_by(WemoTimeSeries.datetime.desc()).limit(wemo_device_count).all()
    hue_device_count = session.query(func.count(distinct(HueTimeSeries.device_name))).first()[0]
    hue = session.query(HueTimeSeries).order_by(HueTimeSeries.datetime.desc()).limit(hue_device_count).all()
    nest = session.query(NestTimeSeries).order_by(NestTimeSeries.datetime.desc()).limit(1).first()
    apex = session.query(ApexTimeSeries).filter(ApexTimeSeries.value != None).filter(ApexTimeSeries.datetime > filter_date).all()
    roomba_device_count = session.query(func.count(distinct(RoombaTimeSeries.device_name))).first()[0]
    roomba = session.query(RoombaTimeSeries).order_by(RoombaTimeSeries.datetime.desc()).limit(roomba_device_count).all()
    f = Flower()
    flower = f.get_data(0.001)[-1]
    return render(request,
                  template_name='home.html',
                  dictionary={'wemo': wemo,
                              'hue': hue,
                              'nest': nest,
                              'apex': apex,
                              'roomba': roomba,
                              'flower': flower,
                              })
async def websocket_to_database():
    try:
        websocket = await websockets.connect("wss://ws-feed.exchange.coinbase.com")
    except gaierror:
        db_logger.error('socket.gaierror - had a problem connecting to Coinbase feed')
        return
    await websocket.send('{"type": "subscribe", "product_id": "BTC-USD"}')
    while True:
        message = await websocket.recv()
        if message is None:
            file_logger.error('Websocket message is None!')
            break
        try:
            message = json.loads(message)
        except TypeError:
            db_logger.error('JSON did not load, see ' + str(message))
            continue
        if message['type'] != 'match':
            continue
        new_message = Messages()
        for key in message:
            if hasattr(new_message, key):
                setattr(new_message, key, message[key])
            else:
                db_logger.error(str(key) + ' is missing, see ' + str(message))
                continue
        try:
            session.add(new_message)
            session.commit()
        except IntegrityError:
            session.rollback()
        except DatabaseError:
            file_logger.error('Database Error')
            session.rollback()
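# Usage sketch (an assumption, not part of the original module): the coroutine
# above has to be driven by an asyncio event loop.
import asyncio

if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(websocket_to_database())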
def main():
    global r
    logging.config.fileConfig(path_to_cfg)

    # the below only works with re2
    # re.set_fallback_notification(re.FALLBACK_EXCEPTION)

    # which queues to check and the function to call
    queue_funcs = {'report': 'get_reports',
                   'spam': 'get_mod_queue',
                   'submission': 'get_new',
                   'comment': 'get_comments'}

    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict, cond_dict = initialize(queue_funcs.keys())
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))

    run_counter = 0
    while True:
        run_counter += 1
        try:
            # only check reports every 10 runs
            # sleep afterwards in case ^C is needed
            if run_counter % 10 == 0:
                check_queues(queue_funcs, sr_dict, cond_dict)
                Condition.clear_standard_cache()
                if process_messages():
                    sr_dict, cond_dict = initialize(queue_funcs.keys(),
                                                    reload_mod_subs=False)
                logging.info('Sleeping ({0})'.format(datetime.now()))
                sleep(5)
                run_counter = 0
            else:
                check_queues({q: queue_funcs[q] for q in queue_funcs
                              if q != 'report'},
                             sr_dict, cond_dict)
                if process_messages():
                    sr_dict, cond_dict = initialize(queue_funcs.keys(),
                                                    reload_mod_subs=False)
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict, cond_dict = initialize(queue_funcs.keys())
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            session.rollback()
def addPokemon(self, newPoke):
    try:
        session.add(newPoke)
        session.commit()
        return newPoke.id
    except Exception as e:
        session.rollback()
        raise e
def addUser(self, newUser):
    try:
        newUser.salt = randint(0, 10000000)
        session.add(newUser)
        session.commit()
        return newUser.id
    except Exception as e:
        session.rollback()
        raise e
def submit_post():
    new_post_submission = NewPostSubmissionForm()
    # TODO:
    # Add logic to determine the user, and post to the appropriate page.
    # If no user is logged in, then it should post to the guest page.
    # Limit the number of posts by IP if not logged in (3 posts should be enough).

    # Do stuff with the form data here:
    if new_post_submission.validate_on_submit():
        print("Form submitted!")
        post_title = new_post_submission.title.data
        post_body = new_post_submission.post_body.data
        post_image_caption = new_post_submission.image_caption.data
        post_image_file_name = new_post_submission.image.data.filename

        # Sanitizing input.
        post_image_file_name = post_image_file_name.replace(" ", "")

        # Adding final new line to file.
        post_body += '\n'
        # TODO:
        # Humans separate paragraphs by 2 new lines.
        # Need to convert this to 1 new line after the post has been submitted.

        post_image = new_post_submission.image.data
        post_image_file_name = no_duplicate_files(
            post_image_file_name, './static/images/post_images/')
        post_image.save(f'./static/images/post_images/{post_image_file_name}')

        new_post = Posts(title=post_title,
                         post_body=post_body,
                         image_file_name=post_image_file_name,
                         image_caption=post_image_caption)
        try:
            session.add(new_post)
            session.commit()
            print("A new post has been added to db!")
        except Exception as error:
            print(error)
            session.rollback()

        new_post_id = session.query(Posts).order_by(desc(Posts.id)).first().id
        return redirect(f"/post/{new_post_id}")
    else:
        return render_template('submit.html',
                               new_post_submission=new_post_submission)
def add_history(cls, jid, to_jid, content):
    m = cls.get_one(jid)
    m.last_say = now()
    if m.history:
        m.history.append(History(to_jid, content))
    else:
        m.history = [History(to_jid, content)]
    try:
        session.commit()
    except Exception:
        session.rollback()
def empty_status():
    all_status = session.query(Status).all()
    for status in all_status:
        try:
            session.delete(status)
        except Exception:
            session.rollback()
    try:
        session.commit()
    except Exception:
        session.rollback()
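# Alternative sketch (an assumption, not from the original code): deleting row
# by row as above issues one DELETE per Status object; a query-level bulk
# delete does the same work in a single statement. synchronize_session=False
# skips reconciling in-memory objects, which is safe when none are kept around.
def empty_status_bulk():
    try:
        session.query(Status).delete(synchronize_session=False)
        session.commit()
    except Exception:
        session.rollback()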
def _crash(self, msg=None):
    """Log a critical message, roll back the session, send a report, and exit."""
    if msg:
        logging.critical(msg)
    try:
        session.rollback()
        logging.critical('Session rolled back')
    except Exception:
        logging.critical('Session rollback failed')
    self.send_report()
    sys.exit(2)
def update_peers():
    peers = session.query(Peer).all()
    for peer in peers:
        try:
            height = get_height(peer.url)
            peer.height = height
            if height is not None:
                peer.updated_at = datetime.utcnow()
            session.add(peer)
            session.commit()
        except Exception:
            print('Error {}'.format(peer.url))
            session.rollback()
def __release_diff_commit(self):
    """Commit the release diff, clearing stored handles first."""
    try:
        session.add_all(self.release_diff)
        session.query(NR_handle).delete()
        session.commit()
        logging.info("Successful update")
    except sqlalchemy.exc.SQLAlchemyError as e:
        logging.error("Update failed. SQLAlchemy error. Rolling back.")
        logging.error(str(e))
        session.rollback()
        sys.exit()
async def _retry_error_pan(self):
    while self.fail_pan_info:
        pan_info = self.fail_pan_info.pop()
        location = Location(episode=pan_info[0],
                            url=pan_info[1],
                            resource=pan_info[2])
        session.add(location)
        try:
            session.commit()
        except Exception:
            session.rollback()
            self.fail_pan_info.append(pan_info)
        else:
            session.close()
def update_resource():
    try:
        transform = BaseTransForm()
        for name in transform.methods__():
            args = get_func_args(name, transform)
            instance = session.query(TransformModel).filter_by(name=name).first()
            tmp_func = getattr(transform, name)
            for property_name, value in vars(transform).items():
                if property_name.find(name) != -1:
                    # set the method and its attributes
                    if not instance:
                        function_model = TransformModel(
                            name=name,
                            module_id=value["module_id"],
                            args=str(args),
                            is_primary_key=value["primary_key"],
                            description=tmp_func.__doc__)
                        session.add(function_model)
                    else:
                        session.query(TransformModel).filter_by(name=name).update({
                            "args": str(args),
                            "module_id": value["module_id"],
                            "is_primary_key": value["primary_key"],
                            "description": tmp_func.__doc__
                        })
        # source = '''{"connect": {"database": "db_mid_bigdata", "ip": "192.168.1.100", "password": "******", "port": 3306, "user": "******"}, "id": 2, "sql": "select * from test_apply where WJID != 'NULL';", "type": 1}'''
        # target = "{'connect': {'database': 'db_mid_bigdata', 'ip': '192.168.1.100', 'password': '******', 'port': 3306, 'user': '******'}, 'id': 2, 'table': 'wj_answer_copy1', 'type': 1}"
        # methods = "[{'translate': {}}]"
        # primary_key = "{'to_primary_field': 'ID'}"
        # task_instance = session.query(TaskModel).filter_by(name="档案库任务").first()
        # if not task_instance:
        #     task = TaskModel(name="档案库任务", source=source, methods=methods, target=target, primary_key=primary_key)
        #     session.add(task)
        try:
            session.commit()
        except Exception as e:
            session.rollback()
            traceback.print_exc()
            # TODO: log this failure
    except Exception as e:
        traceback.print_exc()
        # TODO: log this failure
def post(self, *args, **kwargs):
    username = self.get_argument('username')
    email = self.get_argument('email')
    password = self.get_argument('password')
    user = User(username=username, email=email, password=password)
    session.rollback()
    session.add(user)
    session.commit()
    token = user.generate_confirmation_token()
    message = Loader('templates').load('mail_template.html').generate(
        title="Tornado-blog", username=username, content=token)
    send_email(msg_to=user.email, message=message)
    self.render('login.html',
                message='An email has been sent to your email address!')
def set_offline(cls, jid):
    m = cls.get_one(jid)
    if not m:
        return False
    try:
        status = session.query(Status)\
            .filter(and_(Status.resource == jid.resource, Status.member == m)).one()
        m.status.pop(m.status.index(status))
        try:
            session.delete(status)
            session.commit()
        except Exception:
            session.rollback()
    except NoResultFound:
        pass
async def _retry_error_page(self):
    while self.fail_page_info:
        page_info = self.fail_page_info.pop()
        resource = Resources(name=page_info[0],
                             owner='电波字幕组',
                             stype='tvshow',
                             original=page_info[1])
        session.add(resource)
        try:
            session.commit()
        except Exception:
            session.rollback()
            self.fail_page_info.append(page_info)
        else:
            session.close()
async def add_user(request):
    try:
        data = await request.json()
        u = User(data['username'],
                 data['firstname'],
                 data['lastname'],
                 int(data['groupusers_id']))
        u.set_password(data['password'])
        session.add(u)
        session.commit()
        response_obj = {'status': 'success'}
        return json_response(response_obj, status=200)
    except Exception as e:
        response_obj = {'status': 'failed', 'reason': str(e)}
        session.rollback()
        return json_response(response_obj, status=500)
def __store_unit_ids(self, unit_ids):
    """Store correspondences between old- and new-style unit ids."""
    logging.info('Importing unit ids')
    for (old_id, new_id) in unit_ids:
        # old id example: 1EKA_AU_1_B_8_C_
        (pdb_id, au_ba, model, chain, seq_id, comp_id, ins_code) = old_id.split('_')
        pdb_file = 'pdb' if au_ba == 'AU' else 'pdb' + au_ba[2:]  # BA1, BA10

        pipes = new_id.count('|')  # number of '|' separators in the new id
        if pipes == 8:
            # all fields present
            (pdb_id, model, chain, comp_id, seq_id,
             atom, alt_id, ins_code, sym_op) = new_id.split('|')
        elif pipes == 7:
            # default sym_op
            (pdb_id, model, chain, comp_id, seq_id,
             atom, alt_id, ins_code) = new_id.split('|')
            sym_op = '1_555'
        elif pipes == 4:
            # default sym_op, ins_code, alt_id, all atoms
            (pdb_id, model, chain, comp_id, seq_id) = new_id.split('|')
            sym_op = '1_555'
            ins_code = ''
            alt_id = ''
            atom = ''
        else:
            msg = 'Unknown id format %s' % new_id
            logging.critical(msg)
            session.rollback()
            self._crash(msg)

        U = PdbUnitIdCorrespondence(old_id=old_id,
                                    unit_id=new_id,
                                    pdb=pdb_id,
                                    model=model,
                                    chain=chain,
                                    seq_id=seq_id,
                                    atom=atom,
                                    comp_id=comp_id,
                                    alt_id=alt_id,
                                    ins_code=ins_code,
                                    sym_op=sym_op,
                                    pdb_file=pdb_file)
        try:
            session.add(U)
        except Exception:
            pass
    session.commit()
    logging.info('Ids successfully imported')
def main():
    global r
    logging.config.fileConfig(path_to_cfg, disable_existing_loggers=False)
    bot = Polarbyte(cfg_file)
    forceAuthAgain = False
    while True:
        try:
            bot.check()
            bot.collect()
            bot.process_posts()
            bot.submit()
        except praw.errors.OAuthScopeRequired as e:
            logging.error(e)
            bot.forceAuthenticationAgain = True
        except Exception as e:
            print(vars(e))
            logging.error(e)
            session.rollback()
def add(cls, jid, nick=None, show=None):
    """Add a member.

    Arguments:
        `jid` - member jid
        `nick` - nickname
        `show` - stanza.show
    """
    if cls.get_one(jid):
        return
    m = Member(jid, nick)
    m.status = [Status(show, jid.resource)]
    try:
        session.add(m)
        session.commit()
    except Exception:
        session.rollback()
    return m
def __commit(self):
    try:
        session.add(self.release)
        session.add_all(self.motifs)
        session.add_all(self.loops)
        session.add_all(self.history)
        session.add_all(self.intersection)
        session.add_all(self.release_diff)
        session.add_all(self.loop_order)
        session.add_all(self.loop_positions)
        session.add_all(self.loop_discrepancy)
        session.commit()
        logging.info('Successful update')
    except sqlalchemy.exc.SQLAlchemyError as e:
        logging.error('Update failed. Rolling back.')
        logging.error(str(e))
        session.rollback()
        self.remove_release(self.release.id)
        sys.exit()
def follow_user(follower_user, followed_user):
    follow_relationship = FollowRelationship(follower_user_id=follower_user.id,
                                             followed_user_id=followed_user.id)
    try:
        session.add(follow_relationship)
        session.flush()
        session.commit()
    except IntegrityError:
        logging.info('Follow relationship already exists in the system!')
        session.rollback()
        session.query(FollowRelationship) \
            .filter_by(follower_user_id=follower_user.id,
                       followed_user_id=followed_user.id) \
            .update({'is_deleted': False})
        session.commit()
    return {'following': True}, HTTP_STATUS_OK
def updateUser(self, updatedUserDict):
    mergedUser = None
    userId = updatedUserDict.pop('id', None)
    if userId is None:
        raise Exception("Cannot update User without User ID")
    try:
        fromDbUserDict = adapters.as_dict(self.getUser(userId))
        fromDbUserDict.update(updatedUserDict)
        print(fromDbUserDict)
        session.query(User).filter(User.id == userId).update(
            fromDbUserDict, synchronize_session=False)
        session.commit()
        mergedUser = self.getUser(userId)
    except Exception as e:
        session.rollback()
        raise e
    return mergedUser
def remove_release(self, release):
    """Delete all database rows associated with the given release id."""
    try:
        session.query(NR_release).filter(NR_release.id == release).delete(synchronize_session="fetch")
        session.query(NR_class).filter(NR_class.release_id == release).delete(synchronize_session="fetch")
        session.query(NR_pdb).filter(NR_pdb.release_id == release).delete(synchronize_session="fetch")
        session.query(NR_setdiff).filter(NR_setdiff.release_id == release).delete(synchronize_session="fetch")
        session.query(NR_parents).filter(NR_parents.release_id == release).delete(synchronize_session="fetch")
        session.query(NR_release_diff).filter(NR_release_diff.nr_release_id1 == release).delete(
            synchronize_session="fetch"
        )
        session.query(NR_release_diff).filter(NR_release_diff.nr_release_id2 == release).delete(
            synchronize_session="fetch"
        )
        session.commit()
        logging.info("Release %s deleted successfully" % release)
    except Exception:
        logging.error("Removing release %s failed" % release)
        session.rollback()
        sys.exit()
def guildwars2_filter_sm(submissions, array_anet_names):
    for sm in submissions:
        logging.info('submission')
        if sm.author.name in array_anet_names:
            logging.info("submission from anet: " + sm.name)
            row = bot_submissions()
            title = sm.title
            if (len(title) + len(sm.author.name) + 3) > 300:
                title = title[:300 - len(sm.author.name) - 3 - 3]
                title += '...'
            row.title = title + ' [' + sm.author.name + ']'
            row.type = 'link'
            row.subreddit = 'gw2devtrack'
            row.submitted = False
            row.content = sm.permalink.replace('//www.reddit.com', '//np.reddit.com') + '?context=1000'
            session.add(row)
        if re.search(r'http.*?:\/\/.*?guildwars2.com\/', sm.selftext) is not None:
            logging.info("submission with gw2 link in selftext: " + sm.name)
            all_links = re.findall(r'http.*?:\/\/.*?guildwars2.com\/[^ \])\s]*', sm.selftext)
            for link in all_links:
                if link != '':
                    try:
                        prepare_comment(sm.name, False, guildwars2.locate_origin(link)[1])
                    except Exception as e:
                        session.rollback()
                        logging.error(e)
                    session.commit()
        if re.search(r'http.*?:\/\/.*?guildwars2.com\/', sm.url) is not None:
            logging.info("submission with gw2 link in url: " + sm.name)
            all_links = re.findall(r'http.*?:\/\/.*?guildwars2.com\/[^ \])]*', sm.url)
            for link in all_links:
                if link != '':
                    try:
                        prepare_comment(sm.name, False, guildwars2.locate_origin(link)[1])
                    except Exception as e:
                        logging.error(e)
                        session.rollback()
                    session.commit()
def set_global_info(key, value):
    """Set a global option.

    Arguments:
        `key` - option key
        `value` - option value
    """
    try:
        info = session.query(Info).filter(and_(Info.key == key,
                                               Info.is_global == 1)).one()
        info.value = value
    except NoResultFound:
        info = Info(key, value, True)
        try:
            session.add(info)
        except Exception:
            session.rollback()
    finally:
        try:
            session.commit()
        except Exception:
            session.rollback()
    return info
async def process_page_info(self, html):
    """Return the list of all entries on a single page."""
    soup2 = self.soup(html).find_all('a', attrs={'rel': 'bookmark'})
    info_list = []
    for s in soup2:
        s = self.soup(str(s))
        info_list.append((s.span.string, s.a['href']))
        # TODO: store this in redis and read it back from there
        resource = Resources(name=s.span.string,
                             owner='电波字幕组',
                             stype='tvshow',
                             original=s.a['href'])
        session.add(resource)
        try:
            session.commit()
            info_list = []
        except Exception as e:
            session.rollback()
            logging.error('Database insert failed -- {}'.format(str(e)))
        finally:
            session.close()
    return info_list
def chart(request, device=None, chart_type=None):
    device = urllib.unquote(device)
    chart_type = urllib.unquote(chart_type)
    # print 'device: ', device
    # print 'chart_type: ', chart_type
    session.rollback()
    session.commit()
    if not device:
        raise NoDeviceSpecified()
    if not chart_type:
        raise NoChartTypeSpecified()
    filter_date = datetime.datetime.utcnow() - datetime.timedelta(days=1)
    if device == 'nest':
        nest = session.query(NestTimeSeries).filter(NestTimeSeries.datetime > filter_date).all()
        df = pd.DataFrame([{'datetime': n.datetime, 'value': getattr(n, chart_type)}
                           for n in nest]).set_index('datetime')['value'].dropna()
    elif device == 'apex':
        apex = session.query(ApexTimeSeries).filter(ApexTimeSeries.device_name == chart_type).filter(ApexTimeSeries.datetime > filter_date).all()
        df = pd.DataFrame([{'datetime': a.datetime, 'value': a.value}
                           for a in apex]).set_index('datetime')['value'].dropna()
    elif device == 'wemo':
        wemo = session.query(WemoTimeSeries).filter(WemoTimeSeries.device_name == chart_type).filter(WemoTimeSeries.datetime > filter_date).all()
        df = pd.DataFrame([{'datetime': w.datetime, 'value': float(w.state)}
                           for w in wemo]).set_index('datetime')['value'].dropna()
    elif device == 'hue':
        hue = session.query(HueTimeSeries).filter(HueTimeSeries.device_name == chart_type).filter(HueTimeSeries.datetime > filter_date).all()
        df = pd.DataFrame([{'datetime': h.datetime, 'value': float(h.state) * float(h.reachable)}
                           for h in hue]).set_index('datetime')['value'].dropna()
    elif device == 'roomba':
        roomba = session.query(RoombaTimeSeries).filter(RoombaTimeSeries.device_name == chart_type).filter(RoombaTimeSeries.datetime > filter_date).all()
        df = pd.DataFrame([{'datetime': r.datetime, 'value': r.current}
                           for r in roomba]).set_index('datetime')['value'].dropna()
    elif device == 'flower':
        f = Flower()
        df = pd.DataFrame(f.get_data(1)).set_index('datetime')[chart_type].dropna()
    return render(request,
                  template_name='chart.html',
                  dictionary={'chart_type': 'device' + '_' + chart_type,
                              'series': df})
def main():
    global r
    logging.config.fileConfig(path_to_cfg)

    # which queues to check and the function to call
    queue_funcs = {'report': 'get_reports',
                   'spam': 'get_mod_queue',
                   'submission': 'get_new',
                   'comment': 'get_comments'}

    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict = get_enabled_subreddits()
            Condition.update_standards()
            cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))

    reports_mins = int(cfg_file.get('reddit', 'reports_check_period_mins'))
    reports_check_period = timedelta(minutes=reports_mins)
    last_reports_check = time()

    while True:
        try:
            # if the standard conditions have changed, reinit all conditions
            if Condition.update_standards():
                logging.info('Updating standard conditions from database')
                cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())

            # check reports if past checking period
            if elapsed_since(last_reports_check) > reports_check_period:
                last_reports_check = time()
                check_queues({'report': queue_funcs['report']},
                             sr_dict, cond_dict)

            check_queues({q: queue_funcs[q] for q in queue_funcs
                          if q != 'report'},
                         sr_dict, cond_dict)

            updated_srs = process_messages()
            if updated_srs:
                if any(sr not in sr_dict for sr in updated_srs):
                    sr_dict = get_enabled_subreddits(reload_mod_subs=True)
                else:
                    sr_dict = get_enabled_subreddits(reload_mod_subs=False)
                for sr in updated_srs:
                    update_conditions_for_sr(cond_dict,
                                             queue_funcs.keys(),
                                             sr_dict[sr])
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict = get_enabled_subreddits()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            session.rollback()
def check_items(name, items, sr_dict, cond_dict, stop_time):
    """Checks the items generator for any matching conditions."""
    item_count = 0
    comment_counts = Counter()
    start_time = time()
    seen_subs = set()

    logging.info('Checking new %ss', name)

    try:
        for item in items:
            # skip any items in /new that have been approved
            if name == 'submission' and item.approved_by:
                continue

            item_time = datetime.utcfromtimestamp(item.created_utc)
            if item_time <= stop_time:
                break

            subreddit = sr_dict[item.subreddit.display_name.lower()]
            conditions = cond_dict[item.subreddit.display_name.lower()][name]

            # don't need to check for shadowbanned unless we're in spam
            if name == 'spam':
                for condition in conditions:
                    condition.check_shadowbanned = True
            else:
                for condition in conditions:
                    condition.check_shadowbanned = False

            item_count += 1

            if name == 'comment':
                comment_counts[item.subreddit.display_name.lower()] += 1

            if subreddit.name not in seen_subs:
                setattr(subreddit, 'last_' + name, item_time)
                seen_subs.add(subreddit.name)

            logging.debug('  Checking item %s', get_permalink(item))

            # check removal conditions, stop checking if any matched
            if check_conditions(subreddit, item,
                                [c for c in conditions if c.action == 'remove']):
                continue

            # check set_flair conditions
            check_conditions(subreddit, item,
                             [c for c in conditions if c.action == 'set_flair'])

            # check approval conditions
            check_conditions(subreddit, item,
                             [c for c in conditions if c.action == 'approve'])

            # check alert conditions
            check_conditions(subreddit, item,
                             [c for c in conditions if c.action == 'alert'])

            # check report conditions
            check_conditions(subreddit, item,
                             [c for c in conditions if c.action == 'report'])

            # if doing reports, check auto-reapproval if enabled
            if (name == 'report' and
                    subreddit.auto_reapprove and
                    item.approved_by is not None):
                try:
                    # see if this item has already been auto-reapproved
                    entry = (session.query(AutoReapproval).filter(
                        AutoReapproval.permalink == get_permalink(item))
                        .one())
                    in_db = True
                except NoResultFound:
                    entry = AutoReapproval()
                    entry.subreddit_id = subreddit.id
                    entry.permalink = get_permalink(item)
                    entry.original_approver = item.approved_by.name
                    entry.total_reports = 0
                    entry.first_approval_time = datetime.utcnow()
                    in_db = False

                if (in_db or
                        item.approved_by.name != cfg_file.get('reddit', 'username')):
                    item.approve()
                    entry.total_reports += item.num_reports
                    entry.last_approval_time = datetime.utcnow()
                    session.add(entry)
                    session.commit()
                    logging.info('  Re-approved %s', entry.permalink)
                    log_request('reapprove')

        session.commit()
    except Exception as e:
        logging.error('  ERROR: %s', e)
        session.rollback()

    # This isn't really correct, since we don't collect any 0 samples
    # but the difference won't matter much in practice
    for subreddit in comment_counts:
        prev_total = (sr_dict[subreddit].avg_comments *
                      sr_dict[subreddit].avg_comments_samples)
        new_avg = ((prev_total + comment_counts[subreddit]) /
                   (sr_dict[subreddit].avg_comments_samples + 1))
        sr_dict[subreddit].avg_comments = new_avg
        sr_dict[subreddit].avg_comments_samples += 1
    session.commit()

    logging.info('  Checked %s items in %s',
                 item_count, elapsed_since(start_time))
    log_request('listing', item_count / 100 + 1)
def main():
    global r
    global search_comment_id
    global search_submission_id
    logging.config.fileConfig(path_to_cfg)
    r = praw.Reddit(cfg_file["reddit"]["user_agent"])
    enabled_subreddits = ["Guildwars2"]

    while True:
        try:
            authenticate(cfg_file["oauth2"])
        except Exception:
            logging.error("OAuth2 could not authenticate")
            continue
        else:
            logging.info("OAuth2 authenticated")
            break

    while True:
        try:
            subreddit_obj = session.query(subreddit).filter_by(website="reddit").first()
            search_comment_id = subreddit_obj.last_comment
            search_submission_id = subreddit_obj.last_submission
        except Exception:
            logging.warning("search_comment_id and search_submission_id initialized with 0")
            search_comment_id = 0
            search_submission_id = 0

        submission_queue = {}
        comment_queue = {}
        try:
            comment_queue = load_recent_comments(enabled_subreddits)
            submission_queue = load_recent_submissions(enabled_subreddits)
            distribute_queues(comment_queue, submission_queue)
            last_ids = session.query(subreddit).filter_by(website="reddit").first()
            if last_ids is None:
                row = subreddit()
                row.website = "reddit"
                row.last_submission = search_submission_id
                row.last_comment = search_comment_id
                # logging.info('last submission updated: ' + str(search_submission_id) + ' - last comment updated: ' + str(search_comment_id))
                session.add(row)
            else:
                session.query(subreddit).filter_by(website="reddit").update(
                    {"last_submission": search_submission_id,
                     "last_comment": search_comment_id}
                )
                # logging.info('last submission updated: ' + str(search_submission_id) + ' - last comment updated: ' + str(search_comment_id))
            session.commit()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error(e)
            session.rollback()

        try:
            to_be_commented = session.query(bot_comments).filter_by(submitted=False).all()
            for tbcm in to_be_commented:
                cm_obj = r.get_info(thing_id=tbcm.thing_id)
                if tbcm.thing_id[:2] == "t3":
                    try:
                        reply_obj = cm_obj.add_comment(tbcm.content)
                    except praw.errors.InvalidSubmission:
                        session.query(bot_comments).filter_by(id=tbcm.id).update(
                            {"submitted": True, "submitted_id": "del-1"}
                        )
                        logging.info(str(tbcm.id) + " in bot_comments not submitted, parent deleted")
                    else:
                        session.query(bot_comments).filter_by(id=tbcm.id).update(
                            {"submitted": True, "submitted_id": reply_obj.name}
                        )
                        logging.info(str(tbcm.id) + " in bot_comments submitted")
                elif tbcm.thing_id[:2] == "t1":
                    try:
                        reply_obj = cm_obj.reply(tbcm.content)
                    except praw.errors.InvalidComment:
                        session.query(bot_comments).filter_by(id=tbcm.id).update(
                            {"submitted": True, "submitted_id": "del-1"}
                        )
                        logging.info(str(tbcm.id) + " in bot_comments not submitted, parent deleted")
                    else:
                        session.query(bot_comments).filter_by(id=tbcm.id).update(
                            {"submitted": True, "submitted_id": reply_obj.name}
                        )
                        logging.info(str(tbcm.id) + " in bot_comments submitted")
                elif tbcm.thing_id[:1] == "i":
                    new_id = session.query(bot_comments).filter_by(id=tbcm.thing_id[1:]).first().submitted_id
                    if new_id == "del-1":
                        session.query(bot_comments).filter_by(id=tbcm.id).update(
                            {"submitted": True, "submitted_id": "del-1"}
                        )
                    if new_id is not None:
                        session.query(bot_comments).filter_by(id=tbcm.id).update({"thing_id": new_id})
            session.commit()
        except Exception as e:
            logging.error(e)
            session.rollback()

        try:
            to_be_submitted = session.query(bot_submissions).filter_by(submitted=False).all()
            for tbsm in to_be_submitted:
                if tbsm.type == "link":
                    r.submit(tbsm.subreddit, tbsm.title, url=tbsm.content)
                elif tbsm.type == "self":
                    r.submit(tbsm.subreddit, tbsm.title, text=tbsm.content)
                session.query(bot_submissions).filter_by(id=tbsm.id).update({"submitted": True})
                logging.info(str(tbsm.id) + " in bot_submissions submitted")
            session.commit()
        except Exception as e:
            logging.error(e)
            session.rollback()
def __commit(self):
    try:
        r = session.query(NR_release).filter(NR_release.id == self.release.id).first()
        if not r:
            session.add(self.release)
        session.add_all(self.motifs)
        session.add_all(self.loops)
        session.add_all(self.history)
        session.add_all(self.intersection)
        session.add_all(self.release_diff)
        session.commit()
        logging.info("Successful update")
    except sqlalchemy.exc.SQLAlchemyError as e:
        logging.error("Update failed. SQLAlchemy error. Rolling back.")
        logging.error(str(e))
        session.rollback()
        self.remove_release(self.release.id)
        sys.exit()
    except sqlalchemy.exc.DBAPIError as e:
        logging.error("Update failed. DBAPI error. Rolling back.")
        logging.error(str(e))
        session.rollback()
        self.remove_release(self.release.id)
        sys.exit()
    except Exception:
        logging.error("Update failed. Rolling back.")
        logging.error(sys.exc_info()[0])
        session.rollback()
        self.remove_release(self.release.id)
        sys.exit()