def on_success(self, retval, task_id, args, kwargs):
    """Celery success hook: log completion and persist a COMPLETED audit row.

    Resolves the user from the email embedded in the first task argument and
    writes a CeleryTask record. The scoped session is always released.

    Raises:
        ValueError: when no user matches the email in the task args.
    """
    super(EmailTask, self).on_success(retval, task_id, args, kwargs)
    logger.info('Task Completed Successfully : Task Name - {0}, args - {1}, id - {2}'.format(
        self.request.task, args, self.request.id
    ))
    try:
        email = (json.loads(args[0])).get('email')
        user_id = session.query(User.id).filter(User.email == email).first()
        if not user_id:
            raise ValueError("Enter-Valid-Email")
        try:
            celery_task_obj = CeleryTask(
                user_id=int(user_id[0]),
                task_module=self.request.task,
                payload_data=email,
                task_status='COMPLETED',
                started_on=datetime.datetime.now(),
                task_id=self.request.id,
            )
            session.add(celery_task_obj)
            session.commit()
        except Exception as e:
            # a failed insert is an error condition, not informational
            logger.error("Insertion Failed with - {}".format(str(e)))
            session.rollback()
    finally:
        # bug fix: previously skipped when the email lookup raised ValueError,
        # leaking the scoped session binding
        session.remove()
def add_shipping_info():
    """Render and process the "add shipping info" form.

    On POST, inserts a shipping record. On failure the transaction is rolled
    back and the user is returned to the form with an error message.
    """
    form = AddShippingForm()
    if request.method == "POST":
        data = {
            'company_id': form.company_id.data,
            'shipping_time': form.shipping_time.data,
            'model': form.model.data,
            'quantity': form.quantity.data,
            'worth': form.worth.data,
            'weight': form.weight.data,
            'shipping_number': form.shipping_number.data,
        }
        try:
            insert_shipping_info(**data)
        except Exception:
            # bug fix: the old handler discarded its redirect() result, rolled
            # back AFTER building it, then flashed a SUCCESS message and sent
            # the user to the index as though the insert had worked
            session.rollback()
            flash('Failed to add shipping info.')
            return redirect(url_for('add_shipping_info'))
        flash('You added a shipping info.')
        return redirect(url_for('index'))
    return render_template('add_shipping_info.html', form=form)
def on_failure(self, exc, task_id, args, kwargs, einfo):
    """Celery failure hook: log the failure and persist an EXCEPTION audit row.

    Resolves the user from the email embedded in the first task argument and
    writes a CeleryTask record. The scoped session is always released.

    Raises:
        ValueError: when no user matches the email in the task args.
    """
    super(EmailTask, self).on_failure(exc, task_id, args, kwargs, einfo)
    logger.info('Task Failed : Task Name - {0}, args - {1}, id - {2}, stack_trace - {3}'.format(
        self.request.task, args, self.request.id, einfo
    ))
    try:
        email = (json.loads(args[0])).get('email')
        user_id = session.query(User.id).filter(User.email == email).first()
        if not user_id:
            raise ValueError("Enter-Valid-Email")
        try:
            celery_task_obj = CeleryTask(
                user_id=int(user_id[0]),
                task_module=self.request.task,
                payload_data=email,
                task_status='EXCEPTION',
                started_on=datetime.datetime.now(),
                task_id=self.request.id,
            )
            session.add(celery_task_obj)
            session.commit()
        except Exception as e:
            # a failed insert is an error condition, not informational
            logger.error("Insertion Failed with - {}".format(str(e)))
            session.rollback()
    finally:
        # bug fix: previously skipped when the email lookup raised ValueError,
        # leaking the scoped session binding
        session.remove()
def song_delete():
    """Delete a song and its associated file record.

    Expects JSON with song_id and file_id; answers 500 when the song does not
    exist, 200 after deleting both rows.
    """
    headers = {"Location": url_for("song_delete")}
    data = request.json
    # bug fix: query the song once and reuse it (the old code queried twice)
    del_song = session.query(models.Song).get(data["song_id"])
    if del_song:
        session.delete(del_song)
        del_file = session.query(models.File).get(data["file_id"])
        # bug fix: session.delete(None) raises when the file row is missing
        if del_file is not None:
            session.delete(del_file)
        session.commit()
    else:
        # bug fix: py2 print statement is a syntax error under Python 3
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps({"status": "failed - that song doesnt exists"}),
                        500, mimetype="application/json")
    return Response(json.dumps({"status": "deleted"}), 200,
                    headers=headers, mimetype="application/json")
def song_post():
    """Create a song and its file record.

    Answers 500 when a song with the same id already exists, 200 otherwise.
    """
    headers = {
        "Location": url_for("song_post"),
        "Content-Type": "application/json"
    }
    data = request.json
    post_song = models.Song(song_name=data["song_name"], id=data["song_id"])
    post_file = models.File(song_id=data["song_id"],
                            file_name=data["file_name"],
                            id=data["file_id"])
    if not session.query(models.Song).get(post_song.id):
        session.add_all([post_song, post_file])
        session.commit()
    else:
        # bug fix: py2 print statement is a syntax error under Python 3
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps({"status": "failed - that song already exists"}),
                        500, mimetype="application/json")
    return Response(stripUnicode(data), 200,
                    headers=headers, mimetype="application/json")
def session_clear(exception):
    """Teardown hook: roll back an active session after an error, commit
    otherwise, and always close the session."""
    errored = bool(exception) and session.is_active
    if errored:
        session.rollback()
    else:
        session.commit()
    session.close()
async def handle_subscribe(bot, ctx, sub_arg):
    """Subscribe the sender to one vtb (by numeric id) or to all of them.

    A DB error (e.g. a duplicate subscription) is rolled back and deliberately
    still reported as success — the reply text says "ignore if already
    subscribed".
    """
    session.flush()
    # idiom fixes: truthiness instead of len()==0, `not` instead of == False,
    # comprehension instead of map(lambda ...)
    if not sub_arg:
        return await bot.send(ctx, '缺少参数')
    if not is_number(sub_arg[0]) and sub_arg[0] != 'all':
        return await bot.send(ctx, message='参数只能是编号或者all', at_sender=True)
    vid_list = [row[0] for row in session.query(Vtb.vid).all()]
    user_id = ctx['user_id']
    try:
        if sub_arg[0] == 'all':
            session.bulk_save_objects(
                [UserSubscribe(user_id=user_id, vid=vid) for vid in vid_list])
        else:
            if int(sub_arg[0]) not in vid_list:
                return await bot.send(ctx, message='你订阅了不存在的vtb', at_sender=True)
            model = UserSubscribe(user_id=user_id, vid=sub_arg[0])
            session.merge(model)
    except Exception:
        session.rollback()
    return await bot.send(ctx, message='成功订阅(如已订阅请忽略)', at_sender=True)
def auto_rollback():
    """Fixture: empty the TodoItem table, disable real commits for the test,
    and roll everything back afterwards."""
    existing = session.query(TodoItem).all()
    for todo in existing:
        session.delete(todo)
    session.flush()
    # neuter commits so the test cannot persist anything for real
    engine.commit = lambda: None
    yield
    session.rollback()
async def handle_unsubscribe(bot, ctx, sub_arg):
    """Cancel the sender's subscription to one vtb (by numeric id) or to all.

    DB errors are rolled back; the reply deliberately reads as success with
    "ignore if not subscribed".
    """
    # idiom fixes: truthiness instead of len()==0, `not` instead of == False
    if not sub_arg:
        return await bot.send(ctx, '缺少参数')
    if not is_number(sub_arg[0]) and sub_arg[0] != 'all':
        return await bot.send(ctx, message='参数只能是编号或者all', at_sender=True)
    user_id = ctx['user_id']
    try:
        if sub_arg[0] == 'all':
            session.query(UserSubscribe).filter(
                UserSubscribe.user_id == user_id).delete()
        else:
            # the guard above already ensured sub_arg[0] is numeric here, so
            # the old redundant is_number() re-check was removed
            session.query(UserSubscribe).filter(
                and_(UserSubscribe.user_id == user_id,
                     UserSubscribe.vid == sub_arg[0])).delete()
    except Exception:
        session.rollback()
    return await bot.send(ctx, message='成功取消订阅(如未订阅请忽略)', at_sender=True)
def _all(cls, *instances):
    """Persist all *instances* in one transaction.

    Returns True on success; warns, rolls back and returns False on failure.
    """
    try:
        session.add_all(instances)
        session.commit()
        return True
    except Exception:
        # bug fix: a bare `except:` would also swallow KeyboardInterrupt
        # and SystemExit
        warnings.warn(f'Add fails: instances={instances}')
        session.rollback()
        return False
def session_commit(response):
    """After-request hook.

    Error responses (status >= 400) pass through untouched. Otherwise the
    session is committed; a DatabaseError triggers a rollback and is re-raised.
    """
    if response.status_code < 400:
        try:
            session.commit()  # pylint: disable=maybe-no-member
        except DatabaseError:
            session.rollback()  # pylint: disable=maybe-no-member
            raise
    return response
def edit_item(category_name, item_title, item=None):
    """Edit an item.

    On POST, validates title and category, saves the change, and re-renders
    the form with a warning on validation errors. On GET, renders the form.
    """
    if request.method == "POST":
        if request.form["title"]:
            item.title = request.form["title"].strip()
        if not item.title:
            flash("Please enter a title.", "warning")
            categories = session.query(Category).order_by(
                asc(Category.name)).all()
            return render_template(
                "edit_item.html", item=item, categories=categories)
        if request.form["description"]:
            item.description = request.form["description"].strip()
        if request.form["category_name"]:
            name = request.form["category_name"].strip()
            category = session.query(Category).filter_by(name=name).one_or_none()
            if category:
                item.category_id = category.id
            else:
                flash("Please select a category.", "warning")
                categories = session.query(Category).order_by(
                    asc(Category.name)).all()
                return render_template(
                    "edit_item.html", item=item, categories=categories)
        try:
            item.update()
            session.add(item)
            session.commit()
            flash(
                "Item '{}' Successfully Edited".format(item.title), "success")
            return redirect(url_for(
                "item.show_item", category_name=item.category.name,
                item_title=item.title))
        except exc.IntegrityError:
            session.rollback()
            # bug fix: flash() takes (message, category); the old call passed
            # "warning" twice as a third positional arg, raising TypeError
            flash(
                "You can not update this item since another item already "
                " exists in the database with the same title and category.",
                "warning")
            # bug fix: redirect using the route's own arguments; the old code
            # re-queried with a possibly-unbound `category` and then
            # dereferenced a possibly-None result
            return redirect(url_for(
                "item.edit_item", category_name=category_name,
                item_title=item_title))
    else:
        categories = session.query(Category).order_by(
            asc(Category.name)).all()
        return render_template(
            "edit_item.html", item=item, categories=categories)
def post(self):
    """Create an order together with its products and per-product options.

    Returns 201 on success, 400 after rollback on any error. The session is
    always closed, including on the error path.
    """
    args = Order.parser.parse_args()
    # the payload uses single quotes; convert them to double quotes so it
    # parses as JSON into a dict
    data = json.loads(args['data'][0].replace("\'", "\""))
    # order total
    total_price = data['totalPrice']
    try:
        order_time = datetime.now()  # time the order was placed
        order = models.Order(order_time=order_time, completed=False,
                             total_price=total_price)  # Orders table row
        session.add(order)
        session.flush()  # need the generated pk to link order products
        # list of ordered menu items from the request
        menu_list = data['menus']
        for each in menu_list:
            # one OrderProduct row per ordered menu item
            order_menu_pk = each['menuId']  # menu pk
            quantity = each['quantity']     # quantity ordered
            product = models.OrderProduct(order_pk=order.order_pk,
                                          order_menu_pk=order_menu_pk,
                                          quantity=quantity)
            session.add(product)
            session.flush()  # need the generated pk to link the options
            product_pk = product.product_pk
            # one OrderOption row per selected option
            for each_option in each['options']:
                option_id = each_option  # option pk
                product_option = models.OrderOption(
                    product_pk=product_pk, option_pk=option_id)
                session.add(product_option)
        session.commit()  # persist everything only if no error occurred
    except Exception:
        session.rollback()
        return Response(status=400)
    finally:
        # bug fix: the old code only closed the session on the success path;
        # the early return in `except` skipped close() entirely
        session.close()
    return Response(status=201)  # CREATED
def create_user(**kwargs):
    """Create a new user from keyword params.

    :param kwargs: must contain ``user_id`` plus any columns of the usertg table
    :return: the created user object, or None when the insert hits a constraint
    """
    assert kwargs.get('user_id'), f'This func requires `user_id` key in kwargs,' \
                                  f' because it will create user by this value. Received {kwargs}'
    try:
        new_user = UserTg.create(**kwargs)
        session.commit()
    except sa.exc.IntegrityError:
        session.rollback()
        return None
    return new_user
def create_connection(name_key, awsAccessKeyId, awsSecretKeyId):
    """Store an AWS connection entry and return a Connection handle.

    :name_key: Name connection string to aws in table connection
    :awsAccessKeyId: Access key id for access to aws
    :awsSecretKeyId: Secret key id for access to aws
    :raises: re-raises any error after logging and rolling back
    """
    logging.info("Creating connection")
    try:
        session.add(ConnectionTable(name_key, awsAccessKeyId, awsSecretKeyId))
        session.commit()
    except Exception:
        # bug fix: bare `except:` also trapped KeyboardInterrupt/SystemExit
        logging.error("Error add connection entry to ConnectionTable")
        session.rollback()
        raise
    return Connection(name_key)
def insert_sensor_data(json, status):
    """Persist one sensor reading built from the *json* payload.

    Best-effort: any failure is printed and the transaction rolled back.
    NOTE(review): the parameter shadows the stdlib ``json`` module — kept to
    preserve the existing call interface.
    """
    try:
        row = Sensor(regiao=json['tag'].split('.')[1],
                     timestamp=datetime.fromtimestamp(
                         json['timestamp'],
                         pytz.timezone("America/Sao_Paulo")),
                     tag=json['tag'],
                     valor=json['valor'],
                     status=status)
        session.add(row)
        session.commit()
        # re-read server-generated defaults back onto the instance
        session.refresh(row)
    except Exception as err:
        print(err)
        session.rollback()
def addLEIRecord(self):
    """Map the parsed legal-entity dict onto a LegalEntity row and insert it.

    Every attempt increments db_record_cnt; a DataError/IntegrityError rolls
    back, increments db_error_cnt and prints a short diagnostic.
    """
    # source-document key -> LegalEntity column (replaces 30+ repeated
    # `db_record.x = self.legal_entity.get(...)` assignments)
    field_map = {
        'LEI': 'lei',
        'LegalName': 'legal_name',
        'LegalAddressLine1': 'legal_address_line1',
        'LegalAddressLine2': 'legal_address_line2',
        'LegalAddressLine3': 'legal_address_line3',
        'LegalAddressLine4': 'legal_address_line4',
        'LegalAddressCity': 'legal_address_city',
        'LegalAddressRegion': 'legal_address_region',
        'LegalAddressCountry': 'legal_address_country',
        'LegalAddressPostalCode': 'legal_address_postal_code',
        'HeadquartersAddressLine1': 'hq_address_line1',
        'HeadquartersAddressLine2': 'hq_address_line2',
        'HeadquartersAddressLine3': 'hq_address_line3',
        'HeadquartersAddressLine4': 'hq_address_line4',
        'HeadquartersAddressCity': 'hq_address_city',
        'HeadquartersAddressRegion': 'hq_address_region',
        'HeadquartersAddressCountry': 'hq_address_country',
        'HeadquartersAddressPostalCode': 'hq_address_postal_code',
        'BusinessRegister': 'business_registry',
        'BusinessRegisterEntityID': 'business_registry_id',
        'LegalJurisdiction': 'legal_jurisdiction',
        'LegalForm': 'legal_form',
        'EntityStatus': 'entity_status',
        'EntityExpirationDate': 'entity_expiration_date',
        'EntityExpirationReason': 'entity_expiration_reason',
        'SuccessorLEI': 'successor_lei',
        'InitialRegistrationDate': 'initial_registration_date',
        'LastUpdateDate': 'last_update_date',
        'RegistrationStatus': 'registration_status',
        'NextRenewalDate': 'next_renewal_date',
        'ManagingLOU': 'managing_lou',
        'ValidationSources': 'validation_status',
    }
    db_record = LegalEntity()
    for source_key, column in field_map.items():
        setattr(db_record, column, self.legal_entity.get(source_key, None))
    self.db_record_cnt += 1
    try:
        session.add(db_record)
        session.commit()
    except (DataError, IntegrityError) as e:
        self.db_error_cnt += 1
        session.rollback()
        # bug fix: py2 print statements are syntax errors under Python 3, and
        # `e.orig.message` relied on the py2-only .message attribute
        print("===========================")
        print("LEI: " + self.legal_entity.get('LEI', 'No LEI'))
        print("DB record count: " + str(self.db_record_cnt))
        print("DB error count: " + str(self.db_error_cnt))
        print(str(e.orig), e.params)
def create_cluster(name_cluster, name_key, zone):
    """Store a cluster entry and return a Cluster handle.

    :name_cluster: Name new cluster
    :name_key: Name connection string to aws in table connection
    :zone: availability zone for the cluster
    :raises: re-raises any error after logging and rolling back
    """
    logging.info("Creating cluster")
    try:
        session.add(ClusterTable(name_cluster, name_key, zone))
        session.commit()
    except Exception:
        # bug fix: bare `except:` also trapped KeyboardInterrupt/SystemExit
        logging.error("Error add cluster to table")
        session.rollback()
        raise
    return Cluster(name_cluster)
def create_user_profiles_from_invite(args):
    """Create invited user profiles with generated passwords and mail the credentials.

    Returns None when any email already has a profile or when the insert
    fails; otherwise returns the list of created users after mailing each one.
    """
    user_list = []
    for key, value in ast.literal_eval(args['user_list']).items():
        if validate_user_profile_by_email(email=value['email']):
            return None
        user_list.append(
            UserProfile(email=value['email'],
                        role_id=value['role'],
                        password=generate_password(),
                        company_id=args['company_id']))
    try:
        session.add_all(user_list)
        session.commit()
    except Exception:
        session.rollback()
        # bug fix: the old code fell through after rollback and returned the
        # UNSAVED user list as if the invite had succeeded
        return None
    for user in user_list:
        send_mail(user.email, user.password)
    return user_list
def song_post():
    """Create a song and its file record.

    Answers 500 when a song with the same id already exists, 200 otherwise.
    """
    headers = {"Location": url_for("song_post"),
               "Content-Type": "application/json"}
    data = request.json
    post_song = models.Song(song_name=data["song_name"], id=data["song_id"])
    post_file = models.File(song_id=data["song_id"],
                            file_name=data["file_name"],
                            id=data["file_id"])
    if not session.query(models.Song).get(post_song.id):
        session.add_all([post_song, post_file])
        session.commit()
    else:
        # bug fix: py2 print statement is a syntax error under Python 3
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps({"status": "failed - that song already exists"}),
                        500, mimetype="application/json")
    return Response(stripUnicode(data), 200,
                    headers=headers, mimetype="application/json")
def post(self):
    """Create a project and attach the given users to it.

    Aborts with 404 when any username is unknown or the commit fails;
    otherwise returns the project URL and its users.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('project_name', type=str, required=True,
                        help='Name of project')
    parser.add_argument('users', type=str, action='append')
    args = parser.parse_args()
    project_name = args['project_name']
    all_users = args['users']
    # bug fix: validate every username BEFORE touching the session; the old
    # code did session.add(project) first, so an unknown name aborted with a
    # half-added project left pending in the session
    all_names = get_all_usernames()
    user_objects = []
    for name in all_users:
        if name in all_names:
            user_objects.append(get_user(name))
        else:
            abort(404, {'message': 'Username %s does not exist' % name})
    project = Project(project_name=project_name,
                      date_added=datetime.datetime.now())
    session.add(project)
    project.add_users(user_objects)
    try:
        session.commit()
    except Exception as project_exception:
        session.rollback()
        abort(404, {'message': str(project_exception)})
    return jsonify({
        'project': 'http://%s:%s/project/%s' % (HOST, PORT, project_name),
        'users': all_users
    })
def song_delete():
    """Delete a song and its associated file record.

    Expects JSON with song_id and file_id; answers 500 when the song does not
    exist, 200 after deleting both rows.
    """
    headers = {"Location": url_for("song_delete")}
    data = request.json
    # bug fix: query the song once and reuse it (the old code queried twice)
    del_song = session.query(models.Song).get(data["song_id"])
    if del_song:
        session.delete(del_song)
        del_file = session.query(models.File).get(data["file_id"])
        # bug fix: session.delete(None) raises when the file row is missing
        if del_file is not None:
            session.delete(del_file)
        session.commit()
    else:
        # bug fix: py2 print statement is a syntax error under Python 3
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps({"status": "failed - that song doesnt exists"}),
                        500, mimetype="application/json")
    return Response(json.dumps({"status": "deleted"}), 200,
                    headers=headers, mimetype="application/json")
def post(self):
    '''
    Create a user account.

    Attributes:
        username    Username, string
        password    Password, string
    '''
    parser = reqparse.RequestParser()
    parser.add_argument('username', type=str, help='Username of account')
    parser.add_argument('password', type=str, help='Password of account')
    args = parser.parse_args()
    username = args['username']
    password = args['password']
    # bug fix: reject missing credentials BEFORE using them in a query; the
    # old code checked for None only after the duplicate lookup
    if username is None or password is None:
        abort(404, 'Must supply both username and password')
    # perf fix: filtered lookup instead of loading every username into memory
    if session.query(User.username).filter_by(username=username).first() is not None:
        abort(404, {'message': 'Username %s already exists' % username})
    user = User(username=username)
    user.hash_password(password)
    try:
        session.add(user)
        session.commit()
    except Exception as e:
        print(str(e))
        session.rollback()
        abort(404, {'message': str(e)})
    return jsonify({
        'Username': username,
        'location': 'http://%s:%s/user/%s' % (HOST, PORT, username)
    })
def getTask(requestToken,block=True):
    """
    -get a task out from the queue
    -update the task_log entry with dequeue time
    -return the task to the caller-crawlnode

    Returns the dequeued task tuple, or None (implicitly) when anything fails;
    every failure path emails the traceback and rolls back the session.
    NOTE(review): uses legacy SQLAlchemy session API (begin/save_or_update) —
    presumably an old scoped/transactional session; confirm before migrating.
    """
    try:
        # task is a tuple; task[1] carries the task object (workspace_id, id)
        task = tm.get(block,requestToken)
        log.debug('trying to update the corresponding task_log with dequeue time')
        # bind the model layer to the workspace-specific database first
        bindModelToDb(workspace_id=task[1].workspace_id)
        session.begin()
        task_log=session.query(model.TaskLog).filter_by(id=task[1].id).one()
        task_log.dequeue_time=datetime.utcnow()
        session.save_or_update(task_log)
        session.flush()
        log.debug("db entry updated for, task_log_id: %s, with a dequeue time." % task[1].id)
        log.debug('returning the task after getting from priority queue and updating dequeue time')
        session.commit()
        return task
    except:
        # best-effort alerting; interval throttles duplicate emails (seconds)
        email_exception(str(traceback.format_exc()),interval = 600)
        log.exception('failed to get a task from the priority queue or failed to update the dequeue time')
        log.critical('failed to get a task from the priority queue or failed to update the dequeue time')
        if session:
            session.rollback()
def sign_up():
    """Register a participant: validate credentials, reject duplicates, and
    store a sha256_crypt-hashed password."""
    data = request.form
    # bug fix: the old `try: if data['username'] and data['password']: pass`
    # only caught MISSING keys via KeyError — empty strings slipped through
    if not data.get('username') or not data.get('password'):
        return message_('Pass and user required')
    query = session.query(Participant).\
        filter(Participant.username == data['username'])
    try:
        query.one()
        return message_('Already signed up')
    except NoResultFound:
        pass
    hashed_password = sha256_crypt.encrypt(data['password'])
    instance = Participant(username=data['username'], password=hashed_password)
    try:
        session.add(instance)
        session.commit()
    except (IntegrityError, InvalidRequestError) as e:
        session.rollback()
        print(e)
        return message_('Error. Contact zey.')
    return message_('Signed up. Names will be released on the 17th.')
def _by(cls, *models, all=False, count=False, iter=False, lock=None, **condition):
    """Query *models* filtered by **condition.

    Result selection (first flag wins): ``all`` -> query.all(),
    ``count`` -> row count, ``iter`` -> the query object itself,
    otherwise the first row. On failure: warn, roll back, and return an
    empty list (when ``all``) or None.
    """
    try:
        query = session.query(*models).filter_by(**condition)
        if lock:
            query = query.with_lockmode(lock)
        if all:
            result = query.all()
        elif count:
            result = query.count()
        elif iter:
            result = query
        else:
            result = query.first()
        return result
    except Exception as e:
        print(e)
        args = f'models={repr(models)}, all={all}, condition={condition}'
        warnings.warn(f'Query fails: {args}')
        session.rollback()
        return [] if all else None
def new_category_item(category_name):
    """Add a new item.

    On POST, builds the item from the form, resolves its category, and saves
    it for the logged-in user. On GET, renders the form.
    """
    if request.method == "POST":
        item = Item()
        if request.form["title"]:
            item.title = request.form["title"].strip()
        if request.form["description"]:
            item.description = request.form["description"].strip()
        if request.form["category_name"]:
            category_name = request.form["category_name"].strip()
            category = session.query(Category).filter_by(
                name=category_name).one_or_none()
            # bug fix: an unknown category name used to crash with
            # AttributeError on `category.id` (one_or_none can return None)
            if category is None:
                flash("Please select a category.", "warning")
                return redirect(url_for(
                    "item.new_item", category_name=category_name))
            item.category_id = category.id
        try:
            item.user_id = login_session["user_id"]
            session.add(item)
            session.commit()
            flash("Item '{}' Successfully Added".format(item.title), "success")
            return redirect(url_for(
                "item.show_item", category_name=item.category.name,
                item_title=item.title))
        except exc.IntegrityError:
            session.rollback()
            flash(
                "You can not add this item since another item already "
                " exists in the database with the same title and category.",
                "warning")
            return redirect(url_for(
                "item.new_item", category_name=category_name))
    else:
        categories = session.query(Category).order_by(
            asc(Category.name)).all()
        return render_template(
            "new_item.html", categories=categories,
            category_name=category_name)
def wrap(*args, **kwargs):
    """Decorator body: run the wrapped callable; on failure roll back the
    session and answer with a 500 payload."""
    try:
        return func(*args, **kwargs)
    except Exception as e:
        # bug fix: catching BaseException also swallowed KeyboardInterrupt
        # and SystemExit, making the process hard to stop
        session.rollback()
        return {"message": e.args}, 500
def wrapper(*args, **kwargs):
    """Decorator body: run the wrapped view, rolling back the session and
    mapping each known exception type to an HTTP error response."""
    try:
        return fn(*args, **kwargs)
    except UnprocessableEntity as exc:
        session.rollback()
        return abort(422, message=str(exc))
    except ValueError as exc:
        session.rollback()
        return abort(400, message=str(exc))
    except NoAuthorizationError as exc:
        session.rollback()
        return abort(401, message=str(exc))
    except InvalidHeaderError as exc:
        session.rollback()
        return abort(400, message=str(exc))
    except WrongTokenError as exc:
        session.rollback()
        return abort(400, message=str(exc))
    except ExpiredSignatureError as exc:
        session.rollback()
        # consistency fix: every other branch returns the abort() call; this
        # one silently relied on abort() raising
        return abort(400, message=str(exc))
    except Exception:
        session.rollback()
        return abort(500, message='Something Went Wrong')
def wrap(*args, **kwargs):
    """Decorator body: run the wrapped callable; roll back the session and
    return None on any error."""
    try:
        result = func(*args, **kwargs)
    except Exception:
        session.rollback()
        return None
    return result
def __createTasks(connector_instances, priority):
    """
    - gets a calculated frequency from the taskfeeder
    - calculates a priority based on the frequency
    - priority=96/frequency+1
    - frequency|priority
    - - 1 - such tasks do not come from schedule - online, one time, weekly, monthly
    - 96 2 - 15 mins job
    - | |
    - 1 97 - once in a day
    - read scheduled connector instances from db according to the frequency
    - creates connector instance log and conector instance data for each connector instance
    - create task objects
    - putTask
    - if applyKeywords ==False, put keywords=None

    NOTE(review): Python 2 code (print statements) using legacy SQLAlchemy
    session API (begin/save_or_update); a failure on one instance is logged
    and rolled back without stopping the remaining instances.
    """
    try:
        log.debug("iterating over connector instances, to create tasks")
        #to give the online tasks priority = 1
        for connector_instance in connector_instances:
            try:
                log.debug('trying to create a connector instance log')
                # skip instances that are already enqueued / not eligible
                if not __enqueueConnector(connector_instance):
                    continue
                # identifier is derived from workspace, uri, priority and
                # optional filter words; falsy means already enqueued
                task_identifier = __getTaskIdentifier(connector_instance.workspace_id,
                    json.loads(connector_instance.instance_data)['uri'],
                    priority, json.loads(connector_instance.\
                    instance_data).get('instance_filter_words'))
                print task_identifier
                if task_identifier:
                    session.begin()
                    print connector_instance.id
                    connector_instance_log = model.ConnectorInstanceLog()
                    connector_instance_log.connector_instance_id=connector_instance.id
                    print connector_instance_log
                    session.save_or_update(connector_instance_log)
                    # flush so the generated log id is available below
                    session.flush()
                    print "got a connector instance log"
                    log.debug("got a connector instance log")
                    log.debug("trying to recreate connector instance data from connector and instance data")
                    #re-construct the instance_data (merge the connector data in)
                    task= __createTask(connector_instance, connector_instance_log.id,priority)
                    log.debug("task created")
                    print "task created"
                    session.commit() #NOT SURE ABOUT THE LOCATION OF COMMIT
                    log.debug('calling tm putTask')
                    print "calling tm puttask"
                    __putTask(task,task_identifier)
                    print "return from tm puttask"
                    log.debug('return from tm putTask')
                else:
                    log.info('task already enqueued , so not enqueuing again')
            except:
                # per-instance failure: log, roll back, continue with the rest
                print traceback.format_exc()
                log.exception('one of the scheduled task failed to be read')
                log.critical('one of the scheduled task failed to be read')
                session.rollback()
        log.debug('all scheduled tasks created, iteration done')
    except:
        log.exception('failed to read schedule')
        log.critical('failed to read schedule')
# Seed requirement rows: (technology_id, vacancy_id, experience_id)
requirement_list.extend(
    Requirement(technology_id=tech, vacancy_id=vac, experience_id=exp)
    for tech, vac, exp in [(1, 1, 1), (4, 2, 4), (2, 3, 2), (3, 4, 3)])

# Seed technologies, grouped by category id
technology_list = [
    Technology(name=tech_name, category_id=cat_id)
    for tech_name, cat_id in [
        ('html', 1), ('python', 1), ('js', 1),
        ('flask', 2), ('react', 2), ('django', 2),
        ('docker', 3),
    ]
]

tables_list = [
    category_list, experience_list, company_range_list, profession_list,
    role_list, company_list, userprofile_list, purchases_list, employee_list,
    vacancy_list, technology_list, competence_list, requirement_list
]

# Insert all seed data in a single transaction; roll back if anything fails.
try:
    for table in tables_list:
        session.add_all(table)
    session.commit()
except Exception:
    session.rollback()
def __putTask(task, task_identifier):
    """
    takes a task object
    tries to get a connector class based on the instance_data of a conector instance,
    restore the old session_info for a specific url
    call 'put' on the priority queue

    NOTE(review): Python 2 code (print statements) using legacy SQLAlchemy
    session API (begin/save_or_update) and pickle for session_info — pickle
    here assumes the stored blobs are trusted.
    """
    try:
        # fall back to guessing the connector class from the instance data
        if not task.connClass:
            task.connClass = __guessConnectorName(task.instance_data, task.workspace_id)
        try:
            tokens = config.get(section='Connector', option='%s_tokens' % str(task.connClass.lower()))
            if tokens:
                task.token = tuple([t.strip() for t in tokens.split(',') if t.strip()])
        except NoOptionError:
            # tokens are optional per connector
            print "Ignoring tokens for %s" % (str(task.connClass.lower()), )
        # connector = session.query(model.Connector).filter_by(name=task.connClass).first()
        connector=session.query(model.Connector).filter_by(name=task.connClass).first()
        if connector:
            # merge connector-level data into the instance data
            task.instance_data.update(json.loads(connector.connector_data))
        ## log.debug('got connector class: %s' % task.connClass)
        #TODO-HAS A FLAW - ONLY CHKING THE WORKSPACE ID RATHER THEN THE CONNECTOR INSTANCE ID and CHK FOR CHANGED KEYWORDS
        #I have task.connector_instance_id >> last connector_instance_log_id >> match uri >> session_info
        # find the previous run's log for this connector instance ...
        last_connector_instance_log=session.query(model.ConnectorInstanceLog).\
            filter(model.ConnectorInstanceLog.id != task.connector_instance_log_id).\
            filter(model.ConnectorInstanceLog.connector_instance_id == task.connector_instance_id).\
            order_by(model.ConnectorInstanceLog._tid.desc()).first()
        if last_connector_instance_log:
            # ... and restore its session_info for the same uri, if any
            res=session.query(model.TaskLog).\
                filter_by(uri=task.instance_data['uri'],
                          connector_instance_log_id=last_connector_instance_log.id,
                          delete_status=False).\
                order_by(model.TaskLog.completion_time.desc()).first()
            if res:
                log.debug("putting last session info")
                task.session_info=pickle.loads(res.session_info)
        log.debug("trying to create a task log entry in the DB")
        #creating a task and putting the enqueue time
        session.begin()
        task_log = model.TaskLog()
        task_log.enqueue_time = datetime.utcnow()
        task_log.uri = task.instance_data['uri']
        task_log.workspace_id = task.workspace_id
        task_log.connector_instance_log_id = task.connector_instance_log_id
        task_log.level = task.level
        task_log.session_info = pickle.dumps(task.session_info)
        session.save_or_update(task_log)
        session.flush()
        session.commit()
        #task_log_id=task_log.id
        log.info("db entry created for, task_log_id: %s, with enqueue time" % task_log.id)
        log.debug('trying to put a task in the priority queue')
        # the task carries its task_log id so the dequeue side can update it
        task.id = task_log.id
        task.instance_data['parent_extracted_entities'] = __putParentSessionInfo(task.connector_instance_id)
        log.debug('task put in the priority queue')
        #session.commit()
        task.task_identifier = task_identifier
        #have included this attribute which is different from id
        #as a part of task which will be used to put/remove a task from bdb
        tm.put((task.priority, task, task_identifier))
    except:
        print traceback.format_exc()
        #email_exception(str(traceback.format_exc()),interval = 600)
        log.exception('failed to get a connector/get session info or failed to create taskLog/ put task in the priority queue')
        log.critical('failed to get a connector/get session info or failed to create taskLog/ put task in the priority queue')
        if session:
            session.rollback()