def set_participation_urls(self, external_data):
    """Build per-bidder participation URLs and PATCH them to the api.

    Active bids (missing 'status' defaults to 'active') get a personalised
    login link built from the auction url and a bid-specific hash; inactive
    bids are sent with their id only.

    :param external_data: tender data from the api ({'data': {'bids': [...]}})
    """
    request_id = generate_request_id()
    bids_payload = []
    for bid in external_data["data"]["bids"]:
        entry = {"id": bid["id"]}
        if bid.get('status', 'active') == 'active':
            participation_url = self.auction_url
            participation_url += '/login?bidder_id={}&hash={}'.format(
                bid["id"], calculate_hash(bid["id"], self.hash_secret))
            entry["participationUrl"] = participation_url
        bids_payload.append(entry)
    patch_data = {"data": {"auctionUrl": self.auction_url,
                           "bids": bids_payload}}
    LOGGER.info("Set auction and participation urls for tender {}".format(
        self.source_id),
        extra={
            "JOURNAL_REQUEST_ID": request_id,
            "MESSAGE_ID": AUCTION_WORKER_SET_AUCTION_URLS
        })
    LOGGER.info(repr(patch_data))
    make_request(self.api_url + '/auction', patch_data,
                 user=self.api_token,
                 request_id=request_id,
                 session=self.session)
def run_server(auction, mapping_expire_time, logger, timezone='Europe/Kiev',
               bids_form=BidsForm, bids_handler=BidsHandler,
               form_handler=form_handler, cookie_path=AUCTION_SUBPATH):
    """Configure and start the per-auction web frontend.

    Sets up the Flask app (logger, timezone, session cookie, OAuth remote
    app, bid form/handler hooks), binds a listener, starts a gevent
    WSGIServer on it and registers the auction-id -> url mapping.

    :param auction: auction worker instance (provides worker_defaults/context)
    :param mapping_expire_time: TTL for the auction-id -> url mapping
    :param logger: logger to splice into the Flask app
    :param timezone: tz name stored in app config
    :return: the started WSGIServer instance
    """
    app = initialize_application()
    add_url_rules(app)
    app.config.update(auction.worker_defaults)
    # Replace Flask custom logger
    app.logger_name = logger.name
    app._logger = logger
    app.config['timezone'] = tz(timezone)
    app.config['SESSION_COOKIE_PATH'] = '/{}/{}'.format(
        cookie_path, auction.context['auction_doc_id'])
    app.config['SESSION_COOKIE_NAME'] = 'auction_session'
    app.oauth = OAuth(app)
    app.gsm = getGlobalSiteManager()
    app.context = app.gsm.queryUtility(IContext)
    app.bids_form = bids_form
    app.bids_handler = bids_handler()
    app.form_handler = form_handler
    app.remote_oauth = app.oauth.remote_app(
        'remote',
        consumer_key=app.config['OAUTH_CLIENT_ID'],
        consumer_secret=app.config['OAUTH_CLIENT_SECRET'],
        request_token_params={'scope': 'email'},
        base_url=app.config['OAUTH_BASE_URL'],
        access_token_url=app.config['OAUTH_ACCESS_TOKEN_URL'],
        authorize_url=app.config['OAUTH_AUTHORIZE_URL']
    )

    @app.remote_oauth.tokengetter
    def get_oauth_token():
        return session.get('remote_oauth')

    # NOTE(review): permits OAuth over plain http process-wide — presumably
    # for internal deployments behind TLS termination; confirm.
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = 'true'

    # Start server on unused port
    request_id = generate_request_id()
    listener = get_lisener(auction.worker_defaults["STARTS_PORT"],
                           host=auction.worker_defaults.get("WORKER_BIND_IP", ""))
    app.logger.info(
        "Start server on {0}:{1}".format(*listener.getsockname()),
        extra={"JOURNAL_REQUEST_ID": request_id}
    )
    server = WSGIServer(listener, app,
                        log=_LoggerStream(logger),
                        handler_class=AuctionsWSGIHandler)
    server.start()

    # Set mapping
    mapping_value = "http://{0}:{1}/".format(*listener.getsockname())
    create_mapping(auction.worker_defaults,
                   auction.context['auction_doc_id'],
                   mapping_value)
    # BUG FIX: the format string had only two placeholders while three
    # arguments were passed, so mapping_expire_time was silently dropped
    # from the log line (str.format ignores extra positional args).
    app.logger.info("Server mapping: {} -> {} (expire {})".format(
        auction.context['auction_doc_id'],
        mapping_value,
        mapping_expire_time
    ), extra={"JOURNAL_REQUEST_ID": request_id})

    # Spawn events functionality
    spawn(push_timestamps_events, app,)
    spawn(check_clients, app, )
    return server
def _post_results_data(self, external_data, db_document):
    """
    :param auction_data: data from api
    :param auction_document: data from auction module couchdb
    :return: response from api where data is posted
    """
    request_id = generate_request_id()
    result_bids = deepcopy(db_document["results"])
    posted_result_data = deepcopy(external_data["data"]["bids"])
    # Overwrite each active bid's amount/date with its latest auction bid.
    for index, bid_info in enumerate(external_data["data"]["bids"]):
        if bid_info.get('status', 'active') != 'active':
            continue
        latest_bid = get_latest_bid_for_bidder(result_bids, bid_info["id"])
        target = posted_result_data[index]
        target["value"]["amount"] = latest_bid["amount"]
        target["date"] = latest_bid["time"]
    data = {'data': {'bids': posted_result_data}}
    LOGGER.info("Approved data: {}".format(data), extra={
        "JOURNAL_REQUEST_ID": request_id,
        "MESSAGE_ID": AUCTION_WORKER_API_APPROVED_DATA
    })
    return make_request(self.api_url + '/auction',
                        data=data,
                        user=self.api_token,
                        method='post',
                        request_id=request_id,
                        session=self.session)
def update_source_object(self, external_data, db_document, history_data):
    """
    :param external_data: data that has been gotten from api
    :param db_document: data that has been gotten from auction module db
    :param history_data: audit of auction
    :return: db document with opened bidder names when results were
        approved and the audit document uploaded; otherwise None
    """
    request_id = generate_request_id()
    doc_id = self.upload_auction_history_document(history_data)
    results = self._post_results_data(external_data, db_document)
    if not results:
        LOGGER.info("Auctions results not approved", extra={
            "JOURNAL_REQUEST_ID": request_id,
            "MESSAGE_ID": AUCTION_WORKER_API_AUCTION_RESULT_NOT_APPROVED
        })
        return
    bids_information = get_active_bids(results)
    new_db_document = open_bidders_name(deepcopy(db_document),
                                        bids_information)
    if doc_id and bids_information:
        # TODO: open bidders names in auction protocol
        self.upload_auction_history_document(history_data, doc_id)
        return new_db_document
def end_bid_stage(self, bid):
    # Close the current bids stage for `bid`, update the auction protocol,
    # append the next pause/round stages and (re)schedule the round jobs.
    # `bid` is expected to carry at least 'amount' and 'time' keys.
    request_id = generate_request_id()
    LOGGER.info(
        '---------------- End Bids Stage ----------------',
        extra={"JOURNAL_REQUEST_ID": request_id,
               "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_BID_STAGE}
    )
    # Cleaning up preplanned jobs
    SCHEDULER.remove_all_jobs()
    # Update auction protocol
    auction_protocol = approve_auction_protocol_info_on_bids_stage(
        self.context['auction_document'],
        self.context['auction_protocol']
    )
    self.context['auction_protocol'] = auction_protocol
    with utils.update_auction_document(self.context, self.database) as auction_document:
        # Creating new stages
        bid_document = {
            'value': {'amount': bid['amount']},
            'minimalStep': auction_document['minimalStep']
        }
        # fast_forward shortens stage timing when sandbox_mode is enabled.
        pause, main_round = utils.prepare_auction_stages(
            utils.convert_datetime(bid['time']),
            bid_document,
            self.context.get('deadline'),
            fast_forward=self.context['worker_defaults'].get('sandbox_mode', False)
        )
        auction_document['stages'].append(pause)
        if main_round:
            auction_document['stages'].append(main_round)
        # Updating current stage
        auction_document["current_stage"] += 1
    LOGGER.info('---------------- Start stage {0} ----------------'.format(
        self.context['auction_document']["current_stage"]),
        extra={"JOURNAL_REQUEST_ID": request_id,
               "MESSAGE_ID": AUCTION_WORKER_SERVICE_START_NEXT_STAGE}
    )
    # Adding jobs to scheduler
    deadline = self.context.get('deadline')
    if main_round:
        round_start_date = utils.convert_datetime(main_round['start'])
        # Round end is capped at the overall auction deadline.
        round_end_date = get_round_ending_time(
            round_start_date, ROUND_DURATION, deadline
        )
        self.job_service.add_pause_job(round_start_date)
        self.job_service.add_ending_main_round_job(round_end_date)
    else:
        # No further round could be prepared: end the auction at deadline.
        self.job_service.add_ending_main_round_job(deadline)
def wait_to_end(self):
    """Block the caller until the end-of-auction event is set, then log
    the worker shutdown."""
    request_id = generate_request_id()
    self._end_auction_event.wait()
    extra = {
        "JOURNAL_REQUEST_ID": request_id,
        "MESSAGE_ID": AUCTION_WORKER_SERVICE_STOP_AUCTION_WORKER
    }
    LOGGER.info("Stop auction worker", extra=extra)
def _prepare_auction_protocol(self):
    """
    Prepare a valid auction_protocol object using data of a completed
    auction from the database and save it to the context util.
    """
    request_id = generate_request_id()
    auction = self.datasource.get_data(with_credentials=True)
    self.context['auction_data'] = auction
    auction_protocol = utils.prepare_auction_protocol(self.context)
    auction_document = self.database.get_auction_document(
        self.context['auction_doc_id'])
    # Keep only active bids; a missing 'status' key counts as 'active'.
    self.bidders_data = [{
        'id': bid['id'],
        'date': bid['date'],
        'value': bid['value'],
        'owner': bid.get('owner', ''),
        'bidNumber': bid.get('bidNumber')
    } for bid in auction['data'].get('bids', [])
        if bid.get('status', 'active') == 'active']
    bids = deepcopy(self.bidders_data)
    bids_info = sorting_start_bids_by_amount(bids)
    self._set_mapping()
    # NOTE(review): bidNumber may be None (bid.get above); the sort then
    # relies on Python 2 None ordering — confirm bids always carry it.
    bids_info.sort(key=lambda b: b['bidNumber'])
    for index, bid in enumerate(bids_info):
        # Initial bids all start from the auction's opening amount.
        auction_protocol['timeline']['auction_start'][
            'initial_bids'].append({
                'bidder': bid['id'],
                'date': bid['date'],
                'amount': auction_document['value']['amount'],
                'bid_number': self.bids_mapping[bid['id']]
            })
    utils.approve_auction_protocol_info(auction_document, auction_protocol)
    utils.approve_auction_protocol_info_on_announcement(
        auction_document, auction_protocol)
    # Start/result timestamps are taken from the first and last stages.
    auction_protocol['timeline']['auction_start'][
        'time'] = auction_document['stages'][0]['start']
    auction_protocol['timeline']['results']['time'] = auction_document[
        'stages'][-1]['start']
    LOGGER.info('Audit data: \n {}'.format(yaml_dump(auction_protocol)),
                extra={"JOURNAL_REQUEST_ID": request_id})
    LOGGER.info(auction_protocol)
    self.context['auction_document'] = auction_document
    self.context['auction_protocol'] = auction_protocol
def switch_to_next_stage(self):
    """Advance the auction document to its next stage under the server
    lock and log the transition."""
    request_id = generate_request_id()
    with lock_server(self.context['server_actions']):
        with update_auction_document(self.context, self.database) as auction_document:
            auction_document["current_stage"] += 1
    message = '---------------- Start stage {0} ----------------'.format(
        self.context['auction_document']["current_stage"])
    LOGGER.info(message, extra={
        "JOURNAL_REQUEST_ID": request_id,
        "MESSAGE_ID": AUCTION_WORKER_SERVICE_START_NEXT_STAGE
    })
def end_bid_stage(self, bid):
    # Close the current bids stage for `bid`, append the next pause/round
    # stages and (re)schedule round jobs against a same-day DEADLINE_HOUR
    # cutoff. `bid` is expected to carry 'amount' and 'time' keys.
    request_id = generate_request_id()
    LOGGER.info('---------------- End Bids Stage ----------------', extra={
        "JOURNAL_REQUEST_ID": request_id,
        "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_BID_STAGE
    })
    # Cleaning up preplanned jobs
    SCHEDULER.remove_all_jobs()
    with utils.update_auction_document(self.context, self.database) as auction_document:
        # Creating new stages
        bid_document = {
            'value': {
                'amount': bid['amount']
            },
            'minimalStep': auction_document['minimalStep']
        }
        pause, main_round = utils.prepare_auction_stages(
            utils.convert_datetime(bid['time']), bid_document)
        auction_document['stages'].append(pause)
        if main_round:
            auction_document['stages'].append(main_round)
        # Updating current stage
        auction_document["current_stage"] += 1
    LOGGER.info('---------------- Start stage {0} ----------------'.format(
        self.context['auction_document']["current_stage"]), extra={
            "JOURNAL_REQUEST_ID": request_id,
            "MESSAGE_ID": AUCTION_WORKER_SERVICE_START_NEXT_STAGE
        })
    # Adding jobs to scheduler
    # The daily deadline is today at DEADLINE_HOUR in the worker timezone.
    deadline = set_specific_hour(datetime.now(TIMEZONE), DEADLINE_HOUR)
    if main_round:
        round_start_date = utils.convert_datetime(main_round['start'])
        # Round end is capped at the deadline.
        round_end_date = get_round_ending_time(round_start_date,
                                               ROUND_DURATION, deadline)
        self.job_service.add_pause_job(round_start_date)
        self.job_service.add_ending_main_round_job(round_end_date)
    else:
        # No further round could be prepared: end the auction at deadline.
        self.job_service.add_ending_main_round_job(deadline)
def end_auction(self):
    # Finalize the auction: stop the web server, drop the url mapping,
    # append announcement/end stages, publish results upstream and signal
    # the end-of-auction event.
    request_id = generate_request_id()
    LOGGER.info('---------------- End auction ----------------', extra={
        "JOURNAL_REQUEST_ID": request_id,
        "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_AUCTION
    })
    LOGGER.debug("Stop server", extra={"JOURNAL_REQUEST_ID": request_id})
    if self.context.get('server'):
        self.context['server'].stop()
    delete_mapping(self.context['worker_defaults'],
                   self.context['auction_doc_id'])
    LOGGER.debug("Clear mapping", extra={"JOURNAL_REQUEST_ID": request_id})
    stage = {
        'start': datetime.now(TIMEZONE).isoformat(),
        'type': PREANNOUNCEMENT,
    }
    with update_auction_document(self.context, self.database) as auction_document:
        auction_document["stages"].append(stage)
        auction_document["current_stage"] = len(
            auction_document["stages"]) - 1
    auction_protocol = approve_auction_protocol_info_on_announcement(
        self.context['auction_document'], self.context['auction_protocol'])
    self.context['auction_protocol'] = auction_protocol
    LOGGER.info('Audit data: \n {}'.format(
        yaml_dump(self.context['auction_protocol'])),
        extra={"JOURNAL_REQUEST_ID": request_id})
    LOGGER.info(self.context['auction_protocol'])
    # Push results to the upstream api; on success the datasource returns
    # the refreshed document (a dict).
    result = self.datasource.update_source_object(
        self.context['auction_data'],
        self.context['auction_document'],
        self.context['auction_protocol'])
    if result and isinstance(result, dict):
        self.context['auction_document'] = result
    auction_end = datetime.now(TIMEZONE)
    stage = prepare_end_stage(auction_end)
    with update_auction_document(self.context, self.database) as auction_document:
        auction_document["stages"].append(stage)
        auction_document["current_stage"] = len(
            auction_document["stages"]) - 1
        auction_document['endDate'] = auction_end.isoformat()
    self.context['end_auction_event'].set()
def _upload_audit_file_with_document_service(self, history_data, doc_id=None):
    """Upload the audit yaml to the document service, then register it
    with the api.

    :param history_data: audit object to serialise as yaml
    :param doc_id: existing document id (update via PUT) or None (POST new)
    :return: the api document id on success, None otherwise
    """
    request_id = generate_request_id()
    audit_file = ('audit_{}.yaml'.format(self.source_id),
                  yaml_dump(history_data, default_flow_style=False))
    ds_response = make_request(self.document_service_url,
                               files={'file': audit_file},
                               method='post',
                               user=self.ds_credential["username"],
                               password=self.ds_credential["password"],
                               session=self.session_ds,
                               retry_count=3)
    # Existing document -> PUT to its path, otherwise POST a new one.
    if doc_id:
        method = 'put'
        path = self.api_url + '/documents/{}'.format(doc_id)
    else:
        method = 'post'
        path = self.api_url + '/documents'
    response = make_request(path, data=ds_response,
                            user=self.api_token,
                            method=method,
                            request_id=request_id,
                            session=self.session,
                            retry_count=2)
    if not response:
        LOGGER.warning("Audit log not approved.", extra={
            "JOURNAL_REQUEST_ID": request_id,
            "MESSAGE_ID": AUCTION_WORKER_API_AUDIT_LOG_NOT_APPROVED
        })
        return
    doc_id = response["data"]['id']
    LOGGER.info("Audit log approved. Document id: {}".format(doc_id),
                extra={
                    "JOURNAL_REQUEST_ID": request_id,
                    "MESSAGE_ID": AUCTION_WORKER_API_AUDIT_LOG_APPROVED
                })
    return doc_id
def get_data(self, public=True, with_credentials=False):
    """Fetch tender data from the api.

    :param public: when False, hit the /auction endpoint with the worker
        token; when True, hit the public endpoint.
    :param with_credentials: for public requests only — pass the worker
        token instead of anonymous access.
    :return: tender data as returned by get_tender_data
    """
    request_id = generate_request_id()
    if public:
        user = self.api_token if with_credentials else ''
        return get_tender_data(self.api_url,
                               request_id=request_id,
                               user=user,
                               session=self.session)
    return get_tender_data(self.api_url + '/auction',
                           user=self.api_token,
                           request_id=request_id,
                           session=self.session)
def start_auction(self):
    """Open the auction: stamp the protocol start time, synchronise
    auction info and switch the document to stage 0."""
    request_id = generate_request_id()
    start_time = datetime.now(TIMEZONE).isoformat()
    self.auction_protocol['timeline']['auction_start']['time'] = start_time
    LOGGER.info('---------------- Start auction ----------------', extra={
        "JOURNAL_REQUEST_ID": request_id,
        # NOTE(review): message id is END_FIRST_PAUSE — presumably the
        # auction start coincides with the first pause ending; confirm.
        "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_FIRST_PAUSE
    })
    self.synchronize_auction_info()
    with utils.lock_server(
            self.context['server_actions']), utils.update_auction_document(
            self.context, self.database) as auction_document:
        self._prepare_initial_bids(auction_document)
        auction_document["current_stage"] = 0
        LOGGER.info("Switched current stage to {}".format(
            auction_document['current_stage']))
def save_auction_document(self, auction_document, auction_doc_id): """ Save provided auction document to couchdb database :param auction_document: auction document object :param auction_doc_id: identifier of document in database :return: """ request_id = generate_request_id() public_document = deepcopy(dict(auction_document)) retries = self.db_request_retries while retries: try: self._update_revision(public_document, auction_doc_id) response = self._db.save(public_document) if len(response) == 2: LOGGER.info( "Saved auction document {0} with rev {1}".format( *response), extra={ "JOURNAL_REQUEST_ID": request_id, "MESSAGE_ID": AUCTION_WORKER_DB_SAVE_DOC }) auction_document['_rev'] = response[1] return response except HTTPError, e: LOGGER.error( "Error while save document: {}".format(e), extra={'MESSAGE_ID': AUCTION_WORKER_DB_SAVE_DOC_ERROR}) except Exception, e: errcode = e.args[0] if errcode in RETRYABLE_ERRORS: LOGGER.error( "Error while save document: {}".format(e), extra={'MESSAGE_ID': AUCTION_WORKER_DB_SAVE_DOC_ERROR}) else: LOGGER.critical( "Unhandled error: {}".format(e), extra={ 'MESSAGE_ID': AUCTION_WORKER_DB_SAVE_DOC_UNHANDLED_ERROR })
def end_auction(self):
    # Finalize the auction (audit-based variant): stop the server, clear
    # the mapping, append the end stage, push results upstream, persist the
    # final document and signal the end-of-auction event.
    request_id = generate_request_id()
    LOGGER.info('---------------- End auction ----------------', extra={
        "JOURNAL_REQUEST_ID": request_id,
        "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_AUCTION
    })
    LOGGER.debug("Stop server", extra={"JOURNAL_REQUEST_ID": request_id})
    if self.context.get('server'):
        self.context['server'].stop()
    delete_mapping(self.context['worker_defaults'],
                   self.context['auction_doc_id'])
    LOGGER.debug("Clear mapping", extra={"JOURNAL_REQUEST_ID": request_id})
    auction_end = datetime.now(TIMEZONE)
    stage = prepare_end_stage(auction_end)
    auction_document = self.context['auction_document']
    auction_document["stages"].append(stage)
    auction_document["current_stage"] = len(
        self.context['auction_document']["stages"]) - 1
    # TODO: work with audit
    LOGGER.info('Audit data: \n {}'.format(yaml_dump(
        self.context['audit'])), extra={"JOURNAL_REQUEST_ID": request_id})
    LOGGER.info(self.context['audit'])
    auction_document['endDate'] = auction_end.isoformat()
    # Push results to the upstream api; a dict result is the refreshed
    # auction document.
    result = self.datasource.update_source_object(
        self.context['auction_data'], auction_document,
        self.context['audit'])
    if result:
        if isinstance(result, dict):
            self.context['auction_document'] = result
        self.database.save_auction_document(
            self.context['auction_document'],
            self.context['auction_doc_id'])
    self.context['end_auction_event'].set()
def _set_auction_data(self, prepare=False):
    # Get auction from api and set it to _auction_data.
    # When the non-public fetch returns nothing, the auction is treated as
    # cancelled: the stored document is flagged (current_stage = -100) and
    # the worker process exits.
    request_id = generate_request_id()
    if prepare:
        self._auction_data = self.datasource.get_data()
    else:
        self._auction_data = {'data': {}}
        auction_data = self.datasource.get_data(public=False)
        if auction_data:
            self._auction_data['data'].update(auction_data['data'])
            self.startDate = utils.convert_datetime(
                self._auction_data['data']['auctionPeriod']['startDate'])
            del auction_data
        else:
            auction_document = self.database.get_auction_document(
                self.context['auction_doc_id'])
            if auction_document:
                # Sentinel stage value marking a cancelled auction.
                auction_document["current_stage"] = -100
                self.database.save_auction_document(
                    auction_document, self.context['auction_doc_id'])
                LOGGER.warning("Cancel auction: {}".format(
                    self.context['auction_doc_id']), extra={
                        "JOURNAL_REQUEST_ID": request_id,
                        "MESSAGE_ID": AUCTION_WORKER_API_AUCTION_CANCEL
                    })
            else:
                LOGGER.error("Auction {} not exists".format(
                    self.context['auction_doc_id']), extra={
                        "JOURNAL_REQUEST_ID": request_id,
                        "MESSAGE_ID": AUCTION_WORKER_API_AUCTION_NOT_EXIST
                    })
            # Release any waiter and terminate the worker process.
            self._end_auction_event.set()
            sys.exit(1)
def get_auction_document(self, auction_doc_id): """ Retrieve auction document from couchdb database using provided identifier :param auction_doc_id: identifier of document in couchdb database :return: auction document object from couchdb database """ request_id = generate_request_id() retries = self.db_request_retries while retries: try: public_document = self._db.get(auction_doc_id) if public_document: LOGGER.info( "Get auction document {0[_id]} with rev {0[_rev]}". format(public_document), extra={ "JOURNAL_REQUEST_ID": request_id, "MESSAGE_ID": AUCTION_WORKER_DB_GET_DOC }) return public_document except HTTPError, e: LOGGER.error( "Error while get document: {}".format(e), extra={'MESSAGE_ID': AUCTION_WORKER_DB_GET_DOC_ERROR}) except Exception, e: errcode = e.args[0] if errcode in RETRYABLE_ERRORS: LOGGER.error( "Error while get document: {}".format(e), extra={'MESSAGE_ID': AUCTION_WORKER_DB_GET_DOC_ERROR}) else: LOGGER.critical( "Unhandled error: {}".format(e), extra={ 'MESSAGE_ID': AUCTION_WORKER_DB_GET_DOC_UNHANDLED_ERROR })
def generate_request_id(self):
    # Refresh the journal request id stored on the instance.
    # NOTE: this method shadows the module-level generate_request_id()
    # helper that it delegates to.
    self.request_id = generate_request_id()