def test_get_candidate(self):
    """Fetch candidate #1, assert it exists, and dump its evaluations."""
    svc = Service.get_instance()
    candidate = svc.get_candidate(candidate_id=1)
    self.assertIsNotNone(candidate, "Candidate should not be None")
    print(candidate.id, candidate.name)
    print("Evaluation size", len(candidate.evaluations))
    # Accumulate and echo each grade for manual inspection.
    grade_total = 0
    for evaluation in candidate.evaluations:
        grade_total += evaluation.grade
        print(evaluation.grade)
    print("Grade sum", grade_total)
def parse_services(configuration):
    """Build one Service per (name, config) entry.

    Ports are assigned sequentially starting at 5000, in iteration order
    of *configuration*. Each service gets its home route plus every route
    listed under config['routes'].
    """
    services = {}
    # enumerate(start=5000) replaces the original manual port counter.
    for port, (name, config) in enumerate(configuration, start=5000):
        service = Service(name)
        service.exposed_port = port
        service.load_home_route()
        for route in config['routes']:
            service.load_route(route)
        services[name] = service
    return services
def test_get_candidates(self):
    """The service must return a non-null candidate collection."""
    svc = Service.get_instance()
    candidates = svc.all_candidates()
    self.assertIsNotNone(candidates, "Candidates should not be null")
    # Echo each candidate for manual inspection.
    for candidate in candidates:
        print(candidate.id, candidate.name)
def test_missing_evaluators(self):
    """Print every evaluator the service reports as missing."""
    svc = Service.get_instance()
    for evaluator in svc.get_missing_evaluators():
        print(evaluator)
from flask import Flask, request from nltk.corpus import stopwords from src.service import Service app = Flask(__name__) app.config["DEBUG"] = True #stop_words = set(stopwords.words('english')) address = "mongodb://*****:*****@app.route('/api/q1', methods=['GET']) def get_task1(): if 'country' in request.args: country = str(request.args['country']) else: country = "all" return obj.task1(country) @app.route('/api/q1_spark', methods=['GET']) def get_task1_spark(): if 'country' in request.args: country = str(request.args['country']) else: country = "all" return obj.task1_spark(country) @app.route('/api/q2', methods=['GET'])
from src.service import load_etox_model
from src.service import Service
from src.service import CHECKPOINTS_BASEDIR, FRAMEWORK_BASEDIR
import os

# Resolve model directories relative to this file so the script works
# regardless of the current working directory.
root = os.path.dirname(os.path.realpath(__file__))
framework_dir = os.path.join(root, "model", FRAMEWORK_BASEDIR)
checkpoints_dir = os.path.join(root, "model", CHECKPOINTS_BASEDIR)

# Load the eTOX model and persist it inside a Service bundle.
mdl = load_etox_model(framework_dir, checkpoints_dir)
service = Service()
service.pack("model", mdl)
service.save()
from src.service import Service

# Persist a Service bundle whose "model" slot is intentionally empty;
# presumably a placeholder artifact -- confirm against the deploy pipeline.
service = Service()
service.pack("model", None)
service.save()
def __init__(self):
    # Construct the controller and create the module-level Service.
    print('Controller class')
    # NOTE(review): binds the Service to a module GLOBAL rather than an
    # instance attribute -- presumably other code in this module reads the
    # global `service`; confirm before refactoring to `self.service`.
    global service
    service = Service()
def __init__(self):
    """Bind the service singleton and set up the message work queues."""
    self.user_id = "me"  # API alias for the authenticated account
    self.service = Service().instance()
    # Deques give O(1) appends/pops at both ends for the work queues.
    self.messagesQueue = collections.deque()
    self.failedMessagesQueue = collections.deque()
class Processor:
    # Talk to google api, fetch results and decorate them

    def __init__(self):
        # Gmail-style service handle plus work queues for batch callbacks.
        self.service = Service().instance()
        self.user_id = "me"  # API alias for the authenticated account
        self.messagesQueue = collections.deque()
        self.failedMessagesQueue = collections.deque()

    def get_messages(self):
        """Return all message stubs for the user, following pagination.

        Output format:
            [{'id': '13c...7', 'threadId': '13c...7'}, ...]
        """
        # if os.path.exists("messages.pickle"):
        #     with open("messages.pickle", "rb") as token:
        #         messages = pickle.load(token)
        #         return messages
        # includeSpamTrash
        # labelIds
        response = self.service.users().messages().list(
            userId=self.user_id).execute()
        messages = []
        # NOTE(review): est_max is computed but never used below -- the *5
        # fudge factor suggests it once sized the progress bar; confirm.
        est_max = response["resultSizeEstimate"] * 5
        progress = Counter(
            f"{helpers.loader_icn} Fetching messages page ".ljust(
                _progressPadding, " "))
        if "messages" in response:
            messages.extend(response["messages"])
        # Keep fetching while the API hands back a continuation token.
        while "nextPageToken" in response:
            page_token = response["nextPageToken"]
            response = (self.service.users().messages().list(
                userId=self.user_id, pageToken=page_token).execute())
            messages.extend(response["messages"])
            progress.next()
        progress.finish()
        return messages

    def process_message(self, request_id, response, exception):
        """Batch-request callback: queue metadata, or record the failure.

        Failed request URIs go to failedMessagesQueue; successes are reduced
        to {id, labels, fields:{from, date}} and appended to messagesQueue.
        """
        if exception is not None:
            self.failedMessagesQueue.append(exception.uri)
            return
        headers = response["payload"]["headers"]
        # First matching header value, or None when the header is absent.
        _date = next((header["value"] for header in headers
                      if header["name"] == "Date"), None)
        _from = next((header["value"] for header in headers
                      if header["name"] == "From"), None)
        self.messagesQueue.append({
            "id": response["id"],
            "labels": response["labelIds"],
            "fields": {
                "from": _from,
                "date": _date
            },
        })

    def get_metadata(self, messages):
        """Fetch metadata for all messages via batched get requests.

        1. Create a batch get-message request for all messages
        2. Process the returned output (see process_message callback)

        Output format (per queued entry):
            {
                'id': '16f....427',
                'labels': ['UNREAD', 'CATEGORY_UPDATES', 'INBOX'],
                'fields': [
                    {'name': 'Date', 'value': 'Tue, 24 Dec 2019 22:13:09 +0000'},
                    {'name': 'From', 'value': 'Coursera <*****@*****.**>'}
                ]
            }
        """
        # if os.path.exists("success.pickle"):
        #     with open("success.pickle", "rb") as token:
        #         self.messagesQueue = pickle.load(token)
        #         return
        progress = IncrementalBar(
            f"{helpers.loader_icn} Fetching messages meta data ".ljust(
                _progressPadding, " "),
            max=len(messages),
        )
        # 250 per batch -- presumably chosen to stay under the API's
        # batch-size limit; confirm against the service quota docs.
        for messages_batch in helpers.chunks(messages, 250):
            # for messages_batch in [messages[0:1000]]:
            batch = self.service.new_batch_http_request()
            for message in messages_batch:
                msg_id = message["id"]
                batch.add(
                    self.service.users().messages().get(userId=self.user_id,
                                                        id=msg_id),
                    callback=self.process_message,
                )
            batch.execute()
            progress.next(len(messages_batch))
        progress.finish()
from src.service import Service

# Build and save an empty (model-less) Service bundle.
service = Service()
service.pack('model', None)
service.save()