def save_results(self):
    # TODO: finish save results in logger
    inp = input("Do you want to save as json, csv or print: ")
    if inp == "json":
        Logger().save_to_json(self.most_common_words)
        print("Saved to message.json file.")
    elif inp == "csv":
        Logger().save_to_csv(self.most_common_words)
        print("Saved to message.csv file.")
    elif inp == "print":
        print(self.most_common_words)
    else:
        pass
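# The TODO above points at Logger.save_to_json / Logger.save_to_csv, which are
# not shown in this section. Below is a minimal sketch of what they might look
# like, assuming `words` is a list of (word, count) pairs and the file names
# match the messages printed above; everything here is an assumption, not the
# project's actual Logger implementation.
import csv
import json


class Logger:
    def save_to_json(self, words, path="message.json"):
        # dump the word/count pairs as a JSON object
        with open(path, "w", encoding="utf-8") as f:
            json.dump(dict(words), f, indent=2)

    def save_to_csv(self, words, path="message.csv"):
        # write one "word,count" row per entry
        with open(path, "w", newline="", encoding="utf-8") as f:
            csv.writer(f).writerows(words)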
def create_test_set(test_data):
    try:
        print("*** DOING TEST SET ***", file=sys.stderr)
        # materialize the dict views so the results are indexable lists
        X_test = list(test_data.values())
        Y_test = list(test_data.keys())
        return X_test, Y_test
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        l = Logger()
        l.log_error(
            "Exception in create-test-set",
            traceback.format_exc() + "\n\n" + fname + " " + str(exc_tb.tb_lineno))
        response = {
            'Response': 'FAILED',
            'Reason': "Exception in create-test-set process"
        }
        return HttpResponse(json.dumps(response))
def __init__(self, no_of_testcases=100, verbose=True, nb=None, bw=None):
    self.logger = Logger('Comparer', 'logs\\comparer.log', is_verbose=verbose)
    self.load_html_structure()
    if nb is None:
        self.nb = NaiveBayes(verbose=False, test_set_count=no_of_testcases,
                             no_of_grams=4)
        self.nb.ready()
    else:
        self.nb = nb
        self.nb.logger.is_verbose = False
    if bw is None:
        self.bw = BagOfWordSentiment(verbose=False, no_of_grams=4)
        self.bw.ready()
    else:
        self.bw = bw
        self.bw.logger.is_verbose = False
    self.no_of_testcases = no_of_testcases
    self.nb_correct, self.bw_correct, self.tb_correct = 0, 0, 0
    self.nb_wrong, self.bw_wrong, self.tb_wrong = 0, 0, 0
    self.nb_accuracy, self.bw_accuracy, self.tb_accuracy = 0, 0, 0
    self.counter = 0
    self.testcases = dict()
def do_GET(self):
    log = Logger()
    # log the received command and the client ip address and port number.
    log.log_info(f"{self.command} received from {self.client_address}")
    # parse the url
    url = urlparse.urlparse(self.path)
    # check that the client called a known endpoint.
    if url.path == '/geocode':
        # check if the address query string was passed
        if urlparse.parse_qs(url.query).get('address'):
            # send 200 : Ok status before writing the body.
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()
            # get the geocodes of the passed address
            address = urlparse.parse_qs(url.query)['address'][0].replace(" ", "+")
            self._get_geocode(address)
        else:
            # send 400 : Bad Request status and log the error.
            self.send_response(400)
            self.end_headers()
            log.log_error("address parameter not passed")
    else:
        # send 404 : Not Found status and log the error.
        self.send_response(404)
        self.end_headers()
        log.log_error("Unknown service requested.")
def team_ranker_ovr(data, greater_than, field, all_time_rpg, standard_deviation,
                    average_deviation, playoff_data=None):
    logger = Logger(os.path.join(log_prefix, "import_data", "team_ranker_ovr.log"))
    logger.log("Calculating overall team ranks: " + field)
    start_time = time.time()
    final_data = {}
    if field != "ovrRank_ovr":
        for year, value in data.items():
            final_data[year] = []
            for ent in value:
                if field == "offRank_ovr":
                    final_data[year].append(
                        [ent[0], (ent[1] / all_time_rpg) /
                         (standard_deviation[str(year)] / average_deviation)])
                else:
                    final_data[year].append(
                        [ent[0], (ent[1] / all_time_rpg) *
                         (standard_deviation[str(year)] / average_deviation)])
    else:
        for year, value in data.items():
            final_data[year] = []
            for ent in value:
                for team_value in data[year]:
                    if team_value[0] == ent[0]:
                        playoff_bump = 1.0
                        for accomplishment, team_id in playoff_data.items():
                            if team_id == ent[0]:
                                # world-series champions get a double bump
                                if accomplishment == 'ws_champ':
                                    playoff_bump += 0.005
                                playoff_bump += 0.005
                        final_data[year].append(
                            [ent[0], (ent[1] / (standard_deviation[str(year)] /
                                                average_deviation)) * playoff_bump])
    write_to_file(final_data, greater_than, field)
    total_time = time_converter(time.time() - start_time)
    logger.log("\tTime = " + total_time + '\n\n')
def train_gmm(self):
    all_data = self.create_structure()
    path = os.path.dirname(os.path.abspath(__file__))
    try:
        keys = list(all_data.keys())
        n_classes = len(np.unique(keys))
        gmm_classifier = mixture.GMM(n_components=n_classes,
                                     covariance_type='full',
                                     init_params='wmc', min_covar=0.001,
                                     n_init=1, n_iter=100, params='wmc',
                                     random_state=None, thresh=None, tol=0.001)
        for data in all_data.values():
            for val in data.values():
                f1 = val.get_object(2)
                f2 = val.get_object(3)
                # materialize the feature pairs so len() works and the loop
                # variable `data` is not silently rebound
                features = list(zip(f1, f2))
                if len(features) >= n_classes:
                    gmm_classifier.fit(features)
        # save the training data
        path_trainset = os.path.join(path, self.trainset_name)
        with open(path_trainset, 'wb') as fid:
            cPickle.dump(all_data, fid)
        # save the classifier
        model_directory = os.path.join(path, self.model_name)
        with open(model_directory, 'wb') as fid:
            cPickle.dump(gmm_classifier, fid)
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        l = Logger()
        l.log_error(
            "Exception in GMM-train model",
            traceback.format_exc() + "\n\n" + fname + " " + str(exc_tb.tb_lineno))
        response = {
            'Response': 'FAILED',
            'Reason': "Exception in GMM-train-model process"
        }
        return HttpResponse(json.dumps(response))
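# Note: mixture.GMM above is the pre-0.18 scikit-learn API and has since been
# removed. On current scikit-learn the rough equivalent is GaussianMixture; the
# parameter mapping below (n_iter -> max_iter, min_covar -> reg_covar, 'wmc'
# initialization is the default) is a sketch, not a verified drop-in
# replacement for the training code above.
from sklearn.mixture import GaussianMixture

gmm_classifier = GaussianMixture(n_components=n_classes,
                                 covariance_type='full',
                                 reg_covar=0.001,
                                 n_init=1,
                                 max_iter=100,
                                 tol=0.001,
                                 random_state=None)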
def __init__(self, settings_file=None, *args, **kwargs):
    '''
    Generate base frame and each page, bind them in a list
    '''
    self.logger = Logger()
    self.baseUrl = ''         # NOTE: Need to be overwritten
    self.destRootFolder = ''  # NOTE: Need to be overwritten
    self.destFolder = ''
    self.auth = Authenticator()
    self.pathParser = PathParser()
    # load config from .flash_pvt file
    self.load_config_file(settings_file)
def main():
    cli = CommandLineInterface(sys.argv)
    source_type = cli.get_source_type()
    part_of_speech = cli.get_part_of_speech()
    words_count = cli.get_words_count()
    if source_type == 'folder':
        parser = FolderParser(part_of_speech, words_count)
    elif source_type == 'web':
        parser = WebParser(part_of_speech, words_count)
    else:
        # guard against `parser` being unbound below
        raise ValueError(f"Unknown source type: {source_type}")
    most_common_words = parser.parse_most_common_words(
        part_of_speech, words_count, directories=['target_folder'])
    Logger().message(debug_message=most_common_words)
    print("Saved into a file 'message.log'")
def get_geocodes(self, address_to_find):
    # log the usage of the google service.
    log = Logger()
    log.log_info(f"Calling {self.geocoding_service_used} service")
    # create a request object
    req = request.Request(self.geocoding_api_url + address_to_find)
    try:
        # open the URL; the context manager closes the connection on exit,
        # so no explicit close() call is needed.
        with request.urlopen(req) as api_response:
            # get the HTTP status code from the service
            self.status = api_response.getcode()
            if api_response.status == 200:
                self.status_desc = "Ok"
                # read the response and parse it as json.
                json_response = json.loads(api_response.read())
                # assign the variables with values returned from the api call.
                location = json_response["results"][0]["geometry"]["location"]
                self.latitude = location["lat"]
                self.longitude = location["lng"]
                self.full_address = json_response["results"][0][
                    "formatted_address"]
                # log the result.
                log.log_info(
                    f"Status= {api_response.status}, lat={self.latitude}, "
                    f"lng={self.longitude}")
    except Exception as ex:
        # set the status to 500 - internal server error.
        self.status_desc = ex
        self.status = 500
        # log the critical error.
        log.log_critical(str(ex))
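# For reference, the indexing above follows the Google Geocoding API response
# shape, roughly as sketched below (abridged, values hypothetical):
# {
#   "results": [
#     {
#       "formatted_address": "1600 Amphitheatre Pkwy, Mountain View, CA, USA",
#       "geometry": {"location": {"lat": 37.4224, "lng": -122.0842}}
#     }
#   ],
#   "status": "OK"
# }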
def __init__(self, no_of_grams=4, verbose=True, no_of_testcases=1000):
    self.verbose = verbose
    self.logger = Logger('BagOfWordSentiment', 'logs\\bag_of_words.log',
                         is_verbose=self.verbose)
    self.no_of_grams = no_of_grams
    self.double_negations, self.double_negations_collection = set(), set()
    self.negations, self.negation_collection = set(), set()
    self.positive_words, self.positive_word_collection = set(), set()
    self.negative_words, self.negative_word_collection = set(), set()
    self.no_of_testcases = no_of_testcases
    self.positve_test_bag = list()
    self.negative_test_bag = list()
def make_ellipses(self, ax, native_f1, native_f2, predicted_f1, predicted_f2):
    try:
        print("*** MAKE ELLIPSES ***", file=sys.stderr)
        x1 = min(native_f1)
        x2 = max(native_f1)
        y1 = min(native_f2)
        y2 = max(native_f2)
        centroid_x = (x2 + x1) / 2
        centroid_y = (y2 + y1) / 2
        # euclidean distance from the ellipse centroid to the predicted point
        x_2 = math.pow((centroid_x - predicted_f1), 2)
        y_2 = math.pow((centroid_y - predicted_f2), 2)
        distance_from_centroid = math.sqrt(x_2 + y_2)
        ellipse = mpl.patches.Ellipse(xy=(centroid_x, centroid_y),
                                      width=(x2 - x1) * 1.4,
                                      height=(y2 - y1) * 1.2)
        ellipse.set_edgecolor('r')
        ellipse.set_facecolor('none')
        ellipse.set_clip_box(ax.bbox)
        ellipse.set_alpha(0.5)
        ax.add_artist(ellipse)
        print("*** ELLIPSES DONE ***", file=sys.stderr)
        return distance_from_centroid
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        l = Logger()
        l.log_error(
            "Exception in GMM-make ellipse",
            traceback.format_exc() + "\n\n" + fname + " " + str(exc_tb.tb_lineno))
        response = {
            'Response': 'FAILED',
            'Reason': "Exception in GMM-make-ellipse process"
        }
        return HttpResponse(json.dumps(response))
def extract_data(audio_file):
    print("*** DOING EXTRACT DATA ***", file=sys.stderr)
    # need to change speakerfile for the female gender
    path = os.path.dirname(os.path.abspath(__file__))
    path_fave = path + "/libraries/FAVE_extract/"
    config_file = ("--outputFormat txt --candidates --speechSoftware praat "
                   "--formantPredictionMethod default "
                   "--measurementPointMethod faav --nFormants 3 "
                   "--minVowelDuration 0.001 --nSmoothing 12 --remeasure "
                   "--vowelSystem phila --speaker " +
                   path_fave + "speakerinfo.speakerfile")
    textgrid_file_directory = path + "/data/"
    output_file_directory = path + "/data/"
    wav_file = audio_file
    wav_file_cleaned = wav_file.replace('.wav', '.TextGrid')
    (dir_name, file_name) = os.path.split(wav_file_cleaned)
    textgrid_file = os.path.join(textgrid_file_directory, file_name)
    output_file = os.path.join(output_file_directory,
                               file_name.replace('.TextGrid', '.txt'))
    command = ("python " + path_fave + "bin/extractFormants.py " + config_file +
               " " + audio_file + " " + textgrid_file + " " + output_file)
    try:
        # run the external FAVE extractor and wait for it to finish
        proc = Popen(command, shell=True)
        proc.wait()
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        l = Logger()
        l.log_error(
            "Exception in extract-formants",
            traceback.format_exc() + "\n\n" + fname + " " + str(exc_tb.tb_lineno))
        response = {
            'Response': 'FAILED',
            'Reason': "Exception in extract-formants process"
        }
        return HttpResponse(json.dumps(response))
def run(host, port):
    log = Logger()
    try:
        # create the http server.
        server = http.server.HTTPServer((host, port), RequestHandler)
        print(f"Geocoding Proxy Service - v{config.service_version}")
        print(f"Server Started on port :{port}")
        # log the start of the server.
        log.log_info(f"Server Started on port :{port}")
        # listen forever.
        server.serve_forever()
    except Exception as ex:
        # in case of exception, log the incident as critical.
        msg = f"Server Start error - {str(ex)}"
        log.log_critical(msg)
        exit(1)
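# A minimal entry point for the service above might look like this; the host
# and port values are assumptions (the real service presumably reads them
# from config):
if __name__ == '__main__':
    run('localhost', 8080)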
def get_native_vowels(self, sentence):
    try:
        path = os.path.dirname(os.path.abspath(__file__))
        label_path = path + self.native_vowels
        sentences_path = path + self.native_sentences
        s = sentence.lower()

        # one space-separated list of vowels per line (plain line iteration
        # replaces the original csv.reader with a newline "delimiter", which
        # the Python 3 csv module rejects)
        vowels = []
        with open(label_path, 'r') as vowels_file:
            for line in vowels_file:
                vowels.append(line.strip().split(' '))

        # one sentence per line
        sentences = []
        with open(sentences_path, 'r') as sentences_file:
            for line in sentences_file:
                sentences.append(line.strip())

        result = dict(zip(sentences, vowels))
        return result[s]
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        l = Logger()
        l.log_error(
            "Exception in GMM-get-native-vowels-struct",
            traceback.format_exc() + "\n\n" + fname + " " + str(exc_tb.tb_lineno))
        response = {
            'Response': 'FAILED',
            'Reason': "Exception in GMM-get-native-vowels process"
        }
        return HttpResponse(json.dumps(response))
def __init__(self, verbose=True, training_cases=2500, testcases=500):
    self.verbose = verbose
    self.training_cases = training_cases
    self.testcases = testcases
    self.training = list()
    self.test = list()
    self.frequency = dict()
    self.stop_words = self.get_stop_words()
    self.positive_words = 0
    self.negative_words = 0
    self.positive_sentence_count = 0
    self.negative_sentence_count = 0
    self.total_sentences = 0
    self.logger = Logger('NaiveBayers', 'NaiveBayers.log')
    self.filenames = [
        'res\\benchmark\\yelp_labelled.txt',
        'res\\benchmark\\amazon_cells_labelled.txt',
        'res\\benchmark\\imdb_labelled.txt'
    ]
def _get_geocode(self, param):
    log = Logger()
    # log the address received
    log.log_info(f"search for {param}")
    # get the geocodes using the here service.
    service = Here()
    service.get_geocodes(param)
    # if the returned status is not 200: ok, fall back to the google service.
    if service.status != 200:
        service = Google()
        service.get_geocodes(param)
    # build metadata to attach to the result.
    meta = {'status': service.status,
            'status_desc': str(service.status_desc),
            'service_used': service.geocoding_service_used,
            'requested_address': param,
            'timestamp': service.timestamp}
    # compose the final result.
    result = {'lat': service.latitude,
              'lng': service.longitude,
              'meta': meta}
    # write the result as json, in the encoding specified in the config file.
    self.wfile.write(json.dumps(result).encode(config.reponse_encoding))
def logged(func=None, level=logging.DEBUG, name=None, msg=None):
    """
    Decorator that automatically logs a function's execution time to a
    logfile, along with a message.

    Parameters
    ----------
    func : the decorated function
    level : the logging level
    name : the logger to use (defaults to one named after the function)
    msg : a specific message to log (defaults to the function's name)

    Examples
    --------
    >>> @logged(level=logging.INFO)
    ... def toto(x, y):
    ...     print(x + y)
    >>> toto(3, 4)
    7

    in toto.txt => INFO:toto:0.000001
    """
    if func is None:
        return partial(logged, level=level, name=name, msg=msg)
    logger = name if name else Logger(func.__name__ + ".log", logging.INFO)
    logmsg = msg if msg else func.__name__

    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        end = time.time()
        # renamed from `msg` to avoid shadowing the decorator parameter
        elapsed = ":".join([str(func.__name__), str(end - start)])
        logger.log(level, logmsg)
        logger.log(level, elapsed)
        return result

    return wrapper
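# The `if func is None: return partial(...)` re-entry above lets the decorator
# be used both bare and with arguments; a quick illustration (the decorated
# function names here are hypothetical):

@logged                       # bare form: func is passed directly
def add(x, y):
    return x + y


@logged(level=logging.INFO)   # parameterized form: logged() first returns a
def sub(x, y):                # partial, which is then applied to sub
    return x - y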
def __init__(self, env, policy, algorithm, plot: bool = True):
    """
    :param env: Contains the gym environment the simulations are performed on
    :type env: Environment
    :param policy: The policy to improve
    :type policy: Policy
    :param algorithm: The learning algorithm
    :type algorithm: NES or NPG
    :param plot: If True, the results of training and benchmark will be plotted
    :type plot: bool
    """
    self.policy = policy
    self.env = env
    self.algorithm = algorithm
    self.plot = plot
    self.logger = Logger()
def models_if_exist(self):
    try:
        path = os.path.dirname(os.path.abspath(__file__))
        model_path = os.path.join(path, self.model_name)
        return os.path.exists(model_path)
    except Exception:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        l = Logger()
        l.log_error(
            "Exception in GMM-check-if-models-exist",
            traceback.format_exc() + "\n\n" + fname + " " + str(exc_tb.tb_lineno))
        response = {
            'Response': 'FAILED',
            'Reason': "Exception in GMM-check-if-models-exist process"
        }
        return HttpResponse(json.dumps(response))
def __init__(self, verbose=True, test_set_count=500, no_of_grams=1):
    self.logger = Logger('NaiveBayes', 'logs\\NaiveBayes.log',
                         is_verbose=verbose)
    self.verbose = verbose
    self.counts = dict()
    self.positive_bag = []
    self.negative_bag = []
    self.positve_test_bag = []
    self.negative_test_bag = []
    self.counts["test set"] = test_set_count
    self.counts["positive phrases"] = 0
    self.counts["negative phrases"] = 0
    self.counts["total sentences"] = 0
    self.counts["positive sentences"] = 0
    self.counts["negative sentences"] = 0
    self.no_of_grams = no_of_grams
    self.phrase_occurrences = dict()
    self.phrase_probabilities = dict()
def main():
    try:
        args = get_args()
        config = process_config(args.config)
    except Exception:
        print("Missing or invalid arguments")
        exit(1)
    create_dirs([config.summary_dir, config.checkpoint_dir])
    sess = tf.Session()
    data = DataGenerator(config)
    model = TemplateNet(config)
    logger = Logger(sess, config)
    trainer = TemplateTrainer(sess, model, data, config, logger)
    model.load(sess)
    trainer.train()
import os
from utilities.time_converter import time_converter
import urllib.request
from bs4 import BeautifulSoup as bs
from concurrent.futures import ThreadPoolExecutor
from utilities.database.wrappers.baseball_data_connection import DatabaseConnection
from utilities.logger import Logger
import datetime
import time
from utilities.properties import sandbox_mode, log_prefix, import_driver_logger as driver_logger

logger = Logger(
    os.path.join(log_prefix, "import_data", "manager_table_constructor.log"))


def manager_table_constructor():
    driver_logger.log('\tGathering manager data (all-time)')
    print("Gathering manager data (all-time)")
    start_time = time.time()
    logger.log('Begin populating managers table || Timestamp: ' +
               datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    table = str(
        bs(
            urllib.request.urlopen(
                'https://www.baseball-reference.com/managers/'),
            'html.parser'))
    rows = table.split('<tr')
    db = DatabaseConnection(sandbox_mode=True)
    db.write('ALTER TABLE managers DROP INDEX managerId;')
    db.close()
    with ThreadPoolExecutor(os.cpu_count()) as executor:
import os
import time
import datetime
from utilities.logger import Logger
from urllib.request import urlopen
from bs4 import BeautifulSoup
from utilities.database.wrappers.baseball_data_connection import DatabaseConnection
from utilities.translate_team_id import translate_team_id
from utilities.time_converter import time_converter
from utilities.anomaly_team import anomaly_team
from utilities.properties import sandbox_mode, log_prefix, import_driver_logger as driver_logger

logger = Logger(
    os.path.join(log_prefix, "import_data", "team_defensive_statistics.log"))


def team_defensive_statistics(year):
    driver_logger.log("\tGathering team defensive statistics")
    print('Gathering team defensive statistics')
    start_time = time.time()
    logger.log('Downloading team defensive data for ' + str(year) +
               ' || Timestamp: ' +
               datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    page1 = str(
        BeautifulSoup(
            urlopen("https://www.baseball-reference.com/leagues/MLB/" +
                    str(year) + "-standard-pitching.shtml"), "html.parser"))
    try:
        page2 = str(
            BeautifulSoup(
                urlopen("https://www.baseball-reference.com/leagues/MLB/" +
                        str(year) + "-batting-pitching.shtml"), "html.parser"))
import jwt

from utilities.dal import DbClient
from utilities.logger import Logger
from utilities.exceptions import *
from utilities.utils import get_data_by_token

db = DbClient()
logger = Logger(__name__)


def balance(request):
    '''
    Get the user's current balance
    :param request: flask request object
    '''
    try:
        logger.info('Get user current balance')
        token = request.headers.get('token', None)
        if not token:
            raise TokenNotExists()
        user_data = get_data_by_token(token)
        current_balance = db.get_user_balance(user_id=user_data['user_id'])
        return current_balance, 200
    except TokenNotExists as e:
        logger.warning(str(e))
        return str(e), 401
    except jwt.ExpiredSignatureError:
        logger.exception(
            f'Token is not authenticated! on request {request.remote_addr}')
import os
from statistics import stdev

from model.pitch import Pitch
from utilities.logger import Logger
from utilities.properties import log_prefix
from controller.gauntlet import pick_from_options, pick_one_or_the_other, pick_true_or_false, get_location

logger = Logger(os.path.join(log_prefix, "controller", "pitch.log"))


def simulate_pitch(pitcher, batter, batter_orientation, pitcher_orientation,
                   balls, strikes, strike_zone, pitching_year_info,
                   driver_logger):
    count = str(balls) + '-' + str(strikes)
    driver_logger.log('\tSimulating ' + count + ' pitch')
    logger.log('Simulating ' + count + ' pitch')
    pitch = Pitch(
        pitcher,
        determine_pitch_type(pitcher, batter, pitcher_orientation,
                             batter_orientation, count), balls, strikes)
    pitch_location = determine_pitch_location(pitcher, batter,
                                              pitcher_orientation,
                                              batter_orientation, count,
                                              pitch, strike_zone)
    ball_strike = determine_ball_strike(pitch_location, strike_zone)
    batter_swung = determine_if_batter_swung(
        pitching_year_info,
        get_batter_swing_rate(batter, pitcher_orientation, count, ball_strike,
                              pitch), batter_orientation, pitcher, batter,
from utilities.database.wrappers.baseball_data_connection import DatabaseConnection
from utilities.translate_team_name import translate_team_name
from utilities.logger import Logger
from utilities.time_converter import time_converter
import datetime
import time
import os
from utilities.properties import sandbox_mode, log_prefix, import_driver_logger as driver_logger

logger = Logger(
    os.path.join(log_prefix, "import_data", "populate_teams_table.log"))


def populate_teams_table(year):
    driver_logger.log('\tPopulating teams table')
    print("Populating teams table")
    start_time = time.time()
    logger.log('Begin populating teams table for ' + str(year) +
               ' || Timestamp: ' +
               datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    with open(os.path.join("..", "background", "yearTeams.txt"), 'rt') as file:
        db = DatabaseConnection(sandbox_mode)
        db.write('ALTER TABLE teams DROP INDEX teamId;')
        for line in file:
            if str(year) in line:
                temp_line = line.split(',')[1:-1]
                for team in temp_line:
                    team_id = team.split(';')[0]
                    # escape single quotes for the SQL literal (the original
                    # "\'" was a no-op that replaced a quote with itself)
                    db.write('insert into teams (teamId, teamName) values ("' +
                             team_id + '", "' +
                             translate_team_name(team_id).replace("'", "\\'") +
def __init__(self, parent, controller):
    Frame.__init__(self, parent)
    self.logger = Logger()
    self.grid()
    self.controller = controller
import os
import time
import datetime
from urllib.request import urlopen, urlretrieve
from bs4 import BeautifulSoup
from concurrent.futures import ThreadPoolExecutor
from utilities.database.wrappers.baseball_data_connection import DatabaseConnection
from import_data.player_data.pitching.league_pitching_ratios_constructor import league_pitching_ratios_constructor
from utilities.translate_team_id import translate_team_id
from utilities.time_converter import time_converter
from utilities.logger import Logger
from utilities.anomaly_team import anomaly_team
from utilities.properties import sandbox_mode, log_prefix, import_driver_logger as driver_logger

data = {}
pages = {}
temp_pages = {}
logger = Logger(os.path.join(log_prefix, "import_data", "pitchers.log"))


def pitching_constructor(year):
    global data
    data = {}
    print('Downloading pitcher images and attributes')
    driver_logger.log("\tDownloading pitcher images and attributes")
    start_time = time.time()
    logger.log("Downloading pitcher " + str(year) + " data || Timestamp: " +
               datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    logger.log("\tAssembling list of players")
    table = str(BeautifulSoup(
        urlopen("https://www.baseball-reference.com/leagues/MLB/" + str(year) +
                "-standard-pitching.shtml"), 'html.parser')).\
        split('<table class="sortable stats_table" id')[1].\
        split('<tbody>')[1].split('</tbody>')[0].split('<tr')
    for row in table:
import os
import time
import datetime
from urllib.request import urlopen, urlretrieve
from bs4 import BeautifulSoup
from concurrent.futures import ThreadPoolExecutor
from utilities.database.wrappers.baseball_data_connection import DatabaseConnection
from utilities.translate_team_id import translate_team_id
from utilities.time_converter import time_converter
from utilities.logger import Logger
from utilities.anomaly_team import anomaly_team
from utilities.properties import sandbox_mode, log_prefix, import_driver_logger as driver_logger
from import_data.player_data.fielding.cathchers_defense import catcher_defense

data = {}
pages = {}
temp_pages = {}
logger = Logger(os.path.join(log_prefix, "import_data", "fielders.log"))


def fielding_constructor(year):
    print('Downloading fielder images and attributes')
    driver_logger.log("\tDownloading fielder images and attributes")
    start_time = time.time()
    global data
    data = {}
    catcher_info = catcher_defense(year, logger)
    logger.log("Downloading fielder " + str(year) + " data || Timestamp: " +
               datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    logger.log("\tAssembling list of players")
    table = str(BeautifulSoup(
        urlopen("https://www.baseball-reference.com/leagues/MLB/" + str(year) +
                "-standard-fielding.shtml"), 'html.parser')).\
        split('<table class="sortable stats_table" id')[1].\
        split('<tbody>')[1].split('</tbody>')[0].split('<tr')
import os
from utilities.database.wrappers.baseball_data_connection import DatabaseConnection
from import_data.consolidata.team_roster_info import consolidate_hitter_spots, consolidate_player_positions
from import_data.consolidata.write_consolidated_data import write_roster_info
from import_data.consolidata.player_stats import consolidate_player_stats
from utilities.logger import Logger
from utilities.properties import sandbox_mode, log_prefix, import_driver_logger as driver_logger
from utilities.time_converter import time_converter
import datetime
import time

logger = Logger(os.path.join(log_prefix, "import_data", "consolidata.log"))


def consolidate_data(year):
    driver_logger.log("\tConsolidating data")
    print("Consolidating data")
    start_time = time.time()
    logger.log("Consolidating team data || Timestamp: " +
               datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    db = DatabaseConnection(sandbox_mode)
    for ty_uid in db.read(
            'select ty_uniqueidentifier from team_years where year = ' +
            str(year) + ';'):
        team_start_time = time.time()
        logger.log('\t' + db.read(
            'select teamId from team_years where ty_uniqueidentifier = ' +
            str(ty_uid[0]) + ';')[0][0])
        write_roster_info(
            ty_uid[0], {
                'hitter_spots':