def PostEmployee(self, s):
    """Create an EMPLOYEE row from the JSON payload *s*, commit, return 'OK'."""
    record = EMPLOYEE()
    Json.Fill(record, s)
    # stamp the record with a fresh revision value before insert
    record.rvv = self.GetRvv()
    conn.execute(Json.PostSQL(record))
    conn.commit()
    return 'OK'
def Authenticate(self):
    """Validate HTTP Basic credentials against the user table.

    Raises:
        Json.RestException: 401 when the Authorization header is missing
            or the credentials do not match exactly one user row.
    """
    if 'Authorization' not in self.headers:
        raise Json.RestException(401, 'No authorization header')
    header = self.headers['Authorization']
    # strip the "Basic " prefix (6 chars) before base64-decoding
    decoded = str(base64.b64decode(header[6:]), 'utf-8')
    creds = decoded.split(':')
    c = conn.cursor()
    # parameterized query: the previous string concatenation of the
    # client-supplied username was SQL-injectable
    r = c.execute(
        "select count(*) from user where uname=? and password=?",
        (creds[0], '******'),
    ).fetchone()
    if int(r[0]) != 1:
        raise Json.RestException(401, 'Not authenticated')
    return
def PostPost(self, s):
    """Create a POST row from payload *s* after verifying the employee rvv."""
    incoming = POST1()
    Json.Fill(incoming, s)
    # reject the write if the employee record changed under us
    Json.CheckRvv(EMPLOYEE(), incoming.emp_rvv, conn)
    post = POST()
    post.empid = incoming.empid
    post.efrom = incoming.efrom
    post.grade = incoming.grade
    post.manager = incoming.manager
    post.rvv = self.GetRvv()
    conn.execute(Json.PostSQL(post))
    conn.commit()
    return 'OK'
def PostHoliday(self, s):
    """Create a HOLIDAY row from payload *s*, checking employee and post rvvs."""
    incoming = HOLIDAY1()
    Json.Fill(incoming, s)
    # both the employee and the post must still be at the revision the
    # client saw, otherwise CheckRvv raises
    Json.CheckRvv(EMPLOYEE(), incoming.emp_rvv, conn)
    Json.CheckRvv(POST(), incoming.post_rvv, conn)
    holiday = HOLIDAY()
    holiday.empid = incoming.empid
    holiday.hfrom = incoming.hfrom
    holiday.hto = incoming.hto
    holiday.rvv = self.GetRvv()
    conn.execute(Json.PostSQL(holiday))
    conn.commit()
    return 'OK'
def UpdateStatus(id, status):
    """
    >>> UpdateStatus(123456789,"TestStatus")
    TestName status is update
    """
    records = Json.FetchJson()
    # update every record whose ID matches (compared as strings)
    for record in records:
        if str(id) == str(record['ID']):
            print(record['Name'] + " status is update")
            record["Status"] = status
    Json.SaveJason(records)
def get(self):
    """Verify a signed request and return the app token as JSON.

    The caller supplies ``appid`` and ``key`` (an HMAC-SHA256 of the other
    url parameters, signed with the app secret) plus arbitrary parameters.
    A single response dict is built and written once at the end, replacing
    the previous duplicated header/write blocks.
    """
    app_id = self.request.get('appid')
    params = self.request.arguments()  # gets all the url parameter names
    if not app_id:
        response = {
            "status": "NO_APPID",
            "message": "Missing appid. Please confirm that you have supplied the appid",
        }
    elif not self.request.get('key'):
        response = {
            "status": "NO_KEY",
            "message": "No key passed. Request can not be verified",
        }
    else:
        key = self.request.get('key')
        app_secret = dataprovider.getAppSecret({"appid": app_id})
        if app_secret:  # a matching app secret for the appid was found
            # rebuild the signed string from every parameter except the
            # signature itself, preserving the request's parameter order
            urlstr = "&".join(
                "%s=%s" % (p, self.request.get(p)) for p in params if p != "key"
            )
            testKey = hmac.new(app_secret, urlstr, hashlib.sha256).hexdigest()
            # constant-time comparison: `testKey == key` leaked timing
            # information about how much of the signature matched
            if hmac.compare_digest(testKey, key):
                token = dataprovider.getToken(app_id)
                response = {"status": "SUCCESS", "token": str(token)}
            else:
                response = {
                    "status": "VERIFICATION_FAILED",
                    "message": "Keys did not match. Confirm that you have the correct appsecret.",
                }
        else:
            response = {
                "status": "VERIFICATION_FAILED",
                "message": "The appid supplied did not match any authorized apps",
            }
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(Json.encode(response))
def CreateCV(name, edu, pic, id, exp, sLink='Unavailable'):
    """Append a new CV record to the JSON store.

    The picture is uploaded to Cloudinary first; the stored value is
    whatever the upload call returns.
    """
    records = Json.FetchJson()
    records.append({
        'Notes': "",
        'Status': "Under review",
        'Picture': CloudinaryP.upload(pic, id),
        'Experience': exp,
        'Education': edu,
        'ID': id,
        'Name': name,
        'Social account': sLink,
    })
    Json.SaveJason(records)
def main(wish):
    """Process a user *wish*: answer it, run a management command, or record it.

    Returns:
        (answer, result, error_code) — error_code 0 means success, 2 means
        the data file could not be read.
    """
    error_code = 0
    answer = ""
    result = False
    all_data = dict()
    try:
        # load information from json-file
        all_data, error_code = Json.reading_from_the_data_file()
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; any read failure maps to error code 2
        error_code = 2
    if error_code == 0:
        answer, result, error_code = Logic.core(all_data, wish)
        if result:
            # management command: switch off
            if answer == "Ok, bye":
                subprocess.call(["shutdown", "/s"])
            # management command: restart
            elif answer == "Ok, see you soon":
                subprocess.call(["shutdown", "/r"])
            elif answer == "Ok, here it is":
                # open the C: drive with the platform's file browser
                if platform.system() == "Windows":
                    os.startfile("C:")
                elif platform.system() == "Darwin":
                    subprocess.Popen(["open", "C:"])
                else:
                    subprocess.Popen(["xdg-open", "C:"])
            else:
                # show answer to user
                Output.positive_output(answer)
        else:
            # no answer was found: apologize and remember the wish so a
            # future answer can be added
            Output.negative_output()
            error_code = Json.writing_to_the_data_file(wish)
    return answer, result, error_code
def AddNotes(id, note):
    """
    >>> AddNotes(123456789,"TestNotes")
    Note add to TestName
    """
    records = Json.FetchJson()
    for record in records:
        # IDs are compared as strings so int and str ids both match
        if str(id) == str(record['ID']):
            record["Notes"] = note
            print("Note add to " + record['Name'])
    Json.SaveJason(records)
def GetPost(self, p):
    """Dispatch on the url path length *p* to fetch POST rows.

    5 segments -> the n-th post of one employee; 4 -> all posts of one
    employee; 3 -> every post.
    """
    segments = len(p)
    if segments == 5:
        return POST._FindNth(int(p[3]), int(p[4]), conn)
    if segments == 4:
        return POST._GetAllWith(int(p[3]), conn)
    if segments == 3:
        return Json.GetAll(POST(), conn)
def Serach(value):
    """Print every CV whose ID, Education or Experience equals *value*.

    >>> Serach(123456789)
    Name: TestName
    ID: 123456789
    Education: TestEducation
    Experience: TestExp
    Picture: TestPic
    Status: TestStatus
    Note: TestNotes
    <BLANKLINE>
    <BLANKLINE>
    """
    data = Json.FetchJson()
    for c in data:
        # a record may match several fields; like the original, it is
        # printed once per matching field
        for field in ('ID', 'Education', 'Experience'):
            if str(value) == str(c[field]):
                _PrintCV(c)


def _PrintCV(c):
    """Print one CV record followed by two blank separator lines.

    The original duplicated this block three times; the Experience branch
    printed 'Status:{}' without a space (an inconsistency, fixed here),
    and passed end='\\n' to str.format where it was silently ignored.
    """
    print('Name: {}\nID: {}\nEducation: {}\nExperience: {}\nPicture: {}\nStatus: {}'
          .format(c['Name'], str(c['ID']), c['Education'], c['Experience'],
                  c['Picture'], c['Status']))
    if c['Notes']:
        print("Note:", c['Notes'])
    print('\n')
def GetHoliday(self, p):
    """Dispatch on the url path length *p* to fetch HOLIDAY rows.

    5 segments -> the n-th holiday of one employee; 4 -> all holidays of
    one employee; 3 -> every holiday.
    """
    segments = len(p)
    if segments == 5:
        return HOLIDAY._FindNth(int(p[3]), int(p[4]), conn)
    if segments == 4:
        return HOLIDAY._GetAllWith(int(p[3]), conn)
    if segments == 3:
        return Json.GetAll(HOLIDAY(), conn)
def _Find(id, conn):
    """Return the EMPLOYEE record with primary key *id*."""
    c = conn.cursor()
    # bind id as a query parameter: the previous str(id) concatenation
    # was SQL-injectable
    x = c.execute('SELECT * FROM EMPLOYEE WHERE id=?', (id,))
    e = EMPLOYEE()
    Json.GetOne(x, e, c.fetchone())
    c.close()
    return e
def _Find(EMPLOYEE, id, conn):
    """Return the EMPLOYEE record with primary key *id*.

    *EMPLOYEE* is the record class itself (this is invoked class-method
    style, e.g. EMPLOYEE._Find(id, conn)).
    """
    c = conn.cursor()
    # bind id as a query parameter: the previous str(id) concatenation
    # was SQL-injectable
    x = c.execute('select * from EMPLOYEE where id=?', (id,))
    e = EMPLOYEE()
    Json.GetOne(x, e, c.fetchone())
    c.close()
    return e
def _FindNth(POST, id, n, conn):
    """Return the n-th POST row (1-based) for employee *id* as JSON text.

    *POST* is the record class itself (invoked class-method style).
    """
    c = conn.cursor()
    # bind id as a query parameter instead of string concatenation
    x = c.execute('select * from POST where empid=?', (id,))
    # instantiate via the class parameter: the original built a HOLIDAY()
    # here even though the query reads the POST table — a copy-paste slip
    record = POST()
    # skip the first n-1 rows so the fetchone() below yields the n-th
    if n > 1:
        for _ in range(1, n):
            c.fetchone()
    text = Json.GetOne(x, record, c.fetchone())
    c.close()
    return text
def sftp_upload(path, filename):
    """Upload the file at *path* to the SFTP server's root folder as *filename*.

    Connection settings come from the JSON config; the transport and SFTP
    client are now closed even when connect()/put() raises (they leaked
    on error before).
    """
    logger_4.info('Sending file ...')
    host, username, password = Json.get_config_sftp()
    port = 22
    transport = paramiko.Transport((host, port))
    try:
        transport.connect(username=username, password=password)
        sftp = paramiko.SFTPClient.from_transport(transport)
        try:
            sftp.put(path, filename)  # upload file to root FTP folder
        finally:
            sftp.close()
    finally:
        transport.close()
    logger_4.info('File sent')
def main():
    """Collect links to the latest #Python posts from the VK newsfeed.

    Returns:
        list[str]: post urls; empty if the request failed with a
        UnicodeEncodeError or raised before any post was collected.
    """
    # bound before the try: previously `s` was created inside it, so a
    # failure in VkApi()/get_api() made `return s` raise UnboundLocalError
    s = []
    try:
        vk_session = vk_api.VkApi(token=Config.vk_token)
        vk = vk_session.get_api()
        response = vk.newsfeed.search(q='#Python', count=200)
        for item in response['items']:
            s.append('https://vk.com/wall' + str(item['owner_id']) +
                     '_' + str(item['id']))
        Json.encode_json('Spisok_postov', s)
    except UnicodeEncodeError:
        # best-effort: a bad post encoding aborts collection silently
        pass
    return s
def test_simple_object3():
    """Round-trip the builtin ``sum`` through the Json serializer."""
    serializer = Json.Json()
    dumped = serializer.dumps(sum)
    print(dumped, 'hi')
    restored = serializer.loads(dumped)
    print(restored)
    import inspect
    print(inspect.getsource(restored))
    print(restored(5))
    print(restored, '3 usual')
    # the next print appears twice in the original test; kept as-is
    print(dir(restored), '5 dir')
    print(dir(restored), '5 dir')
    print(dir(sum), '6 dir')
    assert roune._function_equals(sum, restored)
def create_data(locations):
    """Creates the data dict for a distance-matrix request.

    Args:
        locations: iterable of address-part sequences; each sequence is
            joined with '+' into one address string.

    Returns:
        dict with 'API_key' (from the JSON config) and 'addresses'.
    """
    data = {}
    data['API_key'] = Json.get_config_api_key()
    # '+'.join replaces the manual concatenate-then-strip-trailing-'+'
    # loop; output is identical
    data['addresses'] = ['+'.join(str(part) for part in loc)
                         for loc in locations]
    return data
def EditCV(id, value, type='n'):
    """
    >>> EditCV(123456789,"TestName",'n')
    The Name is update
    >>> EditCV(123456789,"TestEducation",'e')
    The Education is update
    >>> EditCV(123456789,"TestExp",'exp')
    The Experience is update
    """
    # map the type code to the record field it edits
    field_for = {'n': 'Name', 'e': 'Education', 'exp': 'Experience'}
    data = Json.FetchJson()
    for record in data:
        if str(id) == str(record['ID']):
            field = field_for.get(type.lower())
            if field is not None:
                record[field] = value
                print("The " + field + " is update")
    Json.SaveJason(data)
def run_thread(day, schools):
    """Build and upload carpool route plans for every school on *day*.

    For each school id in *schools* and each departure time in its pool:
      1. gather addresses plus vehicle/location data from the database,
      2. query the distance matrix service,
      3. run the routing algorithm and construct per-route urls,
      4. serialize the result and upload it over SFTP,
      5. sleep 120s before the next time slot.
    """
    for i in schools:
        # all distinct departure times for school i on this day
        group = one.build_time_pool(day, i)
        for y in group:
            locations = one.select_all_addresses(i, day, y)
            vehicle_data, location_data, driver_indices, passenger_indices, drivers, passengers = one.locations(i, day, y)
            # potential bug when requests to google distance matrix api are synchronized (DDoS attack)
            # https://developers.google.com/maps/documentation/distance-matrix/web-service-best-practices#synchronized-requests
            matrix, time_matrix = createDistanceMatrix.main(one.select_all_addresses(i, day, y))
            routes, dropped_nodes, durations = Algorithm.main(vehicle_data, location_data, matrix, time_matrix)
            # deep copy — presumably construct_route_url mutates the route
            # lists it receives while `routes` is still needed below;
            # TODO(review): confirm against url_constructer
            routes_temp = copy.deepcopy(routes)
            urls = url_constructer.construct_route_url(locations, routes_temp)
            for u in urls:
                print(u)
            temp1, temp2 = Json.build_list(urls, routes, dropped_nodes, driver_indices, passenger_indices, drivers, passengers, day, y, durations)
            filepath, filename = Json.fill_data_matrix(i, day, y, temp1, temp2)
            Communication.sftp_upload(filepath, filename)
            # throttle between time slots (see the rate-limit note above)
            sleep(120)
def deserialize(self, format, filepath):
    """Load an object from *filepath* using the serializer named by *format*.

    Supported formats: 'JSON', 'TOML', 'YAML', 'PICLE' (key kept
    misspelled for caller compatibility). Unknown formats log an error
    and exit.
    """
    # use a local variable: the original assigned the serializer to
    # self.deserialize, clobbering this very method on the instance
    if format == 'JSON':
        serializer = Json.Json()
    elif format == 'TOML':
        serializer = Toml.TomlSerializer()
    elif format == 'YAML':
        serializer = Yaml.Yaml()
    elif format == 'PICLE':
        serializer = Pickle.Pickle()
        # pickle is a binary format, so it is read through a binary handle
        with open(filepath, 'rb') as f:
            return serializer.load(f)
    else:
        logging.error("Unsuported type from dump ")
        exit()
    # the text-based serializers take the path directly
    return serializer.load(filepath)
def serialize(self, obj, format, filepath):
    """Dump *obj* to *filepath* using the serializer named by *format*.

    Supported formats: 'JSON', 'TOML', 'YAML', 'PICLE' (key kept
    misspelled for caller compatibility). Unknown formats log an error
    and exit.
    """
    print(format)
    # use a local variable: the original assigned the serializer to
    # self.serialize, clobbering this very method on the instance
    if format == 'JSON':
        serializer = Json.Json()
    elif format == 'TOML':
        serializer = Toml.TomlSerializer()
    elif format == 'YAML':
        serializer = Yaml.Yaml()
    elif format == 'PICLE':
        serializer = Pickle.Pickle()
        # pickle is a binary format, so it is written through a binary handle
        with open(filepath, 'wb') as f:
            return serializer.dump(obj, f)
    else:
        logging.error("Unsuported type from load ")
        exit()
    # the text-based serializers take the path directly
    return serializer.dump(obj, filepath)
class Yaml:
    """YAML serializer that delegates to the JSON implementation.

    According to the YAML spec, JSON is valid YAML, so the JSON parser
    can be reused unchanged.
    https://yaml.org/spec/1.2/spec.html
    """

    # one shared parser instance for all Yaml objects
    json_parsers = Json.Json()

    def dump(self, string, filepath):
        return self.json_parsers.dump(string, filepath)

    def dumps(self, string):
        return self.json_parsers.dumps(string)

    def loads(self, string, complex_convert=True):
        return self.json_parsers.loads(string, complex_convert)

    def load(self, filepath, complex_convert=True):
        return self.json_parsers.load(filepath, complex_convert)
def recommend(nazvanie_filma):
    """Recommend films sharing at least one genre with *nazvanie_filma*.

    Returns:
        dict: film name -> film record, excluding the source film itself;
        {} when the film is not in the catalogue.
    """
    spisok = Json.decode_json('res')
    recommendation = {}
    film_genre = []
    if nazvanie_filma in spisok.keys():
        for gnr in spisok[nazvanie_filma]['genres']:
            film_genre.append(gnr['name'])
    else:
        return {}
    for film in spisok.keys():
        if film == nazvanie_filma:
            # the original had a bare `next` here — a no-op expression,
            # not `continue` — so the source film leaked into its own
            # recommendations
            continue
        for genre in spisok[film]['genres']:
            if genre['name'] in film_genre:
                recommendation[film] = spisok[film]
                break
    return recommendation
def CheckMyStatus(id):
    """
    >>> CheckMyStatus(123456789)
    Name: TestName
    ID: 123456789
    Education: TestEducation
    Experience: TestExp
    Picture: TestPic
    Status: TestStatus
    <BLANKLINE>
    """
    records = Json.FetchJson()
    template = ('Name: {}\nID: {}\nEducation: {}\nExperience: {}\n'
                'Picture: {}\nStatus: {}\n')
    for record in records:
        if str(id) == str(record['ID']):
            print(template.format(record['Name'], str(record['ID']),
                                  record['Education'], record['Experience'],
                                  record['Picture'], record['Status']))
def _publishSwarmCommand(self, json_msg):
    '''Decode *json_msg* into a SwarmCommand ROS message and publish it.'''
    self._swarmCommandPublisher.publish(Json.json2SwarmCommand(json_msg))
def _publishOdometry(self, json_msg):
    '''Decode *json_msg* into a SwarmOdometry ROS message and publish it.'''
    self._odometryPublisher.publish(Json.json2SwarmOdometry(json_msg))
def _publishStatus(self, json_msg):
    '''Decode *json_msg* into a SwarmStatus ROS message and publish it.'''
    self._statusPublisher.publish(Json.json2SwarmStatus(json_msg))
def _readHeader(self, msg):
    '''Decode the 'header' field of *msg* into a ROS Header message.'''
    header_json = msg['header']
    return Json.json2Header(header_json)
class SQLHandler:
    """MySQL access layer for the carpool scheduler.

    All data values are now bound with %s placeholders — the previous
    str.format interpolation of school ids and times was SQL-injectable.
    The *day* argument is interpolated as a COLUMN NAME and cannot be
    bound; callers must pass only trusted weekday column names.
    """

    # sensitive login data comes from a json config file (read at import)
    user, password, host, database = Json.get_config_db()
    sql = None

    def __init__(self):
        # connect immediately so the handler is usable after construction
        self.connect()

    def connect(self):
        """Open the MySQL connection; failures are logged, not raised."""
        try:
            self.sql = conn.connection.MySQLConnection(user=self.user,
                                                       password=self.password,
                                                       host=self.host,
                                                       database=self.database)
            logger_5.info("Connected successfully")
        except conn.Error:
            logger_5.info("Connection failed")

    def select(self, query):
        """Run *query* and log the full result set."""
        cursor = self.sql.cursor()
        cursor.execute(query)
        logger_5.info(cursor.fetchall())

    def select_capacities(self, school_id, day, time):
        """Return seat counts (list[int]) of active drivers for a slot."""
        cursor = self.sql.cursor()
        cursor.execute(
            "SELECT seats FROM users, timetable WHERE {}=%s AND school_id=%s"
            " AND users.id=timetable.id AND timetable.status=1"
            " AND seats IS NOT NULL GROUP BY users.id".format(day),
            (Time.new_time_string_for_time(time), school_id))
        return [int(row[0]) for row in cursor.fetchall()]

    def select_all_locations(self, query, params=()):
        """Return the rows of *query* (optionally parameterized) as lists.

        *params* is new and defaults to () — existing raw-query callers
        are unaffected.
        """
        cursor = self.sql.cursor()
        cursor.execute(query, params)
        return [list(row) for row in cursor.fetchall()]

    def _user_addresses(self, school_id, day, time, driver):
        """Addresses of active drivers (driver=True) or passengers."""
        seats = "IS NOT NULL" if driver else "IS NULL"
        return self.select_all_locations(
            "SELECT street, streetNumber, locality, region, zipcode, country"
            " FROM users, timetable WHERE {}=%s AND school_id=%s"
            " AND users.id=timetable.id AND timetable.status=1"
            " AND seats {} GROUP BY users.id".format(day, seats),
            (Time.new_time_string_for_time(time), school_id))

    def _school_address(self, school_id):
        """Address row(s) of the school itself (the routing depot)."""
        return self.select_all_locations(
            "SELECT street, streetNumber, locality, region, zipcode, country"
            " FROM schools WHERE id=%s", (school_id,))

    def select_all_addresses(self, school_id, day, time):
        """Return all addresses as school + drivers + passengers."""
        passengers = self._user_addresses(school_id, day, time, driver=False)
        drivers = self._user_addresses(school_id, day, time, driver=True)
        depot = self._school_address(school_id)
        return depot + drivers + passengers

    def build_time_pool(self, day, school_id):
        """Return the distinct departure times (list[str]) for a school."""
        cursor = self.sql.cursor()
        cursor.execute(
            "SELECT DISTINCT {} FROM timetable, users WHERE users.school_id=%s"
            " AND users.id=timetable.id GROUP BY users.id".format(day),
            (school_id,))
        return [Time.time_for_timedelta(row[0]) for row in cursor.fetchall()]

    def build_school_pool(self, timezone):
        """Return the ids (list[int]) of all schools in *timezone*."""
        cursor = self.sql.cursor()
        cursor.execute("SELECT DISTINCT id FROM schools WHERE timezone=%s",
                       (timezone,))
        return [int(row[0]) for row in cursor.fetchall()]

    def build_timezone_pool(self):
        """Return every distinct school timezone."""
        cursor = self.sql.cursor()
        cursor.execute("SELECT DISTINCT timezone FROM schools")
        return [row[0] for row in cursor.fetchall()]

    def _user_indices(self, school_id, day, time, driver):
        """Ids of active drivers (driver=True) or passengers for a slot."""
        seats = "IS NOT NULL" if driver else "IS NULL"
        cursor = self.sql.cursor()
        cursor.execute(
            "SELECT timetable.id FROM users, timetable WHERE {}=%s"
            " AND school_id=%s AND users.id=timetable.id"
            " AND timetable.status=1 AND seats {}"
            " GROUP BY users.id".format(day, seats),
            (Time.new_time_string_for_time(time), school_id))
        return [int(row[0]) for row in cursor.fetchall()]

    def get_driver_indices(self, school_id, day, time):
        """Driver ids for (school_id, day, time), e.g. (1, "monday", "080000")."""
        return self._user_indices(school_id, day, time, driver=True)

    def get_passenger_indices(self, school_id, day, time):
        """Passenger ids for (school_id, day, time)."""
        return self._user_indices(school_id, day, time, driver=False)

    def get_user_indices(self, school_id, day, time):
        """Return (driver ids, passenger ids) for the slot."""
        return (self.get_driver_indices(school_id, day, time),
                self.get_passenger_indices(school_id, day, time))

    def driver_name(self, driver_id):
        """Return (forename, name) of the driver with *driver_id*."""
        cursor = self.sql.cursor()
        cursor.execute(
            "SELECT forename, name FROM users WHERE id=%s"
            " AND seats IS NOT NULL", (driver_id,))
        result = cursor.fetchone()
        return result[0], result[1]

    def locations(self, school_id, day, time):
        """Build routing inputs for one school/day/time slot.

        Returns (vehicle_data, location_data, driver indices (relative),
        passenger indices (relative), driver ids, passenger ids).
        """
        depot = self._school_address(school_id)
        drivers = self._user_addresses(school_id, day, time, driver=True)
        passengers = self._user_addresses(school_id, day, time, driver=False)
        depot_index = [0]
        # relative indices: depot first, then drivers, then passengers
        drivers_indices = list(range(1, len(drivers) + len(depot)))
        passengers_indices = list(range(len(drivers) + len(depot),
                                        len(passengers) + len(drivers) + len(depot)))
        logger_5.info(depot_index)
        logger_5.info(drivers_indices)
        logger_5.info(passengers_indices)
        locations_indices = depot_index + drivers_indices + passengers_indices
        logger_5.info(locations_indices)
        location_data = {
            'num': len(locations_indices),
            'starts': drivers_indices
        }
        vehicle_data = {
            'num': len(drivers_indices),
            'capacities': self.select_capacities(school_id, day, time)
        }
        temp1, temp2 = self.get_user_indices(school_id, day, time)
        return (vehicle_data, location_data, drivers_indices,
                passengers_indices, temp1, temp2)

    def close(self):
        """Close the connection to the db."""
        self.sql.close()
def logsreg(loadTrainingFilePath, sc): # Load training data in LIBSVM format loadTrainingFilePath = '/Users/Jacob/repository/SparkService/data/sample_libsvm_data.txt' data = MLUtils.loadLibSVMFile(sc, loadTrainingFilePath) # Split data into training (60%) and test (40%) traindata, testdata = data.randomSplit([0.6, 0.4], seed = 11L) traindata.cache() # Load testing data in LIBSVM format #testdata = MLUtils.loadLibSVMFile(sc, loadTestingFilePath) # Run training algorithm to build the model model = LogisticRegressionWithLBFGS.train(traindata, numClasses=3) # Compute raw scores on the test set predictionAndLabels = testdata.map(lambda lp: (float(model.predict(lp.features)), lp.label)) Json.generateJson("LogisticRegression", "12345678", traindata, predictionAndLabels); print 'Completed.' # Instantiate metrics object # metrics = MulticlassMetrics(predictionAndLabels) # # Overall statistics # precision = metrics.precision() # recall = metrics.recall() # f1Score = metrics.fMeasure() # #confusion_matrix = metrics.confusionMatrix().toArray() # print("Summary Stats") # print("Precision = %s" % precision) # print("Recall = %s" % recall) # print("F1 Score = %s" % f1Score) # # Statistics by class # labels = traindata.map(lambda lp: lp.label).distinct().collect() # for label in sorted(labels): # print("Class %s precision = %s" % (label, metrics.precision(label))) # print("Class %s recall = %s" % (label, metrics.recall(label))) # print("Class %s F1 Measure = %s" % (label, metrics.fMeasure(label, beta=1.0))) # # Weighted stats # print("Weighted recall = %s" % metrics.weightedRecall) # print("Weighted precision = %s" % metrics.weightedPrecision) # print("Weighted F(1) Score = %s" % metrics.weightedFMeasure()) # print("Weighted F(0.5) Score = %s" % metrics.weightedFMeasure(beta=0.5)) # print("Weighted false positive rate = %s" % metrics.weightedFalsePositiveRate) # #return model parameters # res = [('1','Yes','TP Rate', metrics.truePositiveRate(0.0)), # ('2','Yes','FP Rate', 
metrics.falsePositiveRate(0.0)), # ('3','Yes','Precision', metrics.precision(0.0)), # ('4','Yes','Recall', metrics.recall(0.0)), # ('5','Yes','F-Measure', metrics.fMeasure(0.0, beta=1.0)), # ('1','Yes','TP Rate', metrics.truePositiveRate(1.0)), # ('2','Yes','FP Rate', metrics.falsePositiveRate(1.0)), # ('3','Yes','Precision', metrics.precision(1.0)), # ('4','Yes','Recall', metrics.recall(1.0)), # ('5','Yes','F-Measure', metrics.fMeasure(1.0, beta=1.0)), # ('1','Yes','TP Rate', metrics.truePositiveRate(2.0)), # ('2','Yes','FP Rate', metrics.falsePositiveRate(2.0)), # ('3','Yes','Precision', metrics.precision(2.0)), # ('4','Yes','Recall', metrics.recall(2.0)), # ('5','Yes','F-Measure', metrics.fMeasure(2.0, beta=1.0))] # #save output file path as JSON and dump into dumpFilePath # rdd = sc.parallelize(res) # SQLContext.createDataFrame(rdd).collect() # df = SQLContext.createDataFrame(rdd,['Order','CLass','Name', 'Value']) #tempDumpFilePath = dumpFilePath + "/part-00000" #if os.path.exists(tempDumpFilePath): # os.remove(tempDumpFilePath) #df.toJSON().saveAsTextFile(hdfsFilePath) #tmpHdfsFilePath = hdfsFilePath + "/part-00000" #subprocess.call(["hadoop","fs","-copyToLocal", tmpHdfsFilePath, dumpFilePath]) # Save and load model #clusters.save(sc, "myModel") #sameModel = KMeansModel.load(sc, "myModel")
def Random_Forest(trainFile, testFile, taskid, sc):
    """Train a RandomForest classifier on LIBSVM data and emit a JSON report.

    trainFile/testFile -- LIBSVM-format input paths.
    taskid -- identifier passed through to the JSON report.
    sc -- the SparkContext used to load the data.
    """
    # Load and parse the data files into RDDs of LabeledPoint.
    trainData = MLUtils.loadLibSVMFile(sc, trainFile)
    testData = MLUtils.loadLibSVMFile(sc, testFile)
    labelNum = trainData.map(lambda lp: lp.label).distinct().count()

    # Train a RandomForest model.
    # Empty categoricalFeaturesInfo indicates all features are continuous.
    # Note: Use larger numTrees in practice.
    # Setting featureSubsetStrategy="auto" lets the algorithm choose.
    # numClasses now uses the label count computed above instead of a
    # hard-coded 3, so the function works for any number of classes.
    model = RandomForest.trainClassifier(trainData, numClasses=labelNum,
                                         categoricalFeaturesInfo={},
                                         numTrees=labelNum,
                                         featureSubsetStrategy="auto",
                                         impurity='gini', maxDepth=4,
                                         maxBins=32)

    # Evaluate model on test instances as (label, prediction) pairs
    predictions = model.predict(testData.map(lambda x: x.features))
    labelsAndPredictions = testData.map(lambda lp: lp.label).zip(predictions)
    # report under the correct algorithm name — the original passed
    # "LogisticRegression" here, a copy-paste slip
    Json.generateJson("RandomForest", taskid, trainData, labelsAndPredictions)
def get(self, cat=False, catid=0, cat2=False):
    """Serve election data as JSON after verifying the request signature.

    The caller supplies ``token`` and ``key`` (an HMAC-SHA256 of the other
    url parameters, signed with the app secret) plus filter parameters;
    *cat*, *catid* and *cat2* select the data class that builds the
    response.
    """
    def blank(v):
        # the original repeated `catid == 0 or catid == "" or catid == " "
        # or catid == "  "` four times; 0 and whitespace-only strings mean
        # "no id supplied"
        return v == 0 or str(v).strip() == ""

    if self.request.get('token'):  # check whether the request token is available
        token = self.request.get('token')
        params = self.request.arguments()  # gets all the url parameters
        app_secret = dataprovider.getAppSecret({"token": token})
        electoral_dist = ['county', 'constituency', 'ward']
        elective_post = ['electivepost', 'president', 'governor', 'senator',
                         'women_rep', 'mp', 'councillor']
        if self.request.get('key'):  # check whether the request key is available
            key = self.request.get('key')
            urlstr = ''
            filters = {}
            if app_secret:  # a matching app secret for the token was found
                # rebuild the signed string from every parameter except the
                # signature and the token themselves
                # NOTE(review): the collapsed original is ambiguous about
                # whether `token` was included in the signed string —
                # confirm against the client-side signing code
                for param in params:
                    if param != "key" and param != "token":
                        filters["%s" % param] = self.request.get(param)
                        urlstr += "%s=%s&" % (param, self.request.get(param))
                testKey = hmac.new(app_secret, urlstr.rstrip("&"),
                                   hashlib.sha256).hexdigest()
                # constant-time comparison of the recomputed signature
                # against the caller's key. The original only tested
                # `if testKey:` — always true — which accepted ANY request
                # once a secret was found: an authentication bypass.
                if hmac.compare_digest(testKey, key):
                    # initialized up front: the unknown-category branch
                    # previously assigned response['status'] before
                    # `response` existed, raising NameError
                    response = {}
                    # electoral district data
                    if cat in electoral_dist:
                        if blank(catid):
                            response = dataprovider.ElectoralDistrict("%s" % cgi.escape(cat), "", filters).getList()
                        else:
                            response = dataprovider.ElectoralDistrict("%s" % cgi.escape(cat), "%s" % cgi.escape(catid), filters).getDetailList()
                    # elective post data
                    elif cat == "post":
                        if blank(catid):
                            response = dataprovider.ElectivePost().getList()
                        else:
                            response = dataprovider.ElectivePost("%s" % cgi.escape(catid)).getDetailList()
                    # polling station data
                    elif cat == "pollingstation":
                        if blank(catid):
                            response = dataprovider.PollingStation("", filters).getList()
                        else:
                            response = dataprovider.PollingStation("%s" % cgi.escape(catid), filters).getDetailList()
                    # candidate data
                    elif cat == "candidate":
                        if blank(catid):
                            response = dataprovider.Candidates("", filters).getList()
                        else:
                            response = dataprovider.Candidates("%s" % cgi.escape(catid), filters).getDetailList()
                    # party data
                    elif cat == "party":
                        if blank(catid):
                            response = dataprovider.Party().getList()
                        else:
                            response = dataprovider.Party("%s" % cgi.escape(catid)).getDetailList()
                    # contest data
                    elif cat == "contest":
                        if blank(catid):
                            response = dataprovider.Contests("", filters).getList()
                        else:
                            response = dataprovider.Contests("%s" % cgi.escape(catid), filters).getList()
                    # voter data
                    elif cat == "voter":
                        response = dataprovider.Voter("%s" % cgi.escape(catid), self.request.get('type')).getList()
                    # results data
                    elif cat == "results":
                        if cat2 is not False:
                            response = dataprovider.Results("%s" % cgi.escape(catid), "%s" % cgi.escape(cat2), filters).getList()
                        else:
                            response = dataprovider.Results("%s" % cgi.escape(catid), "", filters).getList()
                    else:
                        response['status'] = "Unknown Request Type"
                        # key was misspelled "messsage" in the original;
                        # this branch crashed before reaching the write,
                        # so no client ever depended on the old key
                        response['message'] = "Unknown category specified."
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.out.write(Json.encode(response))
                else:  # the keys do not match: invalid request
                    response = {
                        "status": "VERIFICATION_FAILED",
                        "message": "Keys did not match. Confirm that you have the correct appsecret. %s" % urlstr
                    }
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.out.write(Json.encode(response))
            else:  # no app secret matched the token
                response = {
                    "status": "VERIFICATION_FAILED",
                    "message": "The token supplied did not match any authorized apps."
                }
                self.response.headers['Content-Type'] = 'application/json'
                self.response.out.write(Json.encode(response))
        else:  # no key supplied
            response = {
                "status": "NO_KEY",
                "message": "No key passed. Request can not be verified"
            }
            self.response.headers['Content-Type'] = 'application/json'
            self.response.out.write(Json.encode(response))
    else:  # no token supplied
        response = {
            "status": "NO_TOKEN",
            "message": "No token passed."
        }
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(Json.encode(response))