def get(self):
    """Render the "Last" template from the newest located metadata record.

    Scans up to the 10 most recent Metadata entities (newest first) and
    stops at the first one whose attached data contains both a
    "longitude" and a "latitude" field.

    NOTE(review): if none of the 10 records carries both coordinates,
    the all_data["longitude"] lookup below raises KeyError — confirm a
    fallback is not needed.
    """
    gql_query = Metadata.gql("ORDER BY receivedDateTime DESC")
    metadata_list = gql_query.fetch(10)
    for metadata in metadata_list:
        assert isinstance(metadata, Metadata)
        assert isinstance(metadata.sender, Sender)
        all_data = {}
        all_data_string = ""
        for data_key in metadata.dataList:
            assert isinstance(data_key, Key)
            data = Data.get(data_key)
            assert isinstance(data, Data)
            logger.debug(data.field)
            logger.debug(data.string)
            all_data_string = all_data_string + data.field + " = " + data.string + "<br/>"
            all_data[data.field] = data.string
        # "key in dict" replaces the deprecated dict.has_key(), which was
        # removed in Python 3 and discouraged since Python 2.2.
        if "longitude" in all_data and "latitude" in all_data:
            break
    all_data_string = "received at " + str(metadata.receivedDateTime) + "<br/>" + all_data_string
    template_values = {"longitude": all_data["longitude"],
                       "latitude": all_data["latitude"],
                       "all_data": all_data,
                       "all_data_string": all_data_string}
    logging.debug(str(template_values))
    self.writeWithTemplate(template_values, "Last")
def get(self):
    """Render the "Last" template with the most recent metadata record
    that has both longitude and latitude attached.

    Fetches the 10 newest Metadata entities and breaks at the first one
    whose data entries include both coordinate fields.

    NOTE(review): when no record in the batch has both coordinates, the
    template_values lookup raises KeyError — verify this is acceptable.
    """
    gql_query = Metadata.gql("ORDER BY receivedDateTime DESC")
    metadata_list = gql_query.fetch(10)
    for metadata in metadata_list:
        assert isinstance(metadata, Metadata)
        assert isinstance(metadata.sender, Sender)
        all_data = {}
        all_data_string = ""
        for data_key in metadata.dataList:
            assert isinstance(data_key, Key)
            data = Data.get(data_key)
            assert isinstance(data, Data)
            logger.debug(data.field)
            logger.debug(data.string)
            all_data_string = all_data_string + data.field + " = " + data.string + "<br/>"
            all_data[data.field] = data.string
        # dict.has_key() is deprecated (gone in Python 3); use the "in"
        # operator instead.
        if "longitude" in all_data and "latitude" in all_data:
            break
    all_data_string = "received at " + str(
        metadata.receivedDateTime) + "<br/>" + all_data_string
    template_values = {
        "longitude": all_data["longitude"],
        "latitude": all_data["latitude"],
        "all_data": all_data,
        "all_data_string": all_data_string
    }
    logging.debug(str(template_values))
    self.writeWithTemplate(template_values, "Last")
def get(self):
    """Store the incoming request and echo the saved fields as plain text.

    Persists the raw request payload, its parsed data entries, and a
    metadata record, then writes one "field:... string:..." line per
    stored entity back to the client.
    """
    self.sender = GetSender(self.request)
    self.raw_data = putRawData(self.request)
    self.data_list = Data.storeRequest(self.request)
    self.metadata = putMetadata(self.sender, self.raw_data, self.data_list)
    assert isinstance(self.response, Response)
    self.response.headers['Content-Type'] = "text/plain"
    out = self.response.out
    for data_key in self.data_list:
        entity = db.get(data_key)
        out.write("field:" + entity.field + " string:" + entity.string + "\n")
def get(self):
    """Render the "Data" template with the 100 newest Data entities,
    ordered by descending dataId."""
    recent = Data.gql("ORDER BY dataId DESC LIMIT 100").run()
    logging.info(recent)
    rows = []
    for entity in recent:
        row = {"dataId": entity.dataId,
               "field": entity.field,
               "string": entity.string}
        logging.info(row)
        rows.append(row)
    template_values = {"all_data": rows}
    self.writeWithTemplate(template_values, "Data")
def get(self):
    """Render the "Data" template listing the most recent 100 Data rows."""
    query = Data.gql("ORDER BY dataId DESC LIMIT 100")
    results = query.run()
    logging.info(results)
    all_data = []
    for item in results:
        entry = {
            "dataId": item.dataId,
            "field": item.field,
            "string": item.string,
        }
        logging.info(entry)
        all_data.append(entry)
    self.writeWithTemplate({"all_data": all_data}, "Data")
def get(self):
    """Store the incoming request and reply with relay state as JSON.

    Persists raw data, parsed data entries and a metadata record, then
    looks up the Relays schedule for the reporting module (identified by
    the productName / serialNumber / moduleId data fields) and writes it
    out as JSON.  If any of those fields was absent from the request the
    corresponding attribute is never set, the Relays lookup raises
    AttributeError, and the handler falls back to dumping the stored
    Data entities themselves.
    """
    self.sender = GetSender(self.request)
    self.raw_data = putRawData(self.request)
    self.data_list = Data.storeRequest(self.request)
    self.metadata = putMetadata(self.sender, self.raw_data, self.data_list)
    assert isinstance(self.response, webapp.Response)
    self.response.headers["Content-Type"] = "text/plain"
    for key in self.data_list:
        data = db.get(key)
        if data.field == "productName":
            self.productName = data.string
        if data.field == "serialNumber":
            self.serialNumber = data.string
        if data.field == "moduleId":
            self.moduleId = data.string
    try:
        relays = Relays(self.productName, self.serialNumber, self.moduleId)
        assert isinstance(relays, Relays)
        relay_states = []
        # The dict key is unused, so bind it to "_" by convention.
        for _, relay in relays.iteritems():
            assert isinstance(relay, Relay)
            relay_states.append({
                "relayId": relay.relayId,
                # Send the schedule as epoch seconds; datetime objects
                # are not directly JSON-serializable.
                "scheduledEpoch": nativeToEpoch(relay.scheduledDateTime),
                "expectedState": relay.expectedState,
            })
        self.response.out.write(dumps({"relayStates": relay_states}))
    except AttributeError:
        # Missing productName/serialNumber/moduleId: echo the stored
        # Data entities instead (comprehension replaces map+lambda; the
        # unused exception binding "e" was dropped).
        stored = [db.get(key) for key in self.data_list]
        self.response.out.write(dumps(stored))
def get(self):
    """Persist the request and answer with the module's relay schedule.

    Stores the raw payload, parsed data and metadata, extracts
    productName / serialNumber / moduleId from the stored data entries,
    and writes the matching relay states as JSON.  When one of the
    identifying fields is missing (AttributeError on the never-set
    attribute) the stored Data entities are dumped instead.
    """
    self.sender = GetSender(self.request)
    self.raw_data = putRawData(self.request)
    self.data_list = Data.storeRequest(self.request)
    self.metadata = putMetadata(self.sender, self.raw_data, self.data_list)
    assert isinstance(self.response, webapp.Response)
    self.response.headers['Content-Type'] = "text/plain"
    for key in self.data_list:
        data = db.get(key)
        if data.field == "productName":
            self.productName = data.string
        if data.field == "serialNumber":
            self.serialNumber = data.string
        if data.field == "moduleId":
            self.moduleId = data.string
    try:
        relays = Relays(self.productName, self.serialNumber, self.moduleId)
        assert isinstance(relays, Relays)
        relay_states = []
        # Only the values are needed; the key is deliberately ignored.
        for _, relay in relays.iteritems():
            assert isinstance(relay, Relay)
            relay_states.append({
                "relayId": relay.relayId,
                # Epoch seconds keep the payload JSON-serializable.
                "scheduledEpoch": nativeToEpoch(relay.scheduledDateTime),
                "expectedState": relay.expectedState
            })
        self.response.out.write(dumps({"relayStates": relay_states}))
    except AttributeError:
        # Identification fields absent: fall back to the raw stored data.
        # (List comprehension replaces map(lambda ...); the unused
        # exception variable was removed.)
        self.response.out.write(dumps([db.get(k) for k in self.data_list]))
        # Tail of an unseen decoding function: convert per-token tag indices
        # back to tag strings, keeping only positions where the mask is
        # non-zero (i.e. real tokens, not padding).
        # NOTE(review): batch_size / seq_len / pred_tag / mask / pred_label
        # are defined above this fragment — not visible here.
        for idx in range(batch_size):
            pred = [
                data.reverse_tag_to_idx[get_instance(pred_tag[idx][idy])]
                for idy in range(seq_len) if mask[idx][idy] != 0
            ]
            pred_label.append(pred)
        return pred_label


# Fix all RNG seeds for reproducible inference.
seed_num = 123
random.seed(seed_num)
torch.manual_seed(seed_num)
np.random.seed(seed_num)

if __name__ == '__main__':
    # Load the preprocessed data/alphabet bundle, then apply the
    # prediction-time configuration on top of it.
    data = Data()
    data.load('./data/PoSTagger.data')
    predict_config_path = './predict.config'
    data.readConfig(predict_config_path)
    printParameterSummary(data)
    predict_instances = getDataLoader(data.infer_path, data)
    device = torch.device("cuda:" + data.GPU if torch.cuda.is_available() else "cpu")
    # NOTE(review): the freshly constructed SequenceModel is immediately
    # overwritten by torch.load — presumably only the checkpoint matters;
    # confirm the first construction is not needed.
    model = SequenceModel(data)
    model = torch.load(data.model_save_path)
    model.eval()
    # Fragment is truncated mid-statement below (pd.read_csv call
    # continues beyond this snippet).
    words = pd.read_csv(data.infer_path,
def create_data(data_id, field, string):
    """Create and persist a DataDb entity; return the key from put()."""
    entity = DataDb()
    entity.dataId = data_id
    entity.field = field
    entity.string = string
    return entity.put()
import logging  # was used below but never imported (NameError at startup)
import os

from controller.Controller import Controller
from util.util import read
from flask import Flask
from flask_cors import CORS
from dotenv import load_dotenv

# NOTE(review): Data and DataService are referenced below but not imported
# in this snippet — confirm they are brought into scope elsewhere.

load_dotenv()
logging.basicConfig()
logging.root.setLevel(logging.INFO)

# Caching defaults to ON.  os.environ values are always strings, so the old
# check `os.environ.get('CACHE', True) == True` was only true when CACHE was
# unset — setting CACHE=true actually *disabled* the cache.  Compare strings
# instead so CACHE=true/1/yes enables and CACHE=false/0 disables it.
use_cache = os.environ.get('CACHE', 'true').strip().lower() in ('1', 'true', 'yes')

data = None
if use_cache:
    # Try the pickled cache first; fall back to fetching and caching.
    data = read('data.pickle')
    if data is None:
        data = Data()
        dataService = DataService(data)
        dataService.getAndCacheAllData()
else:
    # Cache disabled: always fetch fresh data.
    data = Data()
    dataService = DataService(data)
    dataService.getAllData()

app = Flask(__name__)
CORS(app)
Controller(app, data)
    # Tail of an unseen training routine: report the best validation score,
    # labelled as f-score for NER runs and accuracy otherwise.
    # NOTE(review): best_score and data are defined above this fragment —
    # not visible here.
    if data.NER:
        print("Training finished. Best f_score is {:0.4f}.".format(best_score))
    else:
        print("Training finished. Best accuracy is {:0.4f}.".format(best_score))


# Fix all RNG seeds for reproducible training.
seed_num = 123
random.seed(seed_num)
torch.manual_seed(seed_num)
np.random.seed(seed_num)

if __name__ == '__main__':
    # Read training configuration, build vocabularies and load pretrained
    # embeddings before constructing the dataloaders.
    data = Data()
    train_config_path = './train.config'
    data.readConfig(train_config_path)
    data.buildDictionary()
    data.getPretrainedEmbedding()
    # print parameter summary
    printParameterSummary(data)
    # build dataloaders
    training_instances = getDataLoader(data.training_path, data)
    validation_instances = getDataLoader(data.validation_path, data)
    evaluation_instances = getDataLoader(data.evaluation_path, data)
    # Persist the processed data/alphabets so inference can reload them.
    data.saveData()
    device = torch.device("cuda:"+data.GPU if torch.cuda.is_available() else "cpu")