def __init__(self):
    """Load the raw train/test records and normalize them into vector form."""
    print("Start normalizing data")
    # Index of the last batch handed out; presumably advanced by a batching
    # method elsewhere in this class — TODO confirm.
    self.last_batch_index = 0
    self.dal = data_access.DataAccess()
    # convert_data() (defined in this module) turns records into numpy arrays.
    self.train = self.convert_data(self.dal.train_data)
    self.test = self.convert_data(self.dal.test_data)
    print("End normalizing data")
def generate(startTime, endTime):
    """Build the daily-report e-mail body (HTML) for one day.

    startTime / endTime are date strings; each is widened to cover the
    whole day (00:00:00 .. 23:59:59) before the queries run.
    Returns the formatted HTML text.
    """
    startTime = '{} 00:00:00'.format(startTime)
    endTime = '{} 23:59:59'.format(endTime)
    dal = DataAccess()
    period = '{} 至 {}'.format(startTime, endTime)
    # Totals plus per-record detail tables for each business category.
    orderNum, OrderpriceNum, detailsail = dal.getdetailSailOrders(
        startTime, endTime)
    purchaseNum, purchasepriceNum, detailpur = dal.getdetailPurchases(
        startTime, endTime)
    billprice, detailbills = dal.getdetaillBills(startTime, endTime)
    purchaseprice, detailpays = dal.getdetailPays(startTime, endTime)
    # Overdue items are not limited to the requested period.
    saleOrdersMoneyExpired = dal.getSaleOrders_MoneyExpired()
    productsShipExpired = dal.getProducts_ShipExpired()
    return '''时间:{0}<br/><b>今日销售订单情况</b><br/>共<a style="color:blue ">{1}</a>单,合计金额<a style="color:blue ">{2}</a>元。 <br><b>今日采购情况订单</b><br/>共<a style="color:red ">{3}</a>单,合计金额<a style="color:red ">{4}</a>元。<br/> <b>今日收付款情况</b><br/>合计付款金额<a style="color:red ">{5}</a>元<br/>合计收款金额<a style="color:blue ">{6}</a>元。<br /><h3>销售清单</h3><br> {7}<br /><h3>采购清单</h3><br>{8}<br /><h3>收款清单</h3><br>{9}<br /><h3>付款清单</h3><br>{10}<br /> <h3>到期未收款订单信息</h3><br>{11}<br /> <h3> 到期未发货的产品信息</h3>{12} '''.format(
        period, orderNum, OrderpriceNum, purchaseNum, purchasepriceNum,
        purchaseprice, billprice, detailsail, detailpur, detailbills,
        detailpays, saleOrdersMoneyExpired, productsShipExpired)
def run(self):
    """For each index in self.Seq: run the Bonnmotion model, read the generated
    movements file, and save its parameters + checksums to the database.

    Python 2 code (uses `print >>` statements). Exits the process with
    status 1 when Bonnmotion produced no output file.
    """
    for i in self.Seq:
        # The params file name carries the loop index in place of 'INDEX'.
        modelname = readModelnameFromParamsFile(os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputparamsfile').replace('INDEX', str(i))))
        runBonnmotionModel(Config().readConfigEntry('bonnmotionvalidatepath'), i, modelname)
        paramsfilename = os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputparamsfile').replace('INDEX', str(i)))
        movementsfilename = os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputmovementsfile').replace('INDEX', str(i)))
        try:
            #open movements file
            f = gzip.open(movementsfilename, 'rb')
            movements_content = f.read()
            f.close()
        except IOError as ex:
            # No movements file: dump the parameter file for diagnosis and abort.
            print >> sys.stderr, ex
            print >> sys.stderr, "that means bonnmotion has not generated any output"
            print >> sys.stderr, "parameter file:"
            fd = open(os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputparamsfile').replace('INDEX', str(i))))
            print >> sys.stderr, fd.read()
            fd.close()
            sys.exit(1);
        p = {}
        p['identifier'] = modelname
        #read parameters
        f2 = open(paramsfilename)
        p['bmparamsfile'] = f2.read()
        f2.close()
        #create checksum
        Hashes().calcHashes(p, movements_content)
        p['user'] = getpass.getuser()
        p['datetime'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        #save in DB
        DataAccess().save(p)
def generateByWeek(startTime, endTime):
    """Build the weekly-report e-mail body (HTML).

    startTime / endTime are date strings widened to full days before the
    queries. Returns the formatted HTML text.
    """
    startTime = '{} 00:00:00'.format(startTime)
    endTime = '{} 23:59:59'.format(endTime)
    dal = DataAccess()
    # Sales order count and total value for the week.
    order_count, order_total, _ = dal.getdetailSailOrders(startTime, endTime)
    # Purchase order count and total value for the week.
    purchase_count, purchase_total, _ = dal.getdetailPurchases(startTime, endTime)
    # Total paid out this week.
    pays_total, _ = dal.getdetailPays(startTime, endTime)
    # Total received this week.
    bills_total, _ = dal.getdetaillBills(startTime, endTime)
    # Per-employee sales performance table.
    per_person = dal.getSaleRecordOfPerson(startTime, endTime)
    period = '{} 至 {}'.format(startTime, endTime)
    return ''' 时间:{0}<br/> <b>本周销售订单情况</b><br/>共<a style="color:blue ">{1}</a>单, 合计金额<a style="color:blue ">{2:,}</a>元。 <br> <b>本周采购情况订单</b><br/>共<a style="color:red ">{3}</a>单, 合计金额<a style="color:red ">{4:,}</a>元。 <br/> <b>本周收付款情况</b><br/> 合计付款金额<a style="color:red ">{5:,}</a>元<br/>合计收款金额<a style="color:blue ">{6:,}</a>元。 <br /> <h3>人员销售业绩</h3><br>{7} '''.format(
        period, order_count, order_total, purchase_count, purchase_total,
        pays_total, bills_total, per_person)
def update_lenguage(self, Lenguage):
    """Write the name/type of *Lenguage* back to the row with its id."""
    sql = """ UPDATE Lenguage SET name = %s, type = %s WHERE id = %s; """
    values = (Lenguage.name_lenguage, Lenguage.type_lenguage,
              Lenguage.id_lenguage)
    return DataAccess().execute_query(sql, values)
def delete_lengauje(self, id):
    """Delete the Lenguage row with the given id.

    id: primary key of the row to remove.
    Returns whatever DataAccess.execute_query returns for the DELETE.
    """
    # Fix: removed leftover debug `print(id)`.
    parameters = (id, )
    sql = """ DELETE FROM Lenguage WHERE id = %s; """
    execute = DataAccess().execute_query(sql, parameters)
    return execute
def get(self, collectionName, sid):
    """Return the document whose 'sid' matches, JSON-encoded with 201,
    or abort with 404 when no such document exists."""
    dao = DataAccess('TheBoss', collectionName)
    cursor = dao.select({'sid': sid})
    if cursor.count() == 0:
        abort(404, message=" {} doesn't exist".format(sid))
    else:
        document = cursor.next()
        return JSONEncoder().encode(document), 201
def run(self):
    """For every index/case combination: run the Bonnmotion app, gather its
    output files, and save parameters + checksums to the database."""
    for i in self.Seq:
        for case in self.Cases:
            paramsfilename = os.path.join(
                Config().readConfigEntry('bonnmotionvalidatepath'),
                Config().readConfigEntry(
                    'tempoutputparamsfile').replace('INDEX', str(i)))
            # NOTE(review): movementsfilename is computed but never used
            # in this method — confirm whether it can be dropped.
            movementsfilename = os.path.join(
                Config().readConfigEntry('bonnmotionvalidatepath'),
                Config().readConfigEntry(
                    'tempoutputmovementsfile').replace(
                        'INDEX', str(i)))
            outputfilename = os.path.join(
                Config().readConfigEntry('bonnmotionvalidatepath'),
                Config().readConfigEntry('tempoutputname') + str(i))
            # Run the app with its per-case parameters, if any.
            if 'appparams' in case:
                runBonnmotionApp(
                    Config().readConfigEntry('bonnmotionvalidatepath'),
                    i, self.App, case['appparams'])
            else:
                runBonnmotionApp(
                    Config().readConfigEntry('bonnmotionvalidatepath'),
                    i, self.App, '')
            # Concatenate the content of every produced output extension;
            # 'NULL' marks an extension with no file to read.
            ordering = []
            content = ''
            for ext in case['extensions']:
                ordering.append(ext)
                #open file
                if ext != 'NULL':
                    f = open(outputfilename + '.' + ext)
                    content = content + f.read()
                    f.close()
            #read parameters
            f2 = open(paramsfilename)
            params = f2.read()
            f2.close()
            p = {}
            if 'appparams' in case:
                p['appparameters'] = case['appparams']
            else:
                p['appparameters'] = ''
            p['identifier'] = self.App
            p['bmparamsfile'] = params
            # Checksums over the concatenated output content.
            Hashes().calcHashes(p, content)
            p['user'] = getpass.getuser()
            p['datetime'] = datetime.datetime.now().strftime(
                "%Y-%m-%d %H:%M")
            # Record the extension order as a comma-separated list.
            tmp = ''
            for y in ordering:
                tmp = tmp + y + ','
            p['ordering'] = tmp[0:-1]
            #save in DB
            DataAccess().save(p)
def __init__(self):
    """Create an empty command context plus its data-access helper.

    Connection and command fields start blank and are expected to be
    filled in by the caller before use.
    """
    self.ip = ""
    self.username = ""
    self.password = ""
    self.command = ""
    self.db_array = ""
    self.method = ""
    # Payload builder and database gateway.
    self.data = builder()
    self.dataAccess = DataAccess()
def insert_lenguage(self, Lenguage):
    """Insert *Lenguage* as a new (id, name, type) row."""
    sql = """ INSERT INTO Lenguage (id, name, type) VALUES (%s, %s, %s); """
    row = (Lenguage.id_lenguage, Lenguage.name_lenguage,
           Lenguage.type_lenguage)
    return DataAccess().execute_query(sql, row)
def __init__(self, id=None):
    """Load a Lenguage record by numeric id.

    NOTE(review): when *id* is not an int (including the default None),
    no attributes are set at all; and an id with no matching row raises
    IndexError on the empty result — confirm callers expect both.
    """
    if isinstance(id, int):
        parameters = {'int': id}
        sql = ("SELECT id, name, type FROM Lenguage WHERE id = %(int)s;")
        lengauge = DataAccess().get_query(sql, parameters)
        # First (only) row: columns are (id, name, type).
        self.id_lenguage = lengauge[0][0]
        self.name_lenguage = lengauge[0][1]
        self.type_lenguage = lengauge[0][2]
def create_map(start, end, ar_obs, ar_instr, sp_obs, sp_instr):
    """Fetch active regions and sunspots for [start, end] and display a
    combined synthesis map.

    start/end: time-range strings; *_obs / *_instr select observatory and
    instrument for each feature type.
    """
    # Active regions: decode chain codes and build the shapes.
    ar_access = DataAccess(start, end, 'AR', ar_obs, ar_instr)
    ar_chains = prep.decode_and_split(ar_access.get_chain_code())
    ar_carr_synthesis, ar_pix_synthesis = ar.get_shapes(
        ar_chains, ar_access.get_pixel_start_x(),
        ar_access.get_pixel_start_y(), ar_access.get_filename(),
        ar_access.get_noaa_number(), ar_access.get_ar_id(),
        ar_access.get_date())
    # Sunspots: same pipeline with the SP feature type.
    sp_access = DataAccess(start, end, 'SP', sp_obs, sp_instr)
    sp_chains = prep.decode_and_split(sp_access.get_chain_code())
    sp_carr, sp_pix = sp.get_shapes(
        sp_chains, sp_access.get_pixel_start_x(),
        sp_access.get_pixel_start_y(), sp_access.get_filename(),
        sp_access.get_sp_id(), sp_access.get_date())
    # Keep only sunspots that fall inside active-region contours, then plot.
    sp_synthesis = sp.make_sp_synthesis(ar_contour=ar_carr_synthesis,
                                        sp_carr=sp_carr)
    prep.display_object(ar_carr_synthesis, sp_synthesis)
def __init__(self, inst=1000, data=1000): self.InsMEM = [0 for i in range(inst)] # 주소 : 0x400000 self.DataMEM = [0 for i in range(data)] # 주소 : 0x10000000 self.StackMEM = [2] self.Regis = [0 for i in range(32)] self.Regis[29] = 0x7ffff52c self.PC = 0 self.DecodeAssem = DecodeAssem(self) self.DecodeBinary = DecodeBinary(self) self.DataAccess = DataAccess(self)
def put(self, collectionName):
    """Insert the JSON request body as a new document with a random 'sid'.

    Returns the stored document JSON-encoded with status 201.
    (Python 2 code — uses print statements.)
    """
    commonDo = DataAccess('TheBoss', collectionName)
    content = request.data
    jsonContent = json.loads(content)
    # Attach a freshly generated id under the 'sid' key.
    sid = getRandomID()
    jsonContent['sid'] = sid
    # Debug trace of the document before insertion.
    print "before=="
    print jsonContent
    commonDo.insert(jsonContent)
    return JSONEncoder().encode(jsonContent), 201
def get_lengauges(self, id=None):
    """Fetch Lenguage rows, optionally filtered by id.

    id: optional numeric id; when None, every row is returned.
    Returns the raw result of DataAccess.get_query().
    """
    parameters = {}
    arguments = ''
    # Fix: compare to None with `is not` (PEP 8); the redundant
    # len(parameters) == 1 re-check is folded into the same branch —
    # it was true exactly when id was provided.
    if id is not None:
        parameters.update({'int': id})
        arguments = "WHERE id = %(int)s"
    sql = "SELECT id, name, type FROM Lenguage " + arguments + ";"
    lst_lengauge = DataAccess().get_query(sql, parameters)
    return lst_lengauge
def post(self, collectionName, sid):
    """Merge the JSON request body into the document with the given sid.

    404s when no document matches; otherwise writes the merged document
    back via $set and returns it JSON-encoded with status 201.
    """
    currentContent = {}
    commonDo = DataAccess('TheBoss', collectionName)
    records = commonDo.select({'sid': sid})
    if records.count() == 0:
        abort(404, message=" {} doesn't exist".format(sid))
    else:
        currentContent = records.next()
        # '_id' is database-managed; drop it so the merge cannot touch it.
        del currentContent['_id']
        content = request.data
        updateContent = json.loads(content)
        # NOTE(review): mergeDict (defined elsewhere in this file) mutates
        # currentContent in place — confirm it also returns the merged dict.
        result = mergeDict(currentContent, updateContent)
        commonDo.update({'sid': sid}, {"$set": result})
        return JSONEncoder().encode(result), 201
        # Fix: removed a duplicated, unreachable
        # commonDo.update({'sid': sid}, {"$set": result}) that followed
        # the return statement.
def trainModel(self):
    """Re-train the classifier from every document in the configured
    collection, saving a timestamped backup plus the live model.

    Returns the literal status string 'process completed'.
    """
    url = parameter.get("database", "url")
    masterKey = parameter.get("database", "masterkey")
    dbName = parameter.get("database", "db")
    collName = parameter.get("database", "collection")
    dao = DataAccess(url, masterKey, dbName)
    documentlist = dao.findAll(collName)
    # Pull the domain/keywords payloads out of the '$v' wrappers.
    arr = []
    for doc in documentlist:
        arr.append({
            'domain': doc['$v']['domain']['$v'],
            'keywords': doc['$v']['keywords']['$v'],
        })
    print('***train data***', arr)
    # Timestamp suffix: day_month_year_hour_minute_second (no zero padding).
    now = datetime.datetime.now()
    stamp = (now.day, now.month, now.year, now.hour, now.minute, now.second)
    dateTime = '_'.join(str(part) for part in stamp)
    classifier.saveModel(arr, '../model/backup/wbg' + '_' + dateTime)
    classifier.saveModel(arr, '../model/wbg')
    return 'process completed'
def my_index_post():
    """Handle the search-form POST: look up services for the submitted text
    and date and render the result page.

    Renders 'logged_in.html' with S_List when matches exist, or with an
    empty M_List when the lookup returns nothing.
    """
    text = request.form['text']
    date_picked = request.form['date_picked']
    print("DATEPICKED: " + date_picked + " ")
    processed_text = text.upper()
    results_list = DataAccess(processed_text, date_picked)
    # Fix: empty-result check hoisted before the build loop (the loop was a
    # no-op for an empty list, so behavior is unchanged); removed the unused
    # `insert_return` variable and commented-out code.
    if len(results_list) == 0:
        empty_list = []
        return render_template('logged_in.html', M_List=empty_list)
    service_list = []
    for items in results_list:
        # Collect the values of each record, preserving field order.
        result = []
        for key, value in items.items():
            result.append(value)
            print(key)
        cservice = CityService(result)
        service_list.append(cservice)
        print(cservice.SERVICE_NAME)
    return render_template('logged_in.html', S_List=service_list)
def test_active_regions(ar_id, grav_long, grav_lat):
    """Ad-hoc active-region position check.

    NOTE(review): the parameters are immediately shadowed/unused and the
    body is wrapped in a __main__ guard — this looks like scratch test
    code; confirm intended structure.
    """
    # Active regions position testing
    if __name__ == '__main__':
        ar_data = DataAccess('2003-09-26T00:00:00', '2003-09-26T01:00:00',
                             'AR', 'SOHO', 'MDI')
        ar_chain_encoded = prep.decode_and_split(ar_data.get_chain_code())
        ar_id = ar_data.get_ar_id()
        ar_centers_lon = ar_data.get_grav_center_long()
        ar_centers_lat = ar_data.get_grav_center_lat()
        # NOTE(review): ar_carr_synthesis is not defined in this scope —
        # it presumably exists only in the session this was pasted from.
        ar = ar_carr_synthesis[3]
        prep.display_object([ar], "")
        ar2 = np.array(ar)
        x = ar2[0]
        y = ar2[1]
        length = len(ar2[0]) + len(ar2[1])
        # Centroid as the mean of the x and y coordinate arrays.
        centroid = (sum(x) / len(ar2[0]), sum(y) / len(ar2[1]))
        print(centroid)
    # Tail of the chain-code splitting helper (its `def` line is outside
    # this view; indentation reconstructed from the identical function
    # visible elsewhere in this file).
    for chains in chain_codes:
        # DB drivers may return the code as bytes; normalise to str first.
        if type(chains) is bytes:
            chains = chains.decode("utf-8")
        # Split e.g. "0123" into [0, 1, 2, 3].
        splitted_chain = list(map(int, str(chains)))
        codes.append(splitted_chain)
    return codes


if __name__ == '__main__':
    # http://voparis-helio.obspm.fr/hfc-gui/showmap.php?date=2010-01-01%2000:03:02&feat=ar&style=pixel
    # http://voparis-helio.obspm.fr/helio-hfc/HelioQueryService?FROM=VIEW_AR_HQI&STARTTIME=2010-01-01T00:00:00&ENDTIME=2010-01-01T01:00:00&WHERE=OBSERVAT,SOHO;INSTRUME,MDI
    from DataAccess import DataAccess
    # Draw the first AR and first SP shape of the sample time range.
    data = DataAccess('2010-01-01T00:00:00', '2010-01-01T02:59:00', 'AR')
    chain_encoded = encode_and_split([data.get_chain_code()[0]])
    sp_data = DataAccess('2010-01-01T00:00:00', '2010-01-01T02:59:00', 'SP')
    sp_chain = encode_and_split([sp_data.get_chain_code()[0]])
    ar = chain_code(chain_encoded[0], data.get_pixel_start_x()[0],
                    data.get_pixel_start_y()[0])
    sp = chain_code(sp_chain[0], sp_data.get_pixel_start_x()[0],
                    sp_data.get_pixel_start_y()[0])
    draw(ar, sp)
class JSONEncoder(json.JSONEncoder):
    """json.JSONEncoder that can serialise Mongo ObjectId values."""

    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)
        return json.JSONEncoder.default(self, o)


app = Flask(__name__)
api = Api(app)

ALL_PERSON_LIST = {}
ALL_SERVICE_LIST = {}

parser = reqparse.RequestParser()
parser.add_argument('task', type=str)

# Shared data-access handles for the two collections this service uses.
serviceData = DataAccess('TheBoss', 'service')
personData = DataAccess('TheBoss', 'person')


# merge new into origin: replace string/array values, recurse into dicts
def mergeDict(origin, new):
    """Recursively merge *new* into *origin* in place and return *origin*.

    Dict values are merged recursively; any other value in *new*
    (strings, arrays, numbers) replaces the one in *origin*.
    """
    for key in new.keys():
        if origin.has_key(key):
            if type(origin[key]) is dict:
                mergeddict = mergeDict(origin[key], new[key])
                origin[key] = mergeddict
            else:
                origin[key] = new[key]
        else:
            origin[key] = new[key]
    # Fix: the function previously fell off the end and returned None,
    # so callers doing `result = mergeDict(a, b)` received None even
    # though the merge had happened. Return the merged dict.
    return origin
def delete(self, collectionName, sid):
    """Remove the document(s) in *collectionName* whose 'sid' matches,
    returning status 201."""
    DataAccess('TheBoss', collectionName).delete({'sid': sid})
    return 201
def encode_and_split(chain_codes):
    """Normalise each chain code into a list of single-digit ints.

    chain_codes: iterable of chain-code strings (or utf-8 encoded bytes).
    Returns a list with one list[int] per input chain code.
    """
    print("encode_and_split() START")
    codes = []
    for chains in chain_codes:
        # Fix: isinstance() instead of `type(...) is bytes` (idiomatic,
        # and tolerant of bytes subclasses).
        if isinstance(chains, bytes):
            chains = chains.decode("utf-8")
        splitted_chain = list(map(int, str(chains)))
        codes.append(splitted_chain)
    return codes


if __name__ == '__main__':
    # http://voparis-helio.obspm.fr/hfc-gui/showmap.php?date=2010-01-01%2000:03:02&feat=ar&style=pixel
    # http://voparis-helio.obspm.fr/helio-hfc/HelioQueryService?FROM=VIEW_AR_HQI&STARTTIME=2010-01-01T00:00:00&ENDTIME=2010-01-01T01:00:00&WHERE=OBSERVAT,SOHO;INSTRUME,MDI
    # Sample chain code kept for manual experimentation (currently unused).
    dir = [
        '5656565554554555455556556554545454554444544444445444444445445445454544444332333223231222221110111111111100000100100010111000011000000111'
        '100112220711232235334544345654544532212234434343232222222222111211211110000006770666677007010707077677676767767776776776667666666666'
    ]
    from DataAccess import DataAccess
    data = DataAccess('2011-07-30T00:00:24', '2011-07-30T00:00:24')
    chain_encoded = encode_and_split(data.get_chain_code())
    chain_code(chain_encoded[0], data.get_pixel_start_x()[0],
               data.get_pixel_start_y()[0])
def main():
    """Dispatch on the requested task: determine or validate Bonnmotion
    models and apps, reading/writing hashes via DataAccess.

    Python 2 code (uses dict.has_key). Task selection comes from the
    parsed script arguments.
    """
    config = Config()
    config.setArgsConfig(parseScriptArguments(sys.argv[1:]))
    if config.argsconfig['task'] is Task.DETERMINEMODEL:
        # Generate params files per modeltest file and store their hashes.
        for filename in listOfModelTestFiles(
                Config().argsconfig['inputDirOrFile']):
            generator = BonnmotionParamsGenerator()
            generator.setModeltestFilename(filename)
            generator.createBonnmotionParamsFiles()
            if config.argsconfig['delete'] == True:
                DataAccess().cleartable(generator.modelname)
            log("starting model-determination of " + generator.modelname)
            md = ModelDeterminationDispatcher(generator.noOfFilesCreated)
            log("done. " + str(generator.noOfFilesCreated) + " hashes saved.")
            md.cleanup()
    elif config.argsconfig['task'] is Task.VALIDATEMODEL:
        md5 = []
        sha1 = []
        result = []
        if config.argsconfig.has_key('arg'):
            DataAccess().get(config.argsconfig['arg'], result, md5, sha1)
            #get parameters and hashes from database
            log("starting model-validation of " + config.argsconfig['arg'])
        else:
            DataAccess().get4(result, md5, sha1)
        # Re-create one params file per stored result, indexed by n.
        n = 0
        for x in result:
            f = open(
                os.path.join(
                    Config().readConfigEntry('bonnmotionvalidatepath'),
                    config.readConfigEntry('tempoutputparamsfile').replace(
                        'INDEX', str(n))), 'w')
            f.write(x['bmparamsfile'])
            f.close()
            n += 1
        mv = ModelValidationDispatcher(n, md5, sha1)
        log("done. " + str(n) + " hashes checked.")
    elif config.argsconfig['task'] is Task.DETERMINEAPP:
        for filename in listOfAppTestFiles(
                config.argsconfig['inputDirOrFile']):
            generator = BonnmotionParamsGenerator()
            generator.setApptestFilename(filename)
            params = generator.parseApptestFile()
            if config.argsconfig['delete'] == True:
                DataAccess().cleartable(params['app'])  ##APP-DATATABLE
            if params['inputDirOrFile'] is InputDirOrFile.FILE:
                # Single modeltest file referenced by the apptest.
                generator2 = BonnmotionParamsGenerator()
                generator2.setModeltestFilename(params['paramsfile'])
                generator2.createBonnmotionParamsFiles()
                log("starting app determination: " + filename +
                    ", modeltest: " + params['paramsfile'])
                bmd = BonnmotionDispatcher(
                    generator2.noOfFilesCreated,
                    config.readConfigEntry('bonnmotionvalidatepath'))
                add = AppDeterminationDispatcher(generator2.noOfFilesCreated,
                                                 params)
                log("done")
                add.cleanup()
            elif params['inputDirOrFile'] is InputDirOrFile.DIRECTORY:
                # Run every *.modeltest file found in the directory.
                for file in os.listdir(params['paramsfile']):
                    if file.endswith('.modeltest'):
                        generator2 = BonnmotionParamsGenerator()
                        generator2.setModeltestFilename(
                            os.path.join(params['paramsfile'], file))
                        generator2.createBonnmotionParamsFiles()
                        log("starting app determination: " + filename +
                            ", modeltest: " + file)
                        bmd = BonnmotionDispatcher(
                            generator2.noOfFilesCreated,
                            config.readConfigEntry('bonnmotionvalidatepath'))
                        add = AppDeterminationDispatcher(
                            generator2.noOfFilesCreated, params)
                        add.cleanup()
                        log("done")
    elif config.argsconfig['task'] is Task.VALIDATEAPP:
        result = []
        if config.argsconfig.has_key('arg'):
            DataAccess().get2(config.argsconfig['arg'], result)
            #get parameters and hashes from database
            log("starting app-validation of " + config.argsconfig['arg'])
        else:
            DataAccess().get3(result)
        # Re-create one params file per stored result, indexed by n.
        n = 0
        for x in result:
            f = open(
                os.path.join(
                    Config().readConfigEntry('bonnmotionvalidatepath'),
                    config.readConfigEntry('tempoutputparamsfile').replace(
                        'INDEX', str(n))), 'w')
            f.write(x['bmparamsfile'])
            f.close()
            n += 1
        bmd = BonnmotionDispatcher(
            n,
            config.readConfigEntry('bonnmotionvalidatepath'))
        AppValidationDispatcher(n, result).cleanup()
        log("done. " + str(n) + " hashes checked.")
# active region then sunspot is added to array proportion = len(result) / len(sp_zip) if proportion == 1.0: sunspots.append(sp) break return sunspots if __name__ == '__main__': # Active region + Sunspot testing from DataAccess import DataAccess import ActiveRegion as ar # setting active regions data = DataAccess('2003-10-21T00:00:00', '2003-10-24T00:00:00', 'AR', 'SOHO', 'MDI') chain_encoded = prep.decode_and_split(data.get_chain_code()) ar_carr_synthesis, ar_pix_synthesis = ar.get_shapes( chain_encoded, data.get_pixel_start_x(), data.get_pixel_start_y(), data.get_filename(), data.get_noaa_number(), data.get_ar_id(), data.get_date()) # setting sunspots sp_data = DataAccess('2003-10-21T00:00:00', '2003-10-24T00:00:00', 'SP', 'SOHO', 'MDI') sp_chain_encoded = prep.decode_and_split(sp_data.get_chain_code()) sp_carr, sp_pix = get_shapes(sp_chain_encoded, sp_data.get_pixel_start_x(), sp_data.get_pixel_start_y(), sp_data.get_filename(), sp_data.get_sp_id(),
minor=True) ax.set_xticks(np.arange(longitude_start, longitude_end, break_between)) ax.set_yticks(np.arange(latitude_start, latitude_end, break_between)) ax.grid(which='both') ax.add_collection(p) # push grid lines behind the elements ax.set_axisbelow(True) plt.show() if __name__ == '__main__': from DataAccess import DataAccess data = DataAccess('2010-01-01T00:03:02', '2010-01-01T04:03:02') chain_encoded = encode_and_split(data.get_chain_code()) cords2 = get_shapes(chain_encoded, data.get_pixel_start_x(), data.get_pixel_start_y(), "2.fits") display_object(cords2) # # coordinates - numpy array with coordinates of the contour of the object # # Function convets from pixel coordinates to carrington # # Return - numpy array with carrington coordinates # def convert_to_carrington(coordinates, filename): # np_carrington_array = [] # # for c in coordinates:
# if the longitude difference is smaller than width of ar # and latitude is smaller than height of ar then is success # otherwise fail if lon_diff < width and lat_diff < height: print("SUCCESS", lon_diff) success += 1 else: print("FAIL", lon_diff) print("long_center:", long) print("calculated:", centroid[0]) fail += 1 # prep.display_object(carr_coords, []) else: ignored += 1 print("successes = ", success) print("fail = ", fail) print("ignored = ", ignored) # Active regions position testing if __name__ == '__main__': ar_data = DataAccess('2003-09-28T00:00:00', '2003-10-23T01:00:00', 'AR', 'SOHO', 'MDI') ar_chain_encoded = prep.decode_and_split(ar_data.get_chain_code()) ar_id = ar_data.get_ar_id() ar_centers_lon = ar_data.get_grav_center_long() ar_centers_lat = ar_data.get_grav_center_lat() test_active_regions(ar_id, ar_centers_lon, ar_centers_lat)
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.preprocessing.text import Tokenizer
import DataAccess.DataAccess as data_access
import string

# Module-level data-access handle shared by the helpers below.
dal = data_access.DataAccess()


def convert_data(data):
    """Convert records carrying .x (word) and .y (label) into a dict of
    numpy arrays: {"x": inputs, "y": labels}.
    """
    x = list()
    y = list()
    for z in data:
        x.append(convert_word_to_vec(z.x))
        y.append(convert_label_to_vec(z.y))
    return {"x": np.array(x), "y": np.array(y)}


def convert_word_to_vec(word):
    """Bag-of-letters encoding: a 26-dim vector of a-z occurrence counts.

    NOTE(review): any character outside a-z (after lower()) raises
    ValueError from .index() — confirm inputs are purely alphabetic.
    """
    vec = np.zeros(26)
    for c in word.lower():
        i = string.ascii_lowercase.index(c)
        vec[i] += 1
    return vec
def default(self, obj):
    """Make numpy scalar and array types JSON-serialisable.

    NOTE(review): the super() call names `Encoder`, so this method belongs
    to an Encoder(json.JSONEncoder) class defined outside this view.
    """
    if isinstance(obj, np.integer):
        return int(obj)
    elif isinstance(obj, np.floating):
        return float(obj)
    elif isinstance(obj, np.ndarray):
        return obj.tolist()
    else:
        return super(Encoder, self).default(obj)


if __name__ == '__main__':
    # DataAccess + Database testing
    from DataAccess import DataAccess
    ar = DataAccess('2003-10-06T08:54:09', '2003-10-09T10:54:09', 'AR',
                    'SOHO', 'MDI')
    sp = DataAccess('2003-10-06T08:54:09', '2003-10-09T10:54:09', 'SP',
                    'SOHO', 'MDI')
    fil = DataAccess('2003-10-06T08:54:09', '2003-10-09T10:54:09', 'FIL',
                     'MEUDON', 'SPECTROHELIOGRAPH')
    # Exercise the load helpers with the first id of each feature type.
    ar_id = ar.get_ar_id()[0]
    sp_id = sp.get_sp_id()[0]
    fil_id = fil.get_fil_id()[0]
    print("load_ar_from_database() TEST", load_ar_from_database(ar_id))
    print("load_sp_from_database() TEST", load_sp_from_database(sp_id))
    print("load_fil_from_database() TEST", load_fl_from_database(fil_id))
    print("----------------------------------------------------------------")
def main():
    """Entry point: ask for an input file, load it, and walk the tree
    in post-order."""
    source = DataAccess(request_file())
    repo = Repository(source)
    repo.get_post_order_tree()