def open_table(self):
    """Build and return the act_window action for the outbound report query.

    Validates the requested date range, copies the filter fields into the
    action context, assembles the search domain, clears rows created by a
    previous query run, and returns the window-action descriptor.

    Raises exceptions.Warning when the date range is missing or invalid.
    """
    record = self.read()[0]
    ctx = self._context.copy()

    # Both dates are mandatory.
    if not (record['start_date'] and record['end_date']):
        raise exceptions.Warning('查询日期不能为空')

    today = datetime.date(datetime.now())
    if record['start_date'] > today:
        raise exceptions.Warning('开始日期不能大于当前日期')
    elif record['start_date'] > record['end_date']:
        raise exceptions.Warning('开始日期不能大于结束日期')
    elif record['end_date'] > today:
        raise exceptions.Warning('结束日期不能大于当前日期')
    ctx['start_date'] = record['start_date']
    ctx['end_date'] = record['end_date']

    # Carry the remaining filter fields into the action context.
    for field in ('werks', 'vtweg', 'vkorgtext', 'kunnr', 'ywy'):
        ctx[field] = record[field]

    # Mandatory date-range clauses, then one optional clause per filter.
    domain = [
        ('LFADT', '>=', ctx['start_date']),
        ('LFADT', '<=', ctx['end_date']),
    ]
    if ctx['werks'] and ctx['werks'] != '0000':  # '0000' means "all plants"
        domain.append(('werks', '=', ctx['werks']))
    if ctx['vtweg']:
        domain.append(('vtweg', '=', ctx['vtweg'][0]))
    if ctx['vkorgtext']:
        domain.append(('vkorgtext', 'ilike', ctx['vkorgtext'][0]))
    if ctx['kunnr']:
        domain.append(('kunnr', '=', ctx['kunnr'][0]))
    if ctx['ywy']:
        domain.append(('ywy', '=', ctx['ywy'][0]))

    # Remove the data rows created while building the previous query.
    self.init_outbound_date(ctx)

    return {
        'name': '出库报表查询',
        'view_type': 'form',
        'view_mode': 'tree,graph',
        'res_model': 'outbound.final',
        'type': 'ir.actions.act_window',
        'context': ctx,
        'domain': domain,
    }
def userinsertPurchase():
    """Record a package purchase for the logged-in site user, then redirect
    to the purchase list; non-user sessions are logged out instead."""
    try:
        if adminLoginSession() != 'user':
            # Not a plain-user session: clear it via the logout handler.
            return adminLogoutSession()
        vo = PurchaseVO()
        dao = PurchaseDAO()
        vo.purchase_LoginId = session['session_loginId']
        vo.purchaseDate = datetime.date(datetime.now())
        vo.purchaseTime = datetime.time(datetime.now())
        vo.purchase_PackageId = request.args.get('purchase_PackageId')
        dao.insertPurchase(vo)
        return redirect(url_for('userViewPurchase'))
    except Exception as ex:
        # NOTE(review): failures are only printed; the view then returns None.
        print(ex)
def compare_dates(husb_death, wife_death, divorce):
    """Return True when the divorce date falls after a spouse's death date.

    All dates are '%d %b %Y' strings (e.g. '01 Jan 2000'); the value 'NA'
    marks a spouse with no death date and skips that comparison. When both
    death dates are present, True means the divorce is after either one.

    Raises ValueError if a date that is actually compared (including the
    divorce date) cannot be parsed — same as the original triple-branch code.
    """
    def _parse(value):
        # Normalise a "DD Mon YYYY" string into a datetime.date.
        return datetime.date(datetime.strptime(str(value), "%d %b %Y"))

    div = _parse(divorce)
    if husb_death == 'NA':
        return div > _parse(wife_death)
    if wife_death == 'NA':
        return div > _parse(husb_death)
    return div > _parse(husb_death) or div > _parse(wife_death)
def start(self):
    """Initialise runtime state: today's date, signer-name lists, the menu
    layout, the settings save/load mappings, and the drawing-scale choices."""
    self.curfile = None
    # Today's date as [year, month, day] integers.
    self.date_today = list(
        map(int, str(datetime.date(datetime.now())).split('-')))
    self.date_list = []
    self.date_list1 = []
    self.date_list2 = []
    self.second_name_constructor = []
    self.second_name_checker = []
    self.second_name_approv = []
    file_actions = [('Новые настройки', self.onNew),
                    ('Загрузить настройки', self.load),
                    ('Сохранить настройки', self.save),
                    ('Сохранить как', self.save_as),
                    ('Выйти', sys.exit)]
    self.menu_list = [('File', file_actions)]
    # Settings key -> value producer, used when saving.
    # NOTE(review): the misspelled keys ('construscor_date', 'chceker_date')
    # are part of the stored-settings format; renaming them would break
    # previously saved files.
    self.data_save = [
        ('constructor', self.second_name_constructor),
        ('checker', self.second_name_checker),
        ('approver', self.second_name_approv),
        ('construscor_date',
         lambda: self.date_to_list(self.date_list, self.date)),
        ('chceker_date',
         lambda: self.date_to_list(self.date_list1, self.date_1)),
        ('approv_date',
         lambda: self.date_to_list(self.date_list2, self.date_2)),
        ('detail_number', lambda: self.detail_number.toPlainText()),
        ('detail_name', lambda: self.detail_name.toPlainText()),
        ('factory_name', lambda: self.factory_name.toPlainText()),
        ('scale', lambda: self.scale.currentIndex()),
    ]
    # Settings key -> target list / applier callback, used when loading.
    self.data_load = [
        ('constructor', self.second_name_constructor),
        ('checker', self.second_name_checker),
        ('approver', self.second_name_approv),
        ('construscor_date', self.date_list),
        ('chceker_date', self.date_list1),
        ('approv_date', self.date_list2),
        ('detail_number', lambda k: self.detail_number.setPlainText(k)),
        ('detail_name', lambda k: self.detail_name.setPlainText(k)),
        ('factory_name', lambda k: self.factory_name.setPlainText(k)),
        ('scale', lambda k: self.scale.setCurrentIndex(k)),
    ]
    # Standard drawing scales offered in the UI.
    self.scale_list = ['100:1', '50:1', '40:1', '20:1', '10:1', '5:1',
                       '4:1', '2,5:1', '2:1', '1:1', '1:2', '1:2,5',
                       '1:4', '1:5', '1:10', '1:15', '1:20', '1:25',
                       '1:40', '1:50', '1:75', '1:100', '1:200',
                       '1:400', '1:500', '1:800', '1:1000']
def __init__(self):
    """Set up the dialog: base Qt init, widget factory, today's date,
    default state, and any previously persisted settings."""
    QtWidgets.QDialog.__init__(self)
    self.construct_class = MakeWidgets()
    # Today's date as [year, month, day] integers.
    self.date_today = list(
        map(int, str(datetime.date(datetime.now())).split('-')))
    self.setupUi(self)
    # Default (unconfigured) state.
    self.except_folders_list = []
    self.constructor_list = []
    self.checker_list = []
    self.add_default_watermark = False
    self.auto_save_folder = False
    self.watermark_path = ''
    self.watermark_position = []
    self.sort_files = False
    # Apply persisted settings when a saved configuration exists.
    self.load_settings = self.get_settings()
    if self.load_settings:
        self.apply_settings()
def crawl(request):
    """Refresh the menu table: delete all stored menus, re-scrape today's
    menu into menuPaper.txt, and store today's lunch/dinner entry.

    Always returns 0 (kept from the original interface).
    """
    Menu.objects.all().delete()
    # Re-generate menuPaper.txt for today.
    # assumes `tday` is today's date-like module global — TODO confirm
    get_menu(tday)
    today_str = str(datetime.date(datetime.now()))
    lunch = ""
    dinner = ""
    # BUG FIX: the original tested `l[0] is datetime.date()`, which both
    # misuses identity comparison on a string and calls date() with no
    # arguments (a guaranteed TypeError). Compare against today's ISO date
    # string instead, matching the file's "<date>!<lunch>!<dinner>" lines.
    # Also: the file handle was never closed; use a context manager.
    with open("menuPaper.txt", 'r') as f:
        for line in f.readlines():
            parts = line.split("!")
            if parts[0] == today_str:
                lunch = parts[1]
                dinner = parts[2]
    create_menu_db(tday, lunch, dinner)
    return 0
def main():
    """CLI entry point: with no arguments, take a dated snapshot and rotate
    old ones; otherwise dispatch the sub-command given on argv."""
    # Ignore warnings:
    import warnings
    warnings.simplefilter("ignore")

    args = sys.argv
    config = load_config(
        '{}/elasticsearch_backup_config.yaml'.format(config_dir))
    hostport = config['HOSTPORT']
    repo_name = config['REPO']
    limit = config['LIMIT']  # number of days of snapshots to retain
    use_searchguard = config['USE_SEARCHGUARD']
    os.environ['NO_PROXY'] = hostport  # bypass any proxy for the cluster

    if len(args) == 1:
        # Default mode: snapshot named after today's date, then rotation.
        take_snapshot(hostport, repo_name,
                      'snapshot-' + str(datetime.date(datetime.now())),
                      use_searchguard)
        rotate_snapshot(hostport, repo_name, limit, use_searchguard)
        return

    if '--list-all-snapshots' in args:
        print(get_all_snapshot_names(hostport, repo_name, use_searchguard))
        return

    # The remaining sub-commands all need a snapshot name as argv[2];
    # the priority order of the checks matches the original elif chain.
    if '--check-one-snapshot' in args:
        action = check_snapshot
    elif '--delete-one-snapshot' in args:
        action = delete_snapshot
    elif '--take-snapshot' in args:
        action = take_snapshot
    else:
        print('Invalid argument !')
        return
    if len(args) < 3:
        print('Missing snapshot name !')
        sys.exit(1)
    action(hostport, repo_name, args[2], use_searchguard)
def calculate_age():
    """Read the name and date-of-birth entry widgets, display the computed
    age in a text area, and clear the inputs; on bad input, show a
    retry/cancel warning dialog."""
    try:
        name = name_entry.get()
        dob = datetime.date(int(year_entry.get()),
                            int(month_entry.get()),
                            int(date_entry.get()))
        bday = Person(name, dob)
        textArea = tk.Text(master=win, height=10, width=30)
        textArea.grid(column=1, row=6)
        answer = "Heyy {bady}!!!.You are {age} years old!!! ".format(
            bady=name, age=bday.age())
        textArea.insert(tk.END, answer)
        # Reset the entry boxes after the submit button is pressed.
        name_entry.delete(0, tk.END)
        date_entry.delete(0, tk.END)
    except ValueError:
        # Non-numeric / invalid date input ends up here.
        return messagebox.askretrycancel(
            'Warning', 'Plzz Enter Detail and Currect DOB ?')
def reportToExcel(itemResult):
    """Write question/answer pairs to an .xls report named
    "oushen-<keyword>-<today>.xls".

    itemResult: iterable of dicts; entries lacking both 'question' and
    'answer' keys are skipped. Relies on the module-level `keyword` for the
    output file name.
    """
    # One workbook with a single sheet; cell_overwrite_ok=True allows
    # re-writing a cell without xlwt raising.
    wbk = xlwt.Workbook()
    sheet = wbk.add_sheet('Sheet1', cell_overwrite_ok=True)
    row = 0
    for item in itemResult:
        try:
            if 'answer' in item and 'question' in item:
                sheet.write(row, 0, item['question'])
                sheet.write(row, 1, item['answer'])
                row += 1
        except Exception:
            # Best-effort: skip rows xlwt cannot serialise instead of
            # aborting the whole report. (Was a bare `except:`, which also
            # swallowed KeyboardInterrupt/SystemExit.)
            pass
    # Today's date only (no time part), e.g. 2016-08-09.
    today_date = datetime.date(datetime.today())
    wbk.save("oushen-" + keyword + "-" + str(today_date) + '.xls')
# Refresh the SSE Composite (000001.XSHG) daily-bar table `szzs`:
# fetch bars up to date `d` via jqdatasdk, truncate the table, re-insert.
szzs_df = jq.get_price('000001.XSHG', end_date=d, frequency='daily',
                       fields=['open', 'close', 'high', 'low', 'volume'],
                       skip_paused=True, fq='pre')
szzs_df.reset_index(inplace=True, drop=False)  # move the date index into column 0
szzs_list = szzs_df.values.tolist()
total = 0
sql10 = "truncate table szzs"
cursor.execute(sql10)
connect.commit()
for j in range(len(szzs_list)):
    try:
        sql = "INSERT INTO szzs (date,open,close,high,low,volume) VALUES ( '%s', %.2f ,%.2f ,%.2f ,%.2f,%.2f )"
        # Column 0 is a pandas Timestamp; reduce it to a plain date.
        date = datetime.date(
            datetime.fromtimestamp(szzs_list[j][0].timestamp()))
        data = (date, szzs_list[j][1], szzs_list[j][2], szzs_list[j][3],
                szzs_list[j][4], szzs_list[j][5])
        cursor.execute(sql % data)
    except Exception as e:
        connect.rollback()  # roll back the failed insert and keep going
        continue
    else:
        connect.commit()  # commit each successful insert
        total = total + cursor.rowcount
print('上证指数历史数据更新完成,共更新了', total, '条数据')
# Update the date table next (continues below).
total = 0
date_list = []
sql = "SELECT date FROM szzs order by id "
dm_new_list=df_new.values.tolist() #获取最新日期表 #1.更新上证指数 szzs_df=jq.get_price('000001.XSHG', end_date=d, frequency='daily', fields=['open', 'close', 'high', 'low', 'volume'], skip_paused=True, fq='pre') szzs_df.reset_index(inplace=True,drop=False) szzs_list=szzs_df.values.tolist() total=0; sql10="truncate table szzs" cursor.execute(sql10) connect.commit() for j in range(len(szzs_list)): try: sql = "INSERT INTO szzs (date,open,close,high,low,volume) VALUES ( '%s', %.2f ,%.2f ,%.2f ,%.2f,%.2f )" date = datetime.date(datetime.fromtimestamp(szzs_list[j][0].timestamp())) data = (date,szzs_list[j][1],szzs_list[j][2],szzs_list[j][3],szzs_list[j][4],szzs_list[j][5]) cursor.execute(sql % data) except Exception as e: connect.rollback() # 事务回滚 continue else: connect.commit() # 事务提交 total=total+cursor.rowcount print('上证指数历史数据更新完成,共更新了',total,'条数据') #2.更新日期表 total=0; date_list=[] sql = "SELECT date FROM szzs order by id "
# Build the stock-code list from table `dmb`, then fetch and store daily
# bars for each code into its per-stock table TB<code>.
sql3 = "SELECT dm FROM dmb order by id "
cursor.execute(sql3)
for row in cursor.fetchall():
    # Codes starting with '6' are Shanghai-listed; the rest Shenzhen.
    if row[0].startswith('6'):
        r = row[0] + '.XSHG'
    else:
        r = row[0] + '.XSHE'
    dm_list.append(r)
for i in range(0, len(dm_list)):
    df1 = jq.get_price(dm_list[i], end_date='2019-05-10', frequency='daily',
                       fields=['open', 'close', 'high', 'low', 'volume'],
                       skip_paused=True, fq='pre')
    df1.reset_index(inplace=True, drop=False)  # date index -> column 0
    # NOTE(review): `list` shadows the builtin here — kept as-is.
    list = df1.values.tolist()
    for j in range(len(list)):
        # %s table name is interpolated, not parameterised (trusted input).
        sql = "INSERT INTO %s (date,open,close,high,low,cjl) VALUES ( '%s', %.2f ,%.2f ,%.2f ,%.2f,%.2f )"
        date = datetime.date(datetime.fromtimestamp(list[j][0].timestamp()))
        data = ('TB' + dm_list[i][:6], date, list[j][1], list[j][2],
                list[j][3], list[j][4], list[j][5])
        cursor.execute(sql % data)
    connect.commit()  # commit once per stock
    print(dm_list[i][:6] + '历史收盘价获取完成')
print('所有历史收盘价获取完成')
# Release DB resources.
cursor.close()
connect.close()
# Tide & weather board scraper: imports, today's-date globals, progress
# banner helper, and the NOAA tide fetcher (cut off at the end of this view).
from bs4 import BeautifulSoup
import requests
import weathercom
# NOTE(review): `_datetime` is the private C accelerator module; the public
# spelling is `from datetime import datetime, timedelta` — confirm before
# changing.
from _datetime import datetime, timedelta
import json

#def updateBoard(): #NOT DONE

# Today's date split into ['YYYY', 'MM', 'DD'] strings for the NOAA URL.
start_date = str(datetime.date(datetime.now())).split("-")
end_date = start_date  # same-day window (aliases start_date, not a copy)

###
#ide Predictions: /api/datagetter?product=predictions&application=NOS.COOPS.TAC.WL&begin_date=20200726&end_date=20200727&datum=MLLW&station=8637624&time_zone=lst_ldt&units=english&interval=hilo&format=json
###


def loading(loadingWhat):
    # Print a "Getting X...." banner: the label, 100 dots, then a blank line.
    print("Getting " + loadingWhat, end="")
    for i in range(100):
        print(".", end="")
    print("\n")


def getTides(loadingWhat):  #DONE AND UPDATING
    # Fetch hi/lo tide predictions for three NOAA stations.
    loading(loadingWhat)
    #JC=8636735
    #GW=8636142
    #GP=8637624
    areas = {"JC": "8636735", "GW": "8636142", "GP": "8637624"}
    for key in areas.keys():
        # NOTE: this call continues beyond the visible chunk of the file.
        tidesURL = requests.get(
            "https://tidesandcurrents.noaa.gov/api/datagetter?product=predictions&application=NOS.COOPS.TAC.WL&begin_date={0}{1}{2}&end_date={0}{1}{2}&datum=MLLW&station={3}&time_zone=lst_ldt&units=english&interval=hilo&format=json"
# (continuation — the `if` branch matching this `else` precedes this view;
# it maps codes starting with '6' to the '.XSHG' suffix)
    else:
        r = row[0] + '.XSHE'  # Shenzhen-listed code
    dmb_list.append(r)
# Fetch the money-flow series per stock and update its TB<code> table.
for o in range(0, len(dmb_list)):
    zjl_df = jq.get_money_flow([dmb_list[o]], start_date='2015-01-01',
                               end_date='2019-05-10',
                               fields=[
                                   'date', 'change_pct', 'net_amount_xl',
                                   'net_amount_l', 'net_amount_m',
                                   'net_amount_s'
                               ])
    zjl_list = zjl_df.values.tolist()
    total = 0
    for p in range(0, len(zjl_list)):
        # Update the row matching the bar date with pct change and the
        # XL/L/M/S net money-flow amounts.
        sql = "update %s set zdf=%.2f, cdd=%.2f , dd=%.2f , zd=%.2f , xd=%.2f where date='%s'"
        data = ('TB' + dmb_list[o][:6], zjl_list[p][1], zjl_list[p][2],
                zjl_list[p][3], zjl_list[p][4], zjl_list[p][5],
                datetime.date(
                    datetime.fromtimestamp(zjl_list[p][0].timestamp())))
        cursor.execute(sql % data)
        connect.commit()  # commit per row
        total = total + cursor.rowcount
    print(dmb_list[o], '数据插入完毕,', '共更新了', total, '条数据')
print("所有数据插入完成")
# Release DB resources.
cursor.close()
connect.close()
def US11():
    """GEDCOM check US11: flag individuals who appear to be in a bigamous
    relationship (overlapping marriages not ended by divorce or by the other
    spouse's death).

    NOTE(review): indentation below is reconstructed from a collapsed
    source — verify against the original file. Also, iterating
    `enumerate(set(HID))` while indexing the parallel lists
    marr_date/div_date with that index assumes an ordering Python sets do
    not guarantee; this looks like a latent bug worth confirming.
    """
    HID = []
    WID = []
    HName = []
    WName = []
    div_date = []
    marr_date = []
    SP_ID = []
    check_list = []
    #conn = sqlite3.connect('DATA.db')
    cur = conn.cursor()
    # Individuals with more than one spouse entry (SPOUSE holds a CSV of
    # family IDs; length > 5 implies at least two).
    cur.execute("SELECT SPOUSE FROM INDIVIDUAL WHERE LENGTH(SPOUSE)>5")
    rows = cur.fetchall()
    for spid in rows:
        SP_ID = list(spid)
        SP_ID = SP_ID[0].split(',')
        # Collect marriage/divorce dates and spouse IDs/names per family.
        for spid in SP_ID:
            cur.execute(
                "SELECT MARRIED,DIVORCED,HUSBAND_ID,HUSBAND_NAME,WIFE_ID,WIFE_NAME FROM FAMILY WHERE ID=?",
                (spid, ))
            rows1 = cur.fetchall()
            for m, d, hid, hnm, wid, wnm in rows1:
                # Strip punctuation/quoting left by the DB tuple repr.
                marr_date.append(re.sub(r'[^0-9a-zA-Z ]', '', str(m)))
                div_date.append(re.sub(r'[^0-9a-zA-Z ]', '', str(d)))
                HID.append(re.sub(r'[^@0-9a-zA-Z ]', '', str(hid)))
                HName.append(re.sub(r'[^0-9a-zA-Z ]', '', str(hnm)))
                WID.append(re.sub(r'[^@0-9a-zA-Z]', '', str(wid)))
                WName.append(re.sub(r'[^0-9a-zA-Z ]', '', str(wnm)))
    # A husband ID appearing twice means two marriages to check.
    if len(HID) != len(set(HID)):
        for p, id in enumerate(set(HID)):
            div = div_date[p]
            if div != "NA":
                # do something when there is divorce date for the individual
                print(div)
                mdt1 = datetime.strptime(str(marr_date[p]), "%d %b %Y")
                mdt1 = datetime.date(mdt1)
                mdt2 = datetime.strptime(str(marr_date[p + 1]), "%d %b %Y")
                mdt2 = datetime.date(mdt2)
                div = datetime.strptime(str(div), "%d %b %Y")
                div = datetime.date(div)
                # Order the two marriage dates into [start, end].
                if mdt1 < mdt2:
                    end = mdt2
                    start = mdt1
                else:
                    end = mdt1
                    start = mdt2
                # Divorce inside the window ends the first marriage in time.
                if start < div < end:
                    continue
                else:
                    check_list.append(id)
                    print("ERROR: US11: ID-", id,
                          "is in a bigamous relationship!")
            else:
                # No divorce: legitimate only if the earlier wife died
                # between the two marriage dates.
                mdt1 = datetime.strptime(str(marr_date[p]), "%d %b %Y")
                mdt1 = datetime.date(mdt1)
                mdt2 = datetime.strptime(str(marr_date[p + 1]), "%d %b %Y")
                mdt2 = datetime.date(mdt2)
                if mdt1 < mdt2:
                    pos = p
                    end = mdt2
                    start = mdt1
                else:
                    pos = p + 1
                    end = mdt1
                    start = mdt2
                cur.execute("SELECT DEATH FROM INDIVIDUAL WHERE ID=?",
                            (WID[pos], ))
                rr = cur.fetchall()
                for d in rr:
                    d = re.sub(r'[^0-9a-zA-Z ]', '', str(d))
                    if d != "NA":
                        d = datetime.strptime(str(d), "%d %b %Y")
                        d = datetime.date(d)
                        if start < d < end:
                            continue
                        else:
                            check_list.append(id)
                            print("ERROR: US11: ID-", id,
                                  "has bigamous relationship!")
                    else:
                        print("ERROR: Insufficient data - The individual",
                              id,
                              " may or may not have a bigamous relationship")
    # Same checks with the roles swapped: a wife ID appearing twice.
    if len(WID) != len(set(WID)):
        for p, id in enumerate(set(WID)):
            div = div_date[p]
            if div != "NA":
                print(div)
                # do something when there is divorce date for the individual
                mdt1 = datetime.strptime(str(marr_date[p]), "%d %b %Y")
                mdt1 = datetime.date(mdt1)
                mdt2 = datetime.strptime(str(marr_date[p + 1]), "%d %b %Y")
                mdt2 = datetime.date(mdt2)
                div = datetime.strptime(str(div), "%d %b %Y")
                div = datetime.date(div)
                if mdt1 < mdt2:
                    end = mdt2
                    start = mdt1
                else:
                    end = mdt1
                    start = mdt2
                if start < div < end:
                    continue
                else:
                    check_list.append(id)
                    print("ERROR: US11: ID-", id,
                          "is in a bigamous relationship!")
            else:
                mdt1 = datetime.strptime(str(marr_date[p]), "%d %b %Y")
                mdt1 = datetime.date(mdt1)
                mdt2 = datetime.strptime(str(marr_date[p + 1]), "%d %b %Y")
                mdt2 = datetime.date(mdt2)
                if mdt1 < mdt2:
                    pos = p
                    end = mdt2
                    start = mdt1
                else:
                    pos = p + 1
                    end = mdt1
                    start = mdt2
                cur.execute("SELECT DEATH FROM INDIVIDUAL WHERE ID=?",
                            (HID[pos], ))
                rr = cur.fetchall()
                for d in rr:
                    d = re.sub(r'[^0-9a-zA-Z ]', '', str(d))
                    if d != "NA":
                        d = datetime.strptime(str(d), "%d %b %Y")
                        d = datetime.date(d)
                        if start < d < end:
                            continue
                            # NOTE(review): unreachable — follows `continue`.
                            print(
                                "No individual is in a bigamous relationship!")
                        else:
                            check_list.append(id)
                            print("ERROR: US11: ID-", id,
                                  "has bigamous relationship!")
                    else:
                        print("ERROR: Insufficient data - The individual",
                              id,
                              "may or may not have a bigamous relationship")
def getAveragesLongTime(time, key, location, typeOfLocation, sortOperators):
    """Compute long-time (180-day) per-day averages, smoothed with a
    moving average.

    Returns a list of [date, download_avg, upload_avg, latency_avg, count]
    rows (count = number of measurements that day), or False when the
    database query returns no data. The first/last 7 days are dropped
    because the convolution distorts the window edges.
    """
    # Data is fetched from database for the last 180 days.
    data = getFromDataBase(time, key, location, typeOfLocation, -180,
                           sortOperators)
    data = [(item[0], item[1], item[2], item[3], item[4], item[5][:5],
             item[6]) for item in data]
    data.sort(key=lambda x: (x[0]))
    if not data:
        return False

    # Accumulate per-day sums keyed by calendar date. Dict insertion order
    # preserves the original first-appearance ordering (data is sorted by
    # timestamp), and replaces the original O(n^2) `.index()` scan per row
    # with an O(1) lookup.
    per_day = {}
    for item in data:
        day = datetime.date(item[0].year, item[0].month, item[0].day)
        acc = per_day.get(day)
        if acc is None:
            # [date, download sum, upload sum, latency sum, count]
            per_day[day] = [day, item[3], item[4], item[2], 1]
        else:
            acc[1] += item[3]
            acc[2] += item[4]
            acc[3] += item[2]
            acc[4] += 1
    averages = list(per_day.values())

    # Turn the per-day sums into means.
    for row in averages:
        row[1] = row[1] / row[4]
        row[2] = row[2] / row[4]
        row[3] = row[3] / row[4]

    # Smooth each metric (download, upload, latency) with a moving average.
    # NOTE(review): the original comment claimed an 11-day window but the
    # code uses 15 — the code's value is kept.
    windowSize = 15
    for col in (1, 2, 3):
        movingAve = movingaverage([row[col] for row in averages], windowSize)
        for i in range(len(averages)):
            averages[i][col] = movingAve[i]

    # First and last entries are deleted because convolution lowers their
    # values at the edges.
    del averages[:7]
    del averages[-7:]
    return averages
# (continuation — this tail belongs to an averaging function whose start is
# outside this view; the indentation below is reconstructed, verify against
# the original file)
            counter += 1
        # temp[5] flags a bucket that received measurements; convert its
        # accumulated sums (download/upload/latency) into means.
        if temp[5]:
            temp[2] = temp[2] / counter
            temp[3] = temp[3] / counter
            temp[4] = temp[4] / counter
            averagesNew.append(temp)
    averages = averagesNew
    # When filtering is requested, append a placeholder 0.0 column per row.
    if filter:
        for line in averages:
            line.append(float(0))
    return averages


if __name__ == '__main__':
    # time: a datetime.date anchoring the query
    time = datetime.date(2016, 6, 1)
    # key = uid or 0
    key = 0
    # location: city or postal code
    location = "Espoo"
    # typeOfLocation: 0 = no area restriction, 1 = by postal code, 2 = by city
    typeOfLocation = 2
    # from how many days data is acquired
    timeWindow = -70
    # averages are calculated for every [resolution] hours, values 1-24
    resolution = 1
    # use filter calculation or not (truthy/falsy)
    filter = 1
from _datetime import datetime, date, time

# Quick demo of the basic datetime accessors on "now".
print(datetime.now())
today = datetime.now()
print(today.date())
print(today.time())
print(today.ctime())
print(datetime.utcnow())
# Demo script exercising the Student1 class: construction, printing (relies
# on the class's string conversion and '+' support), instance vs class
# attributes, and the classmethod/staticmethod helpers.
stu_1 = Student1("ABC", "def", 12)
stu_2 = Student1("DFSF", "deDSFSF", 120)
print(stu_1)
print(stu_1 + stu_2)
stu_1.fullName()
stu_2.fullName()
print(stu_1.rollNo)
print("")
stu_1.updateRollNo()
print("Student1 roll number ", stu_1.rollNo)
print("")
print("Everything in stu1 object", stu_1.__dict__)
print("")
print("Everything in Student1 Class", Student1.__dict__)
print("")
# Class-level update: affects the value read through every instance below.
Student1.set_raise_rollno(100)
print("")
print("Student 1 roll number update", stu_1.rollNoUpdate)
print("Student 2 roll number update", stu_2.rollNoUpdate)
print("")
print("No of Students in Student1 Class", Student1.noOfStudentz)

# datetime is imported here (mid-file import kept as in the original lesson)
import datetime
d = datetime.date(2018, 9, 22)
Student1.schoolHoliday(d)


class WorkingStudent(Student1):
    # Placeholder subclass: inherits everything from Student1 unchanged.
    pass
def start_tracking_order_book(self):
    """Open today's order-book JSON dump file for writing.

    Builds the path from self.out_dir and today's date, stores it in
    self.ob_filename, and keeps the open handle in self.obfp (the caller
    is responsible for closing it).
    """
    # BUG FIX: the original called `datetime.date()` with no argument
    # (TypeError) and opened an undefined name `filename` (NameError).
    # Use today's date and the attribute that was just built.
    self.ob_filename = (self.out_dir + 'order-book-' +
                        str(datetime.date(datetime.now())) + '.json')
    self.obfp = open(self.ob_filename, 'w')