def success(name):
    """Block/unblock one website by rewriting the hosts file.

    Between 01:00 and 11:00 local time the hostname *name* is mapped to
    127.0.0.1 in the Windows hosts file; outside that window every line
    mentioning it is stripped back out.  Runs forever, polling every 5 s.

    :param name: hostname to block, e.g. "www.example.com"
    """
    import time
    from datetime import datetime as dat

    # BUG FIX: raw string — "\W", "\S", "\D" etc. are invalid escape
    # sequences in a normal literal (SyntaxWarning on modern Pythons).
    # change hosts path according to your OS
    hosts_path = r"C:\Windows\System32\Drivers\etc\hosts"
    # localhost's IP
    localIP = "127.0.0.1"
    website_list = [name]

    while True:
        # BUG FIX: sample the clock once per iteration; the original called
        # dat.now() six times, so the comparison could straddle a tick.
        now = dat.now()
        window_start = dat(now.year, now.month, now.day, 1)
        window_end = dat(now.year, now.month, now.day, 11)
        if window_start < now < window_end:
            print("Process Executed...")
            with open(hosts_path, 'r+') as file:
                content = file.read()
                for website in website_list:
                    if website not in content:
                        file.write(localIP + " " + website + "\n")
        else:
            with open(hosts_path, 'r+') as file:
                content = file.readlines()
                file.seek(0)
                for line in content:
                    # keep only lines that do not mention a blocked host
                    if not any(website in line for website in website_list):
                        file.write(line)
                # removing hostnmes from host file
                file.truncate()
            print("Process not executing...")
        time.sleep(5)
def run(self):
    """Fan twelve monthly stat jobs for this object's year out over a pool.

    Submits self.stat(self.name, <first of month>) for months 1-12 on a
    two-worker thread pool and records future -> month in a local mapping.
    """
    with ThreadPoolExecutor(max_workers=2) as pool:
        q = {}
        for month in range(1, 13):
            future = pool.submit(self.stat, self.name, dat(self.year, month, 1))
            q[future] = month
def passesLeapDay(today):
    """Return True if a Feb-29 leap day falls within ~6 months before *today*.

    :param today: a datetime.datetime
    :return: True when *today*'s year is a leap year and Feb 29 of that year
             is on or after the point 183 days before *today*; else False.
    """
    # stdlib replacement for the external isLeapYear helper (same contract)
    from calendar import isleap

    thisYear = today.timetuple()[0]
    if not isleap(thisYear):
        return False
    sixMonthsAgo = today - datetime.timedelta(days=183)
    # BUG FIX: the original built the leap day with datetime.dat(...), which
    # does not exist and raised AttributeError on every leap year.
    leapDay = datetime.datetime(thisYear, 2, 29)
    return leapDay >= sixMonthsAgo
def get_all_hisData(interval="1d", startin=dat(2020, 1, 1, 0, 1, 1), endin=None):
    """Refresh kline CSVs for every symbol listed in the main chart file.

    :param interval: kline interval forwarded to get_Kline_csv
    :param startin: start of the requested history window
    :param endin: end of the window; defaults to "now" at *call* time
    :return: list of dicts {"symbol", "address", "start", "end"}
    """
    # BUG FIX: the original default endin=dat.now() was evaluated once at
    # import time, so every later call silently reused a stale timestamp.
    if endin is None:
        endin = dat.now()
    address = update_Main_Chart()
    df = pd.read_csv(address)
    addressBook = []
    # NOTE(review): df.size counts all cells, not rows; for a multi-column
    # frame this overshoots and len(df) is probably intended — TODO confirm.
    for x in range(0, df.size - 1):
        symbol = df["symbol"][x]
        # BUG FIX: the original reassigned interval = "1d" here, silently
        # overriding whatever interval the caller asked for.
        symdata = get_Kline_csv(symbol, interval=interval, startin=startin, endin=endin)
        tempdata = {
            "symbol": symbol,
            "address": symdata,
            "start": startin.strftime("%Y-%m-%d %H:%M:%S"),
            "end": endin.strftime("%Y-%m-%d %H:%M:%S"),
        }
        addressBook.append(tempdata)
    return addressBook
def get_mean_performance(df):
    """
    Take the filtered dataframe and calculate mean performance per mouse on each day.
    :param df:
    :return:
    """
    frames = []
    for mouse_id in df.mouse.unique():
        sess_idx = 0
        # Subset of the dataset belonging to this mouse
        mouse_df = df.loc[df['mouse'] == mouse_id]
        surgery_date = None
        for session_date in mouse_df.session_date.unique():
            # One session of this mouse
            session = mouse_df.loc[mouse_df['session_date'] == session_date]
            parsed = dat(int(session_date[:4]), int(session_date[4:6]), int(session_date[6:]))
            # Data is sorted so the first 5 sessions per mouse are prestroke;
            # they get negative day numbers counted back from surgery, and the
            # last prestroke date is remembered as the surgery reference.
            if sess_idx < 5:
                day_offset = sess_idx - 5
                surgery_date = parsed
            else:
                day_offset = (parsed - surgery_date).days
            frames.append(pd.DataFrame(
                {
                    "mouse": mouse_id,
                    "date": session_date,
                    "performance": session["licking_binned"].mean(),
                    "norm_day": day_offset,
                },
                index=[sess_idx],
            ))
            sess_idx += 1
    # One row per session, concatenated across mice
    return pd.concat(frames)
def get_Kline_csv(name, interval="1h", startin=dat(2020, 1, 1, 0, 1, 1), endin=None):
    """Fetch historical klines for *name* from the exchange and cache as CSV.

    :param name: trading symbol, e.g. "BTCUSDT"
    :param interval: kline interval string understood by the client
    :param startin: start of the requested window
    :param endin: end of the window; defaults to "now" at *call* time
    :return: path of the CSV written under <mainPath>/data/csvs/<name>/<interval>/
    """
    # BUG FIX: endin=dat.now() as a default was evaluated once at import time.
    if endin is None:
        endin = dat.now()
    start = startin.strftime("%d %m,%Y")
    end = endin.strftime("%d %m,%Y")
    dirc = mainPath + "/data/csvs/" + name + "/" + interval
    filename = "data"
    fileformat = ".csv"
    ##info = client.get_symbol_info(name)  ## check for is trading or not !
    print("from " + start + " -> " + end)
    ##get_historical_klines(symbol, interval, start_str, end_str=None, limit=500)
    flag = 1
    while flag == 1:
        try:
            data = client.get_historical_klines(name, interval, start, end)
            flag = 0
        except Exception:
            # BUG FIX: the original bare `except:` also swallowed
            # KeyboardInterrupt, so the promised Ctrl-C exit never worked.
            print("check your connection - ctrl-c for exit - we are retrying")
    print("date resived!!!")
    # Column 0 is the open time in ms since epoch; convert to a naive
    # datetime truncated to whole seconds.  Columns 1..n-2 arrive as strings.
    for x in range(len(data)):
        data[x][0] = dat.strptime(
            dat.fromtimestamp(int(data[x][0]) / 1000).strftime('%Y-%m-%d %H:%M:%S'),
            "%Y-%m-%d %H:%M:%S")
        for xx in range(1, len(data[0]) - 1):
            data[x][xx] = float(data[x][xx])
    # Keep date/open/high/low/close/volume (0..5) plus number-of-trades (8).
    dfdata = []
    for x in range(len(data)):
        themplis = []
        for xx in range(0, len(data[0]) - 1):
            if xx <= 5 or xx == 8:
                themplis.append(data[x][xx])
        dfdata.append(themplis)
    if not os.path.exists(dirc):
        os.makedirs(dirc)
        print("path successfuly created")
    else:
        print("folder found ")
    ## noft -> number of trades
    address = dirc + "/" + filename + fileformat
    df = pd.DataFrame(columns=["date", "open", "high", "low", "close", "volume", "noft"], data=dfdata)
    df.set_index("date", drop=True, inplace=True)
    df.to_csv(address, mode="w")
    print("successfuly created")
    return address
def stat(self, ind, d):
    """Collect twelve monthly stat results for self.year into d[ind].

    NOTE(review): the unqualified stat(...) call below resolves to a
    module-level function named stat, not to this method — confirm intent.
    """
    monthly = [stat(dat(self.year, month, 1), self.name) for month in range(1, 13)]
    d[ind] = monthly
import backtrader as bt
import backtrader.feeds as btfeeds
import pandas as pd
import requests as req
import time

from analisys.control import TestStrategy
from notfication.control import notfication

# ----------------------------------
bot = notfication()
# ---------------------------

if __name__ == '__main__':
    # NOTE(review): ctrl, get_Kline_csv and dat are referenced below but not
    # imported in this chunk — confirm they are provided elsewhere.
    datapath = ctrl.dirc
    data = btfeeds.GenericCSVData(
        dataname=get_Kline_csv(name="BTCUSDT", interval="1h"),
        fromdate=dat(2021, 1, 1),
        todate=dat.now(),
        nullvalue=0.0,
        dtformat=('%Y-%m-%d %H:%M:%S'),
        # BUG FIX: get_Kline_csv writes columns as
        # date,open,high,low,close,volume,noft — the original mapping
        # (open=3, high=1, low=2) fed the wrong columns to backtrader.
        datetime=0,
        open=1,
        high=2,
        low=3,
        close=4,
        volume=5,
        openinterest=-1,
    )  # Create a Data Feed
    cerebro = bt.Cerebro()
    cerebro.addstrategy(TestStrategy)
    cerebro.broker.setcash(100000)
    cerebro.adddata(data)
def date(dd, mm, yyyy):
    """Return True if dd/mm/yyyy is a valid calendar date, else False.

    :param dd: day of month
    :param mm: month (1-12)
    :param yyyy: year
    """
    try:
        dat(year=yyyy, month=mm, day=dd)
        return True
    except (ValueError, TypeError):
        # TypeError added: non-integer input (e.g. "31") previously escaped
        # this validator and crashed the caller instead of returning False.
        return False
import time
from datetime import datetime as dat

hostp = "hosts"  # NOTE(review): unused here — confirm before removing
host_path = "/etc/hosts"
redirect = "127.0.0.1"
websites = ["www.facebook.com", "facebook.com",
            "www.primevideo.com", "primevideo.com"]

# Block the listed sites during study hours (08:00-17:15), unblock otherwise.
while True:
    # creating datetime object — sample the clock once per iteration
    now = dat.now()
    study_start = dat(now.year, now.month, now.day, 8)
    study_end = dat(now.year, now.month, now.day, 17, 15)
    if study_start < now < study_end:
        print("Study hours.....")
        with open(host_path, 'r+') as file:
            info = file.read()
            for website in websites:
                if website not in info:
                    file.write(redirect + " " + website + "\n")
    else:
        with open(host_path, 'r+') as file:
            info = file.readlines()
            file.seek(0)
            for line in info:
                if not any(website in line for website in websites):
                    file.write(line)
            # deletes the content of the file from current point and downwards
            file.truncate()
        print("Time to relax..")
    # BUG FIX: the original loop had no delay and rewrote /etc/hosts in a
    # tight busy-loop; poll every 5 seconds instead.
    time.sleep(5)