def __init__(self, ticker_list, seed_money, start_date=1592294400, build_database=False):
    self.gd = getData()
    # per-ticker positions; keys 'S', 'C', 'P' presumably track stock, call, and put legs
    self.portfolio = {ticker: {'S': [0, 0], 'C': [0, 0], 'P': [0, 0]} for ticker in ticker_list}
    self.balance = seed_money
    self.cost_basis = pd.DataFrame(columns=['Ticker', 'Type', 'Quantity', 'Cost', 'Time', 'Cash'])
    if build_database:
        self.build_database(start_date)
    self.data = pd.read_csv('data.csv').drop(columns=['Unnamed: 0'])
    self.data['datetime'] = self.data['datetime'].apply(
        lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S"))
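# Usage sketch (hedged): the class that owns this __init__ is not shown above, so
# "PaperTrader" below is a hypothetical stand-in name, and 'data.csv' is assumed to
# already exist (or to be produced by build_database).
trader = PaperTrader(['AAPL', 'MSFT'], seed_money=10000)
print(trader.balance)            # 10000
print(trader.portfolio['AAPL'])  # {'S': [0, 0], 'C': [0, 0], 'P': [0, 0]}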
def full(username):
    data = api.getData(username)
    # follows = data["follows"]["count"]
    # followed_by = data["followed_by"]["count"]
    # bio = data["biography"]
    # images = []
    # media = data["media"]["nodes"]
    # for image in media:
    #     images.append({"image": image["display_src"], "likes": image["likes"]["count"]})
    # formatted = {"follows": follows, "followed_by": followed_by, "bio": bio, "images": images}
    return str(data)
def ratings(self):
    if cherrypy.request.method == 'POST':
        input_json = cherrypy.request.json
        api.add(input_json)
        api.print(input_json)
        return 'Success'
    elif cherrypy.request.method == 'GET':
        ratings = api.getData()[:100]
        return ratings
    elif cherrypy.request.method == 'DELETE':
        api.delete()
        return 'Success'
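# Wiring sketch (an assumption, not shown in the handler above): cherrypy.request.json
# is only populated when the json_in tool is enabled, and returning a Python list from
# the GET branch needs json_out to serialize it. A minimal setup would look like:
import cherrypy

class RatingsApp:
    @cherrypy.expose
    @cherrypy.tools.json_in()
    @cherrypy.tools.json_out()
    def ratings(self):
        ...  # handler body as defined above

if __name__ == '__main__':
    cherrypy.quickstart(RatingsApp(), '/')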
def getUsage(type, phase, room):
    """
    type - percent -> percentage of your bandwidth used
         - left    -> quantity in GB of your bandwidth left
         - usage   -> quantity in GB of your bandwidth used
         - all     -> summary of your usage
    phase must be 1, 2 or 3
    room must be an existing room in the block
    """
    # assumed return shape of api.getData: (usage in MB, max in MB, pct used, left in GB)
    usage, max, pct, left = api.getData(phase, room, datetime.now().month)
    if type == "percent":
        return "{:0.2f}%".format(pct)
    if type == "left":
        return "{:0.2f}GB".format(left)
    if type == "usage":
        return "{:0.2f}GB".format(usage / 1024)
    if type == "all":
        return "Used :\t\t{:0.2f}GB ({:0.2f}%)\nLeft :\t\t{:0.2f}GB ({:0.2f}%)\nTotal :\t\t{:0.2f}GB".format(
            usage / 1024, pct, left, 100 - pct, max / 1024)
    raise Exception('type must be one of "percent", "left", "usage" or "all"')
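# Hypothetical calls; phase 1 and room "A101" are illustrative values, and api.getData
# must have data for them for these to succeed.
print(getUsage("percent", 1, "A101"))  # e.g. "42.17%"
print(getUsage("all", 1, "A101"))      # multi-line usage summary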
def upload():
    # Get the uploaded file and the submitted username
    file = request.files['file']
    username = request.form['username']
    data = api.getData(username)
    follows = data["follows"]["count"]
    followers = data["followed_by"]["count"]
    # Check if the file is one of the allowed types/extensions
    if file and allowed_file(file.filename):
        # Make the filename safe, remove unsupported chars
        filename = secure_filename(file.filename)
        # Move the file from the temporary folder to the upload folder we set up
        file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        # Run the uploaded image through the Clarifai model to collect concept tags
        image = ClImage(file_obj=open('uploads/' + filename, 'rb'))
        results = []
        classes = []
        probs = []
        imageinfo = model.predict([image])
        for i in imageinfo['outputs'][0]['data']['concepts']:
            classes.append(i['name'])
            probs.append(i['value'])
        # results.append({'result': {'tag': {'classes': classes, 'probs': probs}}})
        tag_pool = []
        # for result in results:
        #     result = result["result"]["tag"]
        #     tag_pool.extend(result["classes"])
        # users_omega['naimmiah08'] = results  # needs to be changed for username
        getTags = db.tags_pool.find()
        for tags in getTags:
            # print tags['tags']
            tag_pool.extend(tags['tags'])
        tag_pool = set(tag_pool)
        # db.tags_pool.update({'id': 1}, {'$set': {'tags': list(tag_pool)}})
        # Build the feature vector: [present?, probability] per tag, then follower flags
        image = []
        for tag in tag_pool:
            if tag in classes:
                image.append(1)
                idx = classes.index(tag)
                image.append(probs[idx])
            else:
                image.append(0)
                image.append(0)
        # print follows, followers
        image.append(follows >= follows_median)
        image.append(followers >= followers_median)
        likes = linearClassifier.predict(image)
        # print len(image)
        # print likes
        # for user in users_omega:
        #     data = api.getData(user)
        #     follows = data["follows"]["count"]
        #     followed_by = data["followed_by"]["count"]
        #     bio = data["biography"]
        #     media = api.getPictures(user)
        #     media = data["media"]["nodes"]
        #     results = users_omega[user]
        #     i = 0
        #     for result in results:
        #         features = []
        #         result = result["result"]["tag"]
        #         item = media[i]
        #         likes.append(item["likes"]["count"])
        #         caption = item["caption"]
        #         classes = result["classes"]
        #         probs = result["probs"]
        #         for tag in tag_pool:
        #             if tag in classes:
        #                 features.append(1)
        #                 idx = classes.index(tag)
        #                 features.append(probs[idx])
        #             else:
        #                 features.append(0)
        #                 features.append(0)
        #         features.append(follows)
        #         features.append(followed_by)
        #         following.append(follows)
        #         followers.append(followed_by)
        #         i = i + 1
        #         images.append(features)
        # images = dataset[0]
        # likes = dataset[1]
        # print images
        # print likes
        return render_template("result.html", image=filename, username=username, likes=ceil(likes[0]))
    return "Upload failed: file missing or not an allowed type"
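# The feature layout used in upload() is easy to lose inside the route handler; the
# helper below is a standalone sketch of the same construction (the function name is
# mine, not part of the app): one [present?, probability] pair per tag in the shared
# tag pool, followed by two boolean features for the follower-count thresholds.
def build_feature_vector(tag_pool, classes, probs, follows, followers,
                         follows_median, followers_median):
    features = []
    for tag in tag_pool:
        if tag in classes:
            features.append(1)
            features.append(probs[classes.index(tag)])
        else:
            features.append(0)
            features.append(0)
    features.append(follows >= follows_median)
    features.append(followers >= followers_median)
    return features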
def lambda_handler(event, context):
    s3_client = boto3.client('s3',
                             aws_access_key_id=config.AWS_ACCESS_KEY,
                             aws_secret_access_key=config.AWS_SECRET_KEY)
    csv_obj = s3_client.get_object(Bucket=config.AWS_S3_BUCKET, Key=config.AWS_S3_FILE_KEY)
    body = csv_obj['Body']
    json_string = body.read().decode('utf-8')
    staffRecord = json.loads(json_string)
    body.close()
    frame = {
        "frameId": event['frameId'],
        "timestamp": event['eventTimestamp'],
        "imageUrl": event['imageUrl'],
        "site": event['site']
    }
    recordList = []
    eventList = api.getData(event)
    # Members with a stored record who do not appear in this batch are treated as "OUT"
    for member in config.memberList:
        flag = 0
        for i in eventList:
            if member in i:
                flag = 1
        if flag == 1:
            continue
        print(member)
        if member not in staffRecord:
            continue
        judge = staffRecord[member]  # member's last stored record; no new event for them in this invocation
        eventProfile = {
            "frameId": judge['frameId'],
            "eventTimestamp": judge['eventTimestamp'],
            "name": member,
            "frameUrl": judge['frameUrl'],
            "site": "OUT"
        }
        staffRecord[eventProfile['name']] = eventProfile
        api.writeData(staffRecord)
        frame2 = {
            "frameId": judge['frameId'],
            "timestamp": judge['eventTimestamp'],
            "imageUrl": judge['frameUrl'],
            "site": "OUT"
        }
        behaviorDetection = {
            "personId": member,
            "inTime": 0,
            "outTime": judge['eventTimestamp'],
            "isMember": 1,
            "stayTime": judge['eventTimestamp'] - event['eventTimestamp'],
            "coordinate_x": 0.0,
            "coordinate_y": 0.0
        }
        fraudModel = {
            "frame": [frame2],
            "behaviorDetection": behaviorDetection
        }
        recordList.append(fraudModel)
    if len(eventList) != 0:
        print(len(eventList))
        for count in range(len(eventList)):
            eventProfile = {
                "frameId": eventList[count][0],
                "eventTimestamp": eventList[count][1],
                "name": eventList[count][2],
                "frameUrl": eventList[count][3],
                "site": eventList[count][4]
            }
            print(eventProfile)
            if eventProfile['name'] not in staffRecord:
                staffRecord[eventProfile['name']] = eventProfile
                api.writeData(staffRecord)
                continue
            recordProfile = staffRecord[eventProfile['name']]
            print(recordProfile)
            frame2 = {
                "frameId": recordProfile['frameId'],
                "timestamp": recordProfile['eventTimestamp'],
                "imageUrl": recordProfile['frameUrl'],
                "site": recordProfile['site']
            }
            if eventProfile['site'] == 'IN':
                # previous record was "OUT"; stay time = IN timestamp minus OUT timestamp
                # [frame, frame2] = [IN, OUT]
                behaviorDetection = {
                    "personId": eventProfile['name'],
                    "inTime": eventProfile['eventTimestamp'],
                    "outTime": recordProfile['eventTimestamp'],
                    "isMember": 1,
                    "stayTime": recordProfile['eventTimestamp'] - eventProfile['eventTimestamp'],
                    "coordinate_x": 0.0,
                    "coordinate_y": 0.0
                }
                fraudModel = {
                    "frame": [frame, frame2],
                    "behaviorDetection": behaviorDetection
                }
                staffRecord[eventProfile['name']] = eventProfile
                api.writeData(staffRecord)
                recordList.append(fraudModel)
    return recordList
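# Local invocation sketch: the handler only reads these four top-level keys from the
# incoming event; the values below are illustrative, and S3 access plus api.getData /
# api.writeData must be available for the call to succeed.
if __name__ == '__main__':
    sample_event = {
        "frameId": "frame-0001",
        "eventTimestamp": 1592294400,
        "imageUrl": "https://example.com/frames/frame-0001.jpg",
        "site": "IN",
    }
    print(lambda_handler(sample_event, context=None))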
tag_pool = []
users_omega = {}
images = []
likes = []
following = []
followers = []
max_clarifai_limit = 128

# predict with the model
for user in users:
    print("Doing user " + user)
    data = api.getData(user)
    media = api.getPictures(user)
    # media = data["media"]["nodes"]
    image_links = []
    results = []
    for mediaItem in media:
        image_links.append(mediaItem["display_src"])
        imageinfo = model.predict_by_url(mediaItem["display_src"])
        classes = []
        probs = []
        for i in imageinfo['outputs'][0]['data']['concepts']:
            classes.append(i['name'])
            probs.append(i['value'])
        results.append({'result': {'tag': {'classes': classes, 'probs': probs}}})
        classes = []
        probs = []
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat May 30 01:48:15 2020

@author: apolat
"""
from api import getData
import pandas as pd
from datetime import datetime

gd = getData()
ticker = 'AAPL'


def option_chain_summary(ticker, strikeCount=5):
    option_data = gd.OptionChain(ticker, strikeCount=strikeCount)
    df = []
    if option_data['callExpDateMap'].keys() == option_data['putExpDateMap'].keys():
        for key in option_data['callExpDateMap']:
            for strike in option_data['callExpDateMap'][key]:
                l = []
                l.append(key)
                l.append(option_data['callExpDateMap'][key][strike][0]['last'])
                l.append(option_data['callExpDateMap'][key][strike][0]['totalVolume'])
                l.append(float(strike))
                l.append(option_data['putExpDateMap'][key][strike][0]['last'])
                l.append(option_data['putExpDateMap'][key][strike][0]['totalVolume'])
                # assumed completion: collect the row and return a DataFrame
                # (column names below are illustrative)
                df.append(l)
    return pd.DataFrame(df, columns=['Expiration', 'Call Last', 'Call Volume',
                                     'Strike', 'Put Last', 'Put Volume'])
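# Hypothetical call; requires the api.getData client to be configured with working
# market-data credentials and network access.
summary = option_chain_summary('AAPL', strikeCount=5)
print(summary.head())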
def list():
    # Note: this name shadows the built-in list() within this module
    ratings = api.getData()[:100]
    return json.dumps(ratings)