def test_total_expected_earnings(self):
    """A portfolio's total_expected_earnings reflects the combined upside of its positions."""
    first = Position(stock=Stock('ticker', 'name', 20, 30), quantity=2)
    second = Position(stock=Stock('ticker', 'name', 20, 40), quantity=2)
    combined = Portfolio(first, second)
    self.assertEqual(Decimal('1.5'), combined.total_expected_earnings)
def new_game(self, request):
    """Create a new game between two players.

    Only the first username is required; when no opponent is given, the
    player is matched against the built-in "computer" player.

    Returns:
        NewGameForm with both usernames and the game's urlsafe datastore key.
    """
    player = utils.get_by_username(request.player)
    if not request.opponent:
        # No opponent supplied: fall back to the stored "computer" player.
        opponent = Player.query(Player.username == "computer").get()
    else:
        opponent = utils.get_by_username(request.opponent)
    newgame = Game(player=player.key, opponent=opponent.key)
    newgame.put()
    # initialize players position
    Position(game=newgame.key, player=player.key, position=0).put()
    Position(game=newgame.key, player=opponent.key, position=0).put()
    # update score
    Score(game=newgame.key, player=player.key, score=0).put()
    Score(game=newgame.key, player=opponent.key, score=0).put()
    return NewGameForm(player=player.username,
                       opponent=opponent.username,
                       urlsafekey=newgame.key.urlsafe())
def from_db_object(self, db_object):
    """Rebuild a Position model from a raw database document."""
    raw_location = db_object['location']
    location = Location(x=raw_location['x'],
                        y=raw_location['y'],
                        z=raw_location['z'])
    return Position(mac=Mac(db_object['mac']),
                    location=location,
                    created_at=Time(int(db_object['created_at'])),
                    _id=db_object['_id'])
def new_position():
    """Create a job position for the logged-in organization user.

    Validates the posted title/description, persists the Position, and
    links every selected language to it. Redirects to the org landing
    page on success, or back to the add-position form on validation
    failure; non-org or anonymous users are bounced away.
    """
    if "user_id" not in session:
        return redirect("/")
    if session["user_id"]["role"] != "org":
        return redirect("/logout")
    # Language ids arrive as strings from the multi-select.
    req_langs = [int(lang) for lang in request.form.getlist("pos_lang")]
    print("languages", req_langs)
    is_valid = True
    # Bug fix: the original tested `len(...) >= 2` while the message
    # demands at least 3 characters; enforce the documented minimum.
    if len(request.form["title"]) < 3:
        is_valid = False
        flash("You Job Title must be at least 3 characters long")
    if not request.form["description"]:
        is_valid = False
        flash("You cannot post an empty job description.")
    if len(request.form["description"]) > 1000:
        flash("Job Description must be under 1000 characters.")
        is_valid = False
    if is_valid:
        new_position = Position(name=request.form["title"],
                                description=request.form["description"],
                                owner_id=session["user_id"]["id"])
        db.session.add(new_position)
        db.session.commit()
        print(new_position.position_id)
        # Re-fetch so the language links attach to a session-bound row.
        cur_pos = Position.query.get(new_position.position_id)
        for lang_id in req_langs:
            cur_pos.langs.append(Language.query.get(lang_id))
        db.session.commit()
        return redirect("/org_landing")
    return redirect("/org_add_pos")
def portfolio(): creds = None # The file token.pickle stores the user's access and refresh tokens, and is # created automatically when the authorization flow completes for the first # time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token) # If there are no (valid) credentials available, let the user log in. if not creds or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( 'credentials.json', SCOPES) creds = flow.run_local_server() # Save the credentials for the next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) service = build('sheets', 'v4', credentials=creds) # Call the Sheets API sheet = service.spreadsheets() result = sheet.values().get(spreadsheetId=SPREADSHEET_ID, range=RANGE).execute() values = result.get('values', []) positions = [ Position(symbol=row[0], quantity=float(scrub(row[2]))) for row in values if len(row) > 1 ] return Portfolio(positions)
def insert(engine):
    """Seed the database with fake candidates, positions and employees.

    Generates CANDIDATE_NUMBER candidates, POSITION_NUMBER positions
    (each with details), and twice as many employees as candidates,
    then commits everything in one transaction.
    """
    fake = Faker('ru_RU')
    session = sessionmaker(bind=engine)()
    for idx in range(CANDIDATE_NUMBER):
        session.add(Candidate(id=idx,
                              fio=fake.name(),
                              gender=random.choice(list(GenderEnum)),
                              birth=fake.date_this_decade(),
                              deputat=fake.pybool()))
    for idx in range(POSITION_NUMBER):
        session.add(Position(id=idx, name=fake.job()))
        session.add(PositionDetails(id=idx,
                                    staff_cat=random.choice(list(StaffCatEnum)),
                                    salary=fake.pyint(),
                                    position_id=idx))
    for idx in range(CANDIDATE_NUMBER * 2):
        # Map random ints onto valid foreign keys by taking them modulo
        # the respective table sizes.
        session.add(Employee(id=idx,
                             tab_num=fake.isbn10(),
                             candidate_id=(fake.pyint() % CANDIDATE_NUMBER),
                             position_id=(fake.pyint() % POSITION_NUMBER)))
    session.commit()
def appliances_save(request, house_id):
    """Persist the appliance placements posted for a floor.

    Expects POST keys 'floor_id', 'n' and, for each index i < n, the
    appliance_link_* fields. Links re-submitted with a positive pk are
    updated; new ones are created; links on the floor that the client no
    longer references are deleted. Responds with an empty body.

    NOTE(review): uses the Python 2 `long` builtin — this module targets
    Python 2.
    """
    appliances_list = ApplianceLink.objects.filter(
        floor=int(request.POST['floor_id']))
    # Collect the floor's existing link pks; survivors are removed from
    # this list below, and whatever remains gets deleted at the end.
    appliances = []
    for appliance in appliances_list:
        appliances.append(appliance.pk)
    for i in range(int(request.POST['n'])):
        # A positive pk means the client is updating an existing link.
        if (long(request.POST.get('appliance_link_pk' + str(i), 0)) > 0):
            f = ApplianceLink(pk=long(request.POST['appliance_link_pk' + str(i)]))
            appliances.remove(f.pk)
        else:
            f = ApplianceLink()
        # Each appliance link is anchored at a saved Position.
        center = Position()
        center.x = request.POST['appliance_link_center_x' + str(i)]
        center.y = request.POST['appliance_link_center_y' + str(i)]
        center.save()
        f.center = center
        f.appliance = ApplianceType.objects.get(
            pk=request.POST['appliance_link_appliance_pk' + str(i)])
        f.floor = Floor.objects.get(pk=int(request.POST['floor_id']))
        f.save()
    # Delete links that were not re-submitted by the client.
    for appliance in appliances:
        ApplianceLink(pk=appliance).delete()
    return HttpResponse('')
def save_data_to_db(rssi_file, position_file, session_file):
    """Load one capture session and its CSV dumps into the database.

    Args:
        rssi_file: CSV of RSSI readings (header row, then
            uuid, major, minor, rssi, timestamp columns).
        position_file: CSV of positions (header row, then x, y, timestamp).
        session_file: two lines — session name and session date.

    Everything is written inside one atomic database transaction.
    """
    # Fix: the CSV files were opened via bare open() inside list(...) and
    # never closed; context managers release the handles deterministically.
    with open(rssi_file, 'r') as f:
        rssi_data = list(csv.reader(f))
    with open(position_file, 'r') as f:
        position_data = list(csv.reader(f))
    with open(session_file, 'r') as f:
        session_name, session_date = [x.rstrip() for x in f]
    with database.atomic():
        sess = CaptureSession()
        sess.name = session_name
        sess.date = session_date
        sess.save()
        # Skip the header row of each CSV.
        for row in rssi_data[1:]:
            rssi = RSSIValue()
            rssi.beacon_uuid = row[0]
            rssi.beacon_major = int(row[1])
            rssi.beacon_minor = int(row[2])
            rssi.rssi = int(row[3])
            rssi.timestamp = int(row[4])
            rssi.capture_session = sess
            rssi.save()
        for row in position_data[1:]:
            pos = Position()
            pos.x = float(row[0])
            pos.y = float(row[1])
            pos.timestamp = int(row[2])
            pos.capture_session = sess
            pos.save()
def add_position():
    """Render the add-position form; on POST, persist the submitted rank/title."""
    if request.method == "POST":
        form = request.form
        new_position = Position(rank=form['rank'], title=form['title'])
        db.session.add(new_position)
        db.session.commit()
    return render_template('add_position.html')
def getTable(season, gamesPlayed):
    """Creates a table of team positions after all teams have played some
    number of games, printing teams from best position to worst."""
    matchDict, teamDict = readMatchData(season)
    positionDict = {
        team: Position(gamesPlayed, team, matchDict)
        for team in teamDict.values()
    }
    ranking = sorted(positionDict, key=positionDict.get, reverse=True)
    for team in ranking:
        print(team)
def setUpClass(cls):
    """Connect to Mongo and seed ten invoices, each holding one saved position."""
    print("Setting up")
    mongoengine.connect(db=DB_NAME, host=DB_HOST)
    for n in range(1, 11):
        saved_position = Position(**POSITION).save()
        invoice = Invoice(x=n, invoice_number=2)
        invoice.positions = [InvoiceItem(position=saved_position)]
        invoice.save()
def extract_position_of_item(item, data_row_dict):
    """Assemble a Position for `item` from its per-column row values.

    Reads the translation (_p_x/_p_y/_p_z) and quaternion rotation
    (_r_x/_r_y/_r_z/_r_w) columns keyed by the item's name.
    """
    suffixes = ('_p_x', '_p_y', '_p_z', '_r_x', '_r_y', '_r_z', '_r_w')
    components = [data_row_dict[item.name + suffix] for suffix in suffixes]
    return Position(*components)
def test_loading_invoice_yaml_to_model(self):
    """Every position in the invoice fixture can be turned into a Position model."""
    import yaml
    invoice_path = os.path.join("..", "documents", "invoice", "data", "data.yml")
    with open(invoice_path) as config_file:
        # Fix: yaml.load() without an explicit Loader is unsafe on
        # untrusted input and a TypeError under PyYAML >= 6; safe_load
        # parses plain-data YAML identically.
        data = yaml.safe_load(config_file.read())
    for position in data['positions']:
        p = Position(**position)
        print(p)
def walls_save(request, house_id):
    """Replace all walls of the posted floor with the submitted wall list.

    Expects POST keys 'floor_id', 'n' and, for each index i < n, the
    wall_* fields. Responds with the house id.
    """
    # Drop the floor's current walls; the client re-posts the full set.
    Wall.objects.filter(floor=int(request.POST['floor_id'])).delete()
    s = house_id
    for i in range(int(request.POST['n'])):
        f = Wall()
        # Each wall is a segment between two saved Position endpoints.
        start = Position()
        start.x = request.POST['wall_start_x' + str(i)]
        start.y = request.POST['wall_start_y' + str(i)]
        start.save()
        f.start = start
        end = Position()
        end.x = request.POST['wall_end_x' + str(i)]
        end.y = request.POST['wall_end_y' + str(i)]
        end.save()
        f.end = end
        f.insulating_size = request.POST['wall_insulating_size' + str(i)]
        f.wall_size = request.POST['wall_wall_size' + str(i)]
        f.floor = Floor.objects.get(pk=int(request.POST['floor_id']))
        f.save()
    return HttpResponse(s)
def mutate(root, info, **input):
    """Add a new Position to the requesting user's balance.

    Builds the embedded Position from the mutation input, appends it to
    the user's balance list, persists the user document, and returns the
    AddPosition payload wrapping the new position.
    """
    pb = Position(currency_pair=input['currency_pair'],
                  held_currency=input['held_currency'],
                  amount=input['amount'],
                  date=input['date'],
                  full_info=input['full_info'],
                  type=input['type'])
    user = User.objects(id=input['user_id']).first()
    # Bug fix: list.append() returns None, so the original
    # `user.balance.append(pb).save()` raised AttributeError before
    # anything was persisted. Append first, then save the document.
    user.balance.append(pb)
    user.save()
    return AddPosition(position=pb)
def crypto_tokens():
    """Read crypto.csv and return a Portfolio of token positions.

    A missing or unreadable file yields an empty Portfolio rather than
    an error.
    """
    try:
        with open('crypto.csv', 'rb') as csvfile:
            rows = csv.reader(csvfile, delimiter=',')
            holdings = [Position(symbol=entry[0], quantity=entry[1])
                        for entry in rows]
            return Portfolio(holdings)
    except IOError:
        return Portfolio()
def portfolio():
    """Read portfolio.csv and return the stock Portfolio it describes.

    Falls back to an empty Portfolio when the file cannot be opened.
    Column 0 holds the symbol, column 2 the quantity.
    """
    try:
        with open('portfolio.csv', 'rb') as csvfile:
            rows = csv.reader(csvfile, delimiter=',')
            holdings = [Position(symbol=entry[0], quantity=entry[2])
                        for entry in rows]
            return Portfolio(holdings)
    except IOError:
        return Portfolio()
def scrape_single_position_job(self, job: Job, job_element: WebElement):
    """Scrape a job entry holding exactly one position.

    Sets the company name on `job` and appends one Position with title,
    date range and location taken from the entry's summary block.
    """
    summary = self.find_summary_element(job_element, 'pv-entity__summary-info')
    raw_company = summary.find_element_by_class_name(
        'pv-entity__secondary-title').text
    job.company.name = self.canonize_company_name(raw_company)
    position = Position()
    position.title = summary.find_element_by_class_name('t-16').text.strip()
    position.date_range = self.scrape_date_range(job_element)
    position.location = self.scrape_location(job_element)
    job.add_position(position)
def generate_demo_csv():
    """Write a four-position demo portfolio to PORTFOLIO_DIR/demo.csv."""
    specs = [
        (2, 'AA', 'A & A', 20, 30),
        (3, 'BB', 'B & B', 30, 40),
        (4, 'CC', 'C & C', 40, 50),
        (5, 'DD', 'D & D', 50, 60),
    ]
    holdings = [
        Position(quantity=qty, ticker=ticker, name=name,
                 current_value=current, expected_value=expected)
        for qty, ticker, name, current, expected in specs
    ]
    portfolio = Portfolio(*holdings)
    portfolio.write_to_file(os.path.join(PORTFOLIO_DIR, 'demo.csv'))
def locate(self):
    """Fetch measures, localise each device with enough readings, then publish and persist.

    Devices with fewer than three readings are skipped; fetch and
    per-device localisation times are logged to stdout.
    """
    fetch_start = time.perf_counter()
    measures = self.fetch_measures()
    fetch_end = time.perf_counter()
    print("{} measures fetched in {}s".format(len(measures), fetch_end - fetch_start))
    for device_mac, measure in measures.items():
        if len(measure) < 3:
            # Not enough readings to localise this device.
            continue
        locate_start = time.perf_counter()
        res = self.engine.locate(measure)
        locate_end = time.perf_counter()
        print("engine localised {} in {}s".format(
            device_mac, locate_end - locate_start))
        self.publisher.publish({
            "mac": device_mac,
            "location": res.to_db_object()
        })
        self.position_dao.save(Position(Mac(device_mac), res))
def windows_save(request, house_id):
    """Replace all windows of the posted floor with the submitted window list.

    Expects POST keys 'floor_id', 'n' and, for each index i < n, the
    window_* fields. Responds with the house id.
    """
    post = request.POST
    Window.objects.filter(floor=int(post['floor_id'])).delete()
    for i in range(int(post['n'])):
        suffix = str(i)
        # Each window is anchored at a saved Position centre point.
        anchor = Position()
        anchor.x = post['window_center_x' + suffix]
        anchor.y = post['window_center_y' + suffix]
        anchor.save()
        window = Window()
        window.center = anchor
        window.width = post['window_width' + suffix]
        window.orientation = post['window_orientation' + suffix]
        window.type = post['window_type' + suffix]
        window.height = post['window_height' + suffix]
        window.floor = Floor.objects.get(pk=int(post['floor_id']))
        window.save()
    return HttpResponse(house_id)
def contribute_post():
    """Create a contribution Post from the submitted form for the current user.

    Builds the post with its category, content tags and questionnaire
    answers, attaches up to three role/org Position rows (reusing an
    existing Position when one matches), commits, and re-renders the
    contribute page with a thank-you flash.
    """
    stripped_ctags = [
        tag.split(' - ', 1)[1] for tag in request.form.getlist('ctags')
    ]
    current_user.dtags = DemogTag.query.filter(
        DemogTag.label.in_(request.form.getlist('dtags'))).all()
    new_post = Post(
        author=current_user,
        category=Category.query.filter(
            Category.label == request.form.get('category')).first(),
        ctags=ContentTag.query.filter(
            ContentTag.label.in_(stripped_ctags)).all(),
        q_name=request.form.get('q_name'),
        q_about=request.form.get('q_about'),
        q_interest=request.form.get('q_interest'),
        q_challenges=request.form.get('q_challenges'),
        q_change=request.form.get('q_change'),
        q_helpful=request.form.get('q_helpful'),
        q_other=request.form.get('q_other'))
    for r in range(3):
        role = request.form.get('role' + str(r))
        org = request.form.get('org' + str(r))
        if role == '' and org == '':
            continue
        pos = Position.query.filter(Position.title == role,
                                    Position.org == org).first()
        if pos is None:
            # Bug fix: the original bare `except:` swallowed every error
            # and dropped the freshly built Position without staging it;
            # create it explicitly and add it to the session.
            pos = Position(title=role, org=org,
                           users=[current_user], posts=[new_post])
            db.session.add(pos)
        else:
            pos.users.append(current_user)
            pos.posts.append(new_post)
    db.session.add(new_post)
    db.session.commit()
    flash('Thank you for your contribution!')
    return render_template('contribute.html',
                           categories=category_dict,
                           demographics=demog_tags,
                           sample=sample,
                           roles=3)
def save_portfolio(self, portfolio, name, date):
    """Persist `portfolio` under `name`, replacing its stored positions.

    Deletes the previous Positions rows for this name, writes a Portfolio
    summary row, then one Position row per ticker held.
    """
    #NOTE ndict annoying stuff
    session = self.manager.db.Session()
    # Cleaning
    # NOTE(review): `name` is interpolated straight into SQL — safe only
    # while portfolio names are trusted; parameterize otherwise.
    session.execute(
        "delete from Positions where Positions.PortfolioName = '{}'".
        format(name))
    #session.execute("delete from Portfolios where Portfolios.Name = '{}'".format(name))
    pf_object = Portfolio(name=name,
                          date=date.strftime(format='%Y-%m-%d %H:%M'),
                          startdate=portfolio.start_date,
                          cash=portfolio.cash,
                          startingcash=portfolio.starting_cash,
                          returns=portfolio.returns,
                          capital=portfolio.capital_used,
                          pnl=portfolio.pnl,
                          portvalue=portfolio.portfolio_value,
                          posvalue=portfolio.positions_value)
    # HACK: round-trip the positions repr through string surgery so it
    # parses as JSON — yields a dict of per-ticker field dicts. Fragile
    # if any field value contains parentheses or quotes.
    positions = json.loads(
        str(portfolio.positions).replace('Position(', '').replace(')', '').replace("'", '"'))
    assert isinstance(positions, dict)
    #FIXME In remote mode with lot of portfolios: make it crash !
    for ticker in positions:
        # Tag each row with the owning portfolio name and save date.
        positions[ticker]['name'] = name
        positions[ticker]['date'] = date
        session.add(Position(**positions[ticker]))
        #NOTE bug: 'not list-like object', but not an issue ?
        #pf_object.Positions = Position(**positions[ticker])
        #name=name,
        #ticker='pouet',
        #amount=0,
        #last_sale_price=0,
        #cost_basis=0)
    session.add(pf_object)
    session.commit()
    session.close()
def scrape_multi_position_job(self, job: Job, job_element: WebElement): summary_element = self.find_summary_element( job_element, 'pv-entity__company-summary-info') # <h3 class="t-16 t-black t-bold"> # <span class="visually-hidden">Company Name</span> # <span>University of Colorado Boulder</span> # </h3> # company_element = summary_element.find_element_by_class_name('t-16') company_element = summary_element.find_element_by_tag_name('h3') company_spans = company_element.find_elements_by_tag_name('span') job.company.name = self.canonize_company_name(company_spans[1].text) duration_element = summary_element.find_element_by_tag_name('h4') duration_spans = duration_element.find_elements_by_tag_name('span') job.total_duration = duration_spans[1].text.strip() # <ul class="pv-entity__position-group mt2"> positions_element = job_element.find_element_by_class_name( 'pv-entity__position-group') # <li class="pv-entity__position-group-role-item"> positions_items = positions_element.find_elements_by_class_name( 'pv-entity__position-group-role-item') for position_item in positions_items: position = Position() # <div class="pv-entity__summary-info-v2 pv-entity__summary-info--background-section pv-entity__summary-info-margin-top mb2"> position_element = position_item.find_element_by_class_name( 'pv-entity__summary-info--background-section') # <h3 class="t-14"> <span>Title</span> <span>Web Designer</span> title_element = position_element.find_element_by_tag_name('h3') title_spans = title_element.find_elements_by_tag_name('span') position.title = title_spans[1].text.strip() position.date_range = self.scrape_date_range(position_element) position.location = self.scrape_location(position_element) position.duration = self.scrape_duration(position_element) job.add_position(position)
def get_stock_values(portfolio):
    """Fetch the latest quote for every ticker in `portfolio`.

    Queries the batch market endpoint and rebuilds the Portfolio with
    each position's current price and its original quantity.
    """
    query = {
        'symbols': ",".join(portfolio.tickers),
        'types': 'quote,news,chart',
        'range': '1m',
        'last': 5
    }
    endpoint = HOSTNAME + '/stock/market/batch?' + query_s(query)
    payload = requests.get(endpoint).json()
    # .iteritems(): this snippet targets Python 2.
    holdings = [
        Position(symbol=vals['quote']['symbol'],
                 price=vals["quote"]["latestPrice"],
                 quantity=portfolio[ticker].quantity)
        for ticker, vals in payload.iteritems()
    ]
    return Portfolio(holdings)
def add_position():
    """Create a Position from the JSON body of the request.

    Reads name/enname/status/description from the Ajax payload and
    responds with HTTP 200 on success or 500 when the insert fails.
    """
    payload = request.json
    position = Position(name=payload.get('name', None),
                        enname=payload.get('enname', None),
                        status=payload.get('status', None),
                        description=payload.get('description', None))
    try:
        db.session.add(position)
        db.session.commit()
        status_code = 200
    except Exception as err:
        print(err)
        status_code = 500
    return jsonify(), status_code
def periodic_task():
    """Poll the CDMX Metrobus open-data feed and store vehicle positions.

    Downloads the CSV export, creates any vehicle not yet known
    (matched by api_id), and records a Position row with the reported
    location and timestamp for each feed row.
    """
    with app.app_context():
        # Request data about zones
        url = 'https://datos.cdmx.gob.mx/api/records/1.0/download/?dataset=prueba_fetchdata_metrobus'
        r = requests.get(url, allow_redirects=True)
        decoded_content = r.content.decode('utf-8')
        cr = csv.reader(decoded_content.splitlines(), delimiter=';')
        # Skips headers
        next(cr)
        # Insert all records
        for row in cr:
            exists = Vehicle.query.filter(
                Vehicle.api_id == row[1]).scalar() is not None
            # Create vehicle if doesnt exist (lookup by api_id)
            if not exists:
                newVehicle = Vehicle(
                    api_id=row[1],
                    label=row[2],
                )
                db.session.add(newVehicle)
                db.session.commit()
            veh_id = Vehicle.query.filter(Vehicle.api_id == row[1]).first().id
            # Create shapely shape to and convert to geoalchemy2 shape
            # NOTE(review): row[5]/row[4] are passed as Point(x, y) —
            # presumably lon/lat per the SRID 4326 convention; confirm
            # against the feed schema.
            geom = from_shape(Point(float(row[5]), float(row[4])), srid=4326)
            newPosition = Position(last_updated=row[0],
                                   location=geom,
                                   vehicle_id=veh_id)
            db.session.add(newPosition)
            db.session.commit()
def load_position(position_data, comments, parties):
    """Return a new position instance for position_data.

    Resolves the party via party_instances, maps the dataset's answer key
    to a signed value (approve=1, dissent=-1, neutral=0), and attaches the
    referenced comment text when it is usable.
    """
    party_data = parties[position_data["party"]]
    party = party_instances[party_data["name"]]
    party.name = party_data["name"]
    party.long_name = party_data["longname"]
    # In the dataset, answers are encoded with a key into answer.json.
    # Since Metawahl assumes answers are always approving, dissenting,
    # or neutral, the key mapping is hard-coded here.
    assert position_data["answer"] in [0, 1, 2]
    value = {0: 1, 1: -1, 2: 0}[position_data["answer"]]
    position = Position(value=value, party=party)
    comment_id = position_data["comment"]
    if comment_id and comments[comment_id]["text"] not in INVALID_POSITION_TEXTS:
        raw_text = comments[comment_id]["text"]
        # Strip a leading/trailing quote pair the dataset sometimes adds.
        raw_text = raw_text[1:-1] if raw_text.startswith('"') else raw_text
        position.text = raw_text
    return position
def position_1_1(self):
    """Fixture: a Position at (1, 1) on a fresh 5x5 grid."""
    return Position(1, 1, Grid(5, 5))
def position_5_0(self):
    """Fixture: a Position at (5, 0) on a fresh 5x5 grid."""
    return Position(5, 0, Grid(5, 5))