def set_predictions(user, predictions):
    """Create or update a user's match predictions.

    Each item in *predictions* is a dict with keys 'matchday', 'home_team',
    'away_team', 'home_score' and 'away_score'.  An existing row (matched on
    user + matchday + teams) gets its scores updated; otherwise a new
    Prediction is created and attached to the user.

    Returns True on success; rolls back and re-raises on any error.
    """
    for prediction in predictions:
        try:
            db_prediction = Prediction.query.filter_by(
                user_id=user.id,
                matchday=prediction["matchday"],
                home_team=prediction["home_team"],
                away_team=prediction["away_team"]).first()
            if db_prediction is None:
                db_prediction = Prediction(
                    matchday=prediction["matchday"],
                    home_team=prediction["home_team"],
                    home_score=prediction["home_score"],
                    away_team=prediction["away_team"],
                    away_score=prediction["away_score"])
                db.session.add(db_prediction)
                user.predictions.append(db_prediction)
            else:
                db_prediction.home_score = prediction["home_score"]
                db_prediction.away_score = prediction["away_score"]
        # BUG FIX: was a bare `except:`, which also traps SystemExit and
        # KeyboardInterrupt; narrow to Exception (still rolls back + re-raises).
        except Exception:
            db.session.rollback()
            raise
    # Single commit after all rows are staged, so the batch is atomic.
    db.session.commit()
    return True
def set_predictions(user, predictions):
    """Create or update a user's match predictions.

    Rows are matched on (user, matchday, home_team, away_team); matched rows
    have their scores updated, unmatched dicts become new Prediction rows
    appended to user.predictions.

    Returns True on success; rolls back and re-raises on any error.
    """
    for prediction in predictions:
        try:
            db_prediction = Prediction.query.filter_by(
                user_id=user.id,
                matchday=prediction["matchday"],
                home_team=prediction["home_team"],
                away_team=prediction["away_team"]).first()
            if db_prediction is None:
                db_prediction = Prediction(
                    matchday=prediction["matchday"],
                    home_team=prediction["home_team"],
                    home_score=prediction["home_score"],
                    away_team=prediction["away_team"],
                    away_score=prediction["away_score"])
                db.session.add(db_prediction)
                user.predictions.append(db_prediction)
            else:
                db_prediction.home_score = prediction["home_score"]
                db_prediction.away_score = prediction["away_score"]
        # BUG FIX: bare `except:` replaced with `except Exception:` so
        # SystemExit/KeyboardInterrupt are not swallowed by the rollback path.
        except Exception:
            db.session.rollback()
            raise
    db.session.commit()
    return True
def add_predictions(self, user, predictions):
    """Insert or refresh the given user's predictions.

    Looks up each prediction by (user, matchday, home_team, away_team);
    creates a new row when none exists, otherwise updates the scores.
    Rolls back and re-raises on failure, commits once at the end.
    """
    for entry in predictions:
        matchday = entry['matchday']
        home = entry['home_team']
        away = entry['away_team']
        try:
            existing = Prediction.query.filter_by(
                user_id=user.id,
                matchday=matchday,
                home_team=home,
                away_team=away).first()
            if existing is not None:
                # Row already present: just refresh the predicted scores.
                existing.home_score = entry['home_score']
                existing.away_score = entry['away_score']
            else:
                fresh = Prediction(
                    matchday=matchday,
                    home_team=home,
                    home_score=entry['home_score'],
                    away_team=away,
                    away_score=entry['away_score'])
                db.session.add(fresh)
                user.predictions.append(fresh)
        except Exception:
            db.session.rollback()
            raise
    db.session.commit()
    return True
def fill_prediction(provider, confidences=None, error=None):
    """Build a Prediction for *provider*.

    When *error* is truthy the Prediction carries only the error; otherwise
    it carries the confidences plus the emotion derived from them.
    """
    if error:
        return Prediction(provider=provider, error=error)
    return Prediction(
        provider=provider,
        confidences=confidences,
        predominant_emotion=get_predominant_emotion(confidences),
    )
def testBuyTradeLikelihood(self):
    # Exercise calculate_trade_from_likelihood for a BUY driven by a 90%
    # subjective probability against a fresh market (both contracts at 0,
    # liquidity 100) and a user holding a balance of 100.
    prediction_key = Prediction(contract_one=0.00,
                                contract_two=0.00,
                                liquidity=100,
                                statement="Test",
                                end_time=datetime.datetime.now()).put()
    user_key = Profile(balance=100).put()
    profile = user_key.get()
    prediction = prediction_key.get()
    probability = 90
    trade_likeliness = calculate_trade_from_likelihood(
        probability, prediction, profile)
    # Expected quantity is a golden value from the LMSR-style pricing math —
    # presumably derived from liquidity=100 and balance=100; confirm against
    # calculate_trade_from_likelihood if the parameters change.
    self.assertEqual(29.789603833999628, trade_likeliness.quantity)
def GetPredictions():
    """Returns all of the predictions (and can filter by org)."""
    org_filter = request.args.get('org', False)
    if org_filter:
        query = Prediction.query(Prediction.org == org_filter)
    else:
        query = Prediction.query()
    predictions = query.fetch()
    for prediction in predictions:
        # TODO(goldhaber): add these to the datastore
        key_str = prediction.key.urlsafe()
        prediction.url = 'predictions/' + key_str
        prediction.price = GetPriceByPredictionId(key_str) * 100
    return render_template('predictions.html', predictions=predictions)
def NewPrediction():
    """Create a prediction from the submitted form and redirect to it.

    Expects form fields: liquidity (float), info, statement,
    endtime ('%Y-%m-%d'), org.  On any failure flashes 'error' and sends
    the user back to the creation page.
    """
    try:
        prediction = Prediction(liquidity=float(request.form['liquidity']),
                                info=request.form['info'],
                                statement=request.form['statement'],
                                end_time=datetime.datetime.strptime(
                                    request.form['endtime'], "%Y-%m-%d"),
                                org=request.form['org'])
        prediction_id = prediction.put()
        flash('You created a new prediction!')
        return redirect('/predictions/' + prediction_id.urlsafe())
    # BUG FIX: was a bare `except:` — it also caught SystemExit and
    # KeyboardInterrupt.  Exception still covers the expected failures
    # (missing form keys, bad float/date parses, datastore errors).
    except Exception:
        flash('error')
        return redirect('/predictions/create')
def update_predictions():
    # Backfill the realized ("actual") price for every prediction that has
    # not been resolved yet (actual == 0), using the first Price row recorded
    # after the prediction was made.
    predictions = Prediction.select().where((Prediction.actual == 0))
    for p in predictions:
        # Monday = 0, Sunday = 6
        # If Friday or Saturday or Sunday then search for Monday
        if 4 <= p.time.weekday() <= 6:
            # Weekend-ish prediction: scan forward weekday-by-weekday
            # starting from Monday (0) for the next available price.
            for i in [0, 1, 2, 3, 4, 5, 6]:
                # NOTE(review): this branch selects only Price.current, but
                # price.time is read below — looks like Price.time is missing
                # from the select here; confirm against the peewee model.
                price = Price.select(Price.current).where(
                    (Price.time > p.time) & (fn.WEEKDAY(Price.time) == i) &
                    (Price.company == p.company)).order_by(
                        Price.id.desc()).limit(1)
                # Stop once a price is found, or once the candidate day is
                # still in the future (no data can exist yet).
                if len(price) > 0 or (p.time + timedelta(days=i)) > datetime.now():
                    break
        else:
            # Weekday prediction: scan the following days in calendar order,
            # wrapping the weekday index with modulo 7.
            for i in [1, 2, 3, 4, 5, 6]:
                weekday = (p.time.weekday() + i) % 7
                price = Price.select(
                    Price.current, Price.time).where(
                        (Price.time > p.time) &
                        (fn.WEEKDAY(Price.time) == weekday) &
                        (Price.company == p.company)).order_by(
                            Price.id.desc()).limit(1)
                if len(price) > 0 or p.time + timedelta(
                        days=i) > datetime.now():
                    break
        # Resolve the query to a single row (or None when nothing matched).
        price = price[0] if len(price) > 0 else None
        if price is not None:
            p.actual = price.current
            p.actual_price_time = price.time
            p.save()
def db_insert(prediction, length, human):
    """Persist a single Prediction row, creating the schema on first use.

    The SQLite file 'predictions.db' is created (via db.create_all) when it
    does not exist yet.
    """
    if not os.path.exists('predictions.db'):
        db.create_all()
    row = Prediction(prediction=prediction, length=length, human=human)
    db.session.add(row)
    db.session.commit()
def predict_view(request):
    """Return the average current_power over all predictions as JSON.

    On GET: if no prediction was recorded in the last hour, seed one with
    current_power=40000, then respond with
    {"total": mean(current_power over all Prediction rows)}.
    Non-GET requests fall through and return None (unchanged behaviour).
    """
    if request.method == "GET":
        end_time = timezone.now()
        start_time = end_time + relativedelta(hours=-1)
        recent = Prediction.objects.filter(date__range=(start_time, end_time))
        if len(recent) == 0:
            new_prediction = Prediction(current_power=40000)
            new_prediction.save()
        all_predictions = Prediction.objects.all()
        # BUG FIX: the original used
        #   reduce(lambda x, y: x.current_power + y.current_power, qs).current_power
        # which raises AttributeError whenever the queryset has more than two
        # rows (after the first step the accumulator is an int, not a model),
        # and called .current_power on the reduced number.  A plain sum over
        # the field is the intended computation.
        total_power = sum(p.current_power for p in all_predictions)
        return HttpResponse(
            json.dumps({
                'total': float(total_power) / len(all_predictions)
            }))
def testPriceForBuyTrade(self):
    # Price a BUY of 10 CONTRACT_ONE against an empty market
    # (both contracts at 0, liquidity 100).
    prediction_key = Prediction(contract_one=0.00,
                                contract_two=0.00,
                                liquidity=100,
                                statement="Test",
                                end_time=datetime.datetime.now()).put()
    user_key = Profile().put()
    trade = Trade(prediction_id=prediction_key,
                  user_id=user_key,
                  direction='BUY',
                  contract='CONTRACT_ONE',
                  quantity=10.00)
    priceBuy = get_price_for_trade(prediction_key.get(), trade)
    # Golden value from the market-maker cost function for these parameters.
    self.assertEqual(5.124947951362557, priceBuy)
    # Selling the same 10 contracts back from a market that already holds 10
    # should be exactly the mirror image of the buy price (negated).
    prediction_key = Prediction(contract_one=10.00,
                                contract_two=0.00,
                                liquidity=100,
                                statement="Test",
                                end_time=datetime.datetime.now()).put()
    trade = Trade(prediction_id=prediction_key,
                  user_id=user_key,
                  direction='SELL',
                  contract='CONTRACT_ONE',
                  quantity=10.00)
    priceSale = get_price_for_trade(prediction_key.get(), trade)
    self.assertEqual(-5.124947951362557, priceSale)
def fixture_stats():
    """
    Calculate the predicted home and away goals for a fixture and store
    those values in the database.

    For every uncompleted fixture, expected goals are the product of the
    attacking side's attack strength, the defending side's defence strength
    and the league's average goals for that venue (a Poisson-style model).
    """
    fixtures = db.session.query(Fixture).filter_by(completed=False)
    for fixture in fixtures:
        league_stats = db.session.query(League_Stats).filter_by(
            league_id=fixture.league_id).one()
        home_stats = db.session.query(Team_Stats).filter_by(
            team_id=fixture.home_team_id).one()
        away_stats = db.session.query(Team_Stats).filter_by(
            team_id=fixture.away_team_id).one()
        # Expected goals: attack strength x opposing defence x league average.
        home_goals = (home_stats.home_attack_strength) * (
            away_stats.away_defense_strength) * (league_stats.avg_home_goals)
        away_goals = (away_stats.away_attack_strength) * (
            home_stats.home_defense_strength) * (league_stats.avg_away_goals)
        prediction = db.session.query(Prediction).filter_by(
            fixture_id=fixture.id).first()
        if not prediction:
            # NOTE(review): a freshly constructed Prediction is never
            # db.session.add()ed here — presumably Prediction(fixture) hooks
            # itself up through a relationship cascade; confirm, otherwise
            # new rows are silently dropped at commit.
            prediction = Prediction(fixture)
        prediction.home_goals = home_goals
        prediction.away_goals = away_goals
        prediction = _fixture_probabilities(fixture, prediction)
        odds = db.session.query(Odds).filter_by(fixture_id=fixture.id).first()
        if not odds:
            # NOTE(review): same concern as above — the new Odds object is
            # never added to the session and `odds` is otherwise unused here.
            odds = Odds(fixture)
        # Commit once per fixture so each fixture's stats land independently.
        db.session.commit()
def test():
    # If this file is executed directly, the below examples will be run and tested:
    # Demo driver: feeds synthetic predictions into the matplotlib visualizer
    # once per second, forever.
    visualizer = MatPlotLibVisualizer()
    last_run = now = datetime.now()
    minute = timedelta(minutes=1)
    # Seed data: two consecutive one-minute windows for three tickers.
    test_predictions = [
        Prediction('AAPL', now, now + minute, 100, 101),
        Prediction('TSLA', now, now + minute, 300, 296),
        Prediction('GME', now, now + minute, 30, 31),
        Prediction('AAPL', now + minute, now + minute * 2, 103, 105),
        Prediction('TSLA', now + minute, now + minute * 2, 290, 296),
        Prediction('GME', now + minute, now + minute * 2, 33, 35),
    ]
    last_predictions = test_predictions
    minutes_passed = 5
    while True:
        # One fresh randomized prediction per ticker for the next window.
        new_predictions = [
            Prediction('AAPL', now + minute * minutes_passed,
                       now + minute * (minutes_passed + 1),
                       100 + randint(0, 10), 100 + randint(0, 10)),
            Prediction('TSLA', now + minute * minutes_passed,
                       now + minute * (minutes_passed + 1),
                       280 + randint(0, 20), 280 + randint(0, 20)),
            Prediction('GME', now + minute * minutes_passed,
                       now + minute * (minutes_passed + 1),
                       30 + randint(0, 5), 30 + randint(0, 5))
        ]
        visualizer.update_predictions_plot(last_predictions + new_predictions)
        visualizer.update_traders_plot(now + minute * minutes_passed, [
            SafeTrader("Trader 1", randint(500, 1000)),
            SafeTrader("Trader 2", randint(500, 1000))
        ], {})
        last_predictions = new_predictions
        minutes_passed += 1
        sleep(1)
    # NOTE(review): the statements below are unreachable — the `while True`
    # loop above never breaks.  Left as-is; presumably leftover timing code.
    print("Last run difference: ", datetime.now() - last_run)
    last_run = datetime.now()
def testScoringCase(self):
    # A resolved-outcome prediction (CONTRACT_ONE wins, not yet scored)
    # with a user who holds 10 CONTRACT_ONE in their ledger: scoring()
    # should credit exactly those 10 units.
    prediction_key = Prediction(contract_one=0.00,
                                contract_two=0.00,
                                liquidity=100,
                                resolved=False,
                                outcome='CONTRACT_ONE',
                                statement='Test',
                                end_time=datetime.datetime.now()).put()
    user_key = Profile(balance=100,
                       user_ledger=[
                           LedgerRecords(
                               prediction_id=prediction_key.urlsafe(),
                               contract_one=10.00,
                               contract_two=0.00)
                       ]).put()
    # The trade links the user to the prediction so scoring() can find them.
    trade_key = Trade(prediction_id=prediction_key,
                      user_id=user_key,
                      direction='BUY',
                      contract='CONTRACT_ONE',
                      quantity=10).put()
    user = user_key.get()
    audit = scoring()
    # First audit entry should show the 10.0 contract_one payout.
    self.assertEqual(10, audit[0]['earned'])
def kafkaconsumer():
    """Drain one Kafka partition into the database, then close the consumer.

    Reads partition 0 of TOPIC_NAME from the beginning up to the offset that
    was the end of the partition at startup, mapping each JSON message to
    Photo / Match / Prediction rows by its "type" field.
    """
    consumer = KafkaConsumer(bootstrap_servers=BOOTSTRAP_SERVERS)
    tp = TopicPartition(TOPIC_NAME, 0)
    # register to the topic
    consumer.assign([tp])
    # obtain the last offset value
    consumer.seek_to_end(tp)
    lastOffset = consumer.position(tp)
    # Rewind to the start so the whole backlog is processed.
    consumer.seek_to_beginning(tp)
    for message in consumer:
        # print(message, "msg", message.offset, lastOffset)
        # Keep the raw record (for its offset) before decoding the payload.
        consumer_message = message
        message = json.loads(message.value)
        if message.get("type") == "photos":
            model_data = [
                Photo(name=data.get("name"), data=data.get("data"))
                for data in message.get("data")
            ]
        elif message.get("type") == "matches":
            model_data = [
                Match(name=data.get("name"), data=data.get("data"))
                for data in message.get("data")
            ]
        elif message.get("type") == "prediction":
            model_data = [
                Prediction(
                    photo=data.get("photo"),
                    match=data.get("match"),
                    status=data.get("status"),
                    score=data.get("score"),
                ) for data in message.get("data")
            ]
        # NOTE(review): if a message has an unrecognised "type", model_data is
        # either unbound (NameError on the first message) or stale from the
        # previous iteration and gets re-inserted — confirm message types are
        # closed over these three values.
        try:
            db.session.add_all(model_data)
            db.session.commit()
        except IntegrityError:
            # Duplicate rows are expected on replay; skip them.
            db.session.rollback()
        # Stop once the pre-recorded end of the partition is reached.
        if consumer_message.offset == lastOffset - 1:
            break
    consumer.close()
def predict_next_price(self, current_data_point: DataPoint) -> Prediction:
    """Predict the next price for this stock from the rolling batch.

    Appends *current_data_point* to the stored window, runs the model on
    the resulting dataset, and de-normalizes the final output value using
    the stored mean/std before wrapping it in a Prediction covering the
    next data interval.
    """
    window = np.concatenate(
        (self.last_batch, [self.data_point_to_array(current_data_point)]))
    inputs, labels = next(iter(self.make_dataset(window)))
    model_output = self.model(inputs).numpy()
    # Last timestep of the last batch element holds the newest prediction.
    normalized = model_output[-1][-1][0]
    denormalized = normalized * self.std[LABEL_INDEX] + self.mean[LABEL_INDEX]
    start = current_data_point.timestamp
    return Prediction(self.stock, start, start + self.data_interval,
                      current_data_point.close_price, denormalized)
def process_prediction(self, prediction_data):
    """Parse raw prediction records and persist the plausible ones.

    Records whose predicted departure is more than 600 seconds after the
    prediction timestamp are treated as bad timing data and skipped.
    """
    session = Session()
    predictions = []
    # BUG FIX: the original tested `type(prediction_data == list)`, which is
    # always truthy (it's the type of a bool, not a list check).
    if prediction_data is not None and isinstance(prediction_data, list):
        for raw in prediction_data:
            try:
                predictions.append(Prediction(raw))
            except Exception:
                # BUG FIX: the original bare `except: pass` wrapped the whole
                # loop, silently dropping every record after the first bad
                # one; now only the unparsable record is skipped.
                continue
    sql_predictions = []
    for prediction in predictions:
        predicted_departure_dt = datetime.strptime(
            prediction.predicted_departure, "%Y-%m-%dT%H:%M:%S")
        prediction_datetime_dt = datetime.strptime(
            prediction.prediction_datetime, "%Y-%m-%dT%H:%M:%S")
        delta = predicted_departure_dt - prediction_datetime_dt
        # BUG FIX: this filter lived in a separate loop whose `continue` had
        # no effect on the insert loop, so bad-timing rows were still stored.
        if delta.seconds > 600:
            continue
        sql_predictions.append(
            sql_Predictions(
                stop_id=prediction.stop_id,
                trip_id=prediction.trip_id,
                vehicle_id=prediction.vehicle_id,
                route_name=prediction.route_name,
                predicted_delay=prediction.predicted_delay,
                predicted_departure=prediction.predicted_departure,
                prediction_datetime=prediction.prediction_datetime))
    session.add_all(sql_predictions)
    session.commit()
def process_prediction(prediction_data):
    """Parse raw prediction records and INSERT the plausible ones.

    Records whose predicted departure is more than 600 seconds after the
    prediction timestamp are logged as bad timing data and skipped.
    """
    # logging.warning("===========PROCESS PRED METHOD CALLED=============")
    cur = get_cur()
    predictions = []
    if prediction_data is None:
        # logging.warning("===========PREDICTION DATA IS NONE=============")
        pass
    # BUG FIX: `type(prediction_data == list)` was always truthy (it is the
    # type of a bool); use isinstance for the intended list check.
    elif isinstance(prediction_data, list):
        for raw in prediction_data:
            try:
                logging.warning(raw)
                predictions.append(Prediction(raw))
            except Exception:
                # BUG FIX: the bare `except: pass` wrapped the whole loop and
                # dropped everything after the first bad record.
                continue
    for prediction in predictions:
        predicted_departure_dt = datetime.strptime(
            prediction.predicted_departure, "%Y-%m-%dT%H:%M:%S")
        prediction_datetime_dt = datetime.strptime(
            prediction.prediction_datetime, "%Y-%m-%dT%H:%M:%S")
        delta = predicted_departure_dt - prediction_datetime_dt
        print("here is the time delta.... ", delta)
        if delta.seconds > 600:
            print("Delta too much..... ", delta.seconds > 600)
            continue
        # SECURITY FIX: values were interpolated into the SQL string with
        # str.format (injection-prone); use driver parameter binding instead.
        # Assumes a DB-API driver using %s paramstyle (e.g. psycopg2) —
        # confirm against get_cur()'s backend.
        sql_string = """
            INSERT INTO predictions
            VALUES (%s, %s, %s, %s, %s, %s, %s);
        """
        params = (prediction.stop_id, prediction.trip_id,
                  prediction.vehicle_id, prediction.route_name,
                  prediction.predicted_delay, prediction.predicted_departure,
                  prediction.prediction_datetime)
        logging.warning("=========SEEE SQL STRING===========")
        logging.warning(sql_string)
        cur.execute(sql_string, params)
def process_prediction(prediction_data):
    """Parse raw prediction records and INSERT the plausible ones.

    Records whose predicted departure is more than 600 seconds after the
    prediction timestamp are skipped as bad timing data.
    """
    cur = get_cur()
    predictions = []
    # BUG FIX: `type(prediction_data == list)` was always truthy (it is the
    # type of a bool); isinstance performs the intended check.
    if prediction_data is not None and isinstance(prediction_data, list):
        for raw in prediction_data:
            try:
                predictions.append(Prediction(raw))
            except Exception:
                # BUG FIX: the original bare `except: pass` wrapped the loop
                # and dropped everything after the first bad record.
                continue
    # (removed: unused `bulk_template` string that was never executed)
    print("HOW MANYT PREDS???", len(predictions))
    for prediction in predictions:
        predicted_departure_dt = datetime.strptime(
            prediction.predicted_departure, "%Y-%m-%dT%H:%M:%S")
        prediction_datetime_dt = datetime.strptime(
            prediction.prediction_datetime, "%Y-%m-%dT%H:%M:%S")
        delta = predicted_departure_dt - prediction_datetime_dt
        print("here is the time delta.... ", delta)
        if delta.seconds > 600:
            print("Delta too much..... ", delta.seconds > 600)
            continue
        # SECURITY FIX: values were interpolated via str.format (injection-
        # prone); use driver parameter binding.  Assumes a %s-paramstyle
        # DB-API driver (e.g. psycopg2) — confirm against get_cur().
        sql_string = """
            INSERT INTO predictions
            VALUES (%s, %s, %s, %s, %s, %s, %s);
        """
        params = (prediction.stop_id, prediction.trip_id,
                  prediction.vehicle_id, prediction.route_name,
                  prediction.predicted_delay, prediction.predicted_departure,
                  prediction.prediction_datetime)
        print(sql_string)
        cur.execute(sql_string, params)
        print("prediction inserted??")
def index():
    """Hurricane-impact landing page.

    Flow: if the 'hurricane' table is empty, either redirect to /update or,
    when a data_url query param is given, load and clean that data.  On POST,
    validate the user's location/radius and render an analysis; otherwise
    render the plain index page.
    """
    raw_hurricane_data_from_sql = pd.read_sql_table('hurricane', con=engine)
    # Keep only the columns the Prediction model consumes.
    hurricane_data_from_sql = raw_hurricane_data_from_sql[[
        'identifier', 'name', 'num_pts', 'datetime', 'status', 'latitude',
        'longitude'
    ]]
    if (hurricane_data_from_sql.empty
            and request.args.get('data_url') == None):
        # No data and no source supplied: send the user to the update page.
        return redirect('/update')
    elif hurricane_data_from_sql.empty:
        # No data but a source URL supplied: ingest it now.
        hurricane_data_url = request.args.get('data_url')
        hurricane_data = clean(hurricane_data_url)
        update_database(hurricane_data)
        return "<h1>Data loaded</h1>"
    elif request.method == 'POST':
        #validate and get location
        location = geolocator.geocode(request.form['location'])
        if not location:
            flash(
                'Invalid Address - enter coordinates, address with city or just city',
                'error')
            return render_template('index.html')
        # validate and get radius
        if not request.form['radius'].isnumeric():
            flash('please enter a number for radius', 'error')
            return render_template('index.html')
        # Convert miles to degrees of latitude (~69 miles per degree).
        radius = (float(request.form['radius']) / 69)
        impact = float(request.form['impact'])
        my_prediction = Prediction(hurricane_data_from_sql, location, radius,
                                   impact)
        return render_template('/analysis.html', prediction=my_prediction)
    else:
        return render_template('/index.html')
def index():
    """Handle the prediction-entry form; otherwise render the index page.

    On a POST with action 'enter', records a price prediction for a
    cryptocurrency (current price fetched from cryptocompare) and redirects
    to the feed.
    """
    if request.method == 'POST':
        if request.form['action'] == 'enter':
            confidence = request.form['confidence']
            currency = request.form['currency']
            target_price = request.form['target_price']
            date_until = request.form['date_until']
            # (removed a duplicate read of request.form['confidence'])
            date_until_formated = datetime.datetime.strptime(
                date_until, '%Y-%m-%d').date()
            currency_info = cryptocompare.get_price(currency, currency='USD')
            starting_price = currency_info[currency]['USD']
            price_difference = float(target_price) - starting_price
            # BUG FIX: the original condition was
            #   `if not currency and target_price and date_until:`
            # which, by precedence, is `(not currency) and ...` and almost
            # never fired; the intent is "flash unless ALL fields are filled".
            if not (currency and target_price and date_until):
                flash('please fill everything', 'danger')
            else:
                post = Prediction(currency=currency,
                                  date_to_target=date_until_formated,
                                  target_price=target_price,
                                  starting_price=starting_price,
                                  confidence=confidence,
                                  price_difference=price_difference)
                db.session.add(post)
                db.session.commit()
                return redirect(url_for('feed'))
        return redirect(url_for('feed'))
    return render_template("index.html")
def make_prediction(timestamp, stop_data):
    """Build a Prediction from one SIRI StopMonitoring entry.

    ExpectedArrivalTime is optional in the feed: parse failures
    (TypeError/ValueError) are tolerated and stored as None.
    """
    journey = stop_data['MonitoredVehicleJourney']
    call = journey['MonitoredCall']
    fields = {
        'response_time': date_parser(timestamp),
        'recorded_time': date_parser(stop_data['RecordedAtTime']),
        'line_ref': journey['LineRef'],
        'direction_ref': journey['DirectionRef'],
        'stop_point_ref': call['StopPointRef'],
        'scheduled_arrival_time': date_parser(call['AimedArrivalTime'])
    }
    try:
        fields['expected_arrival_time'] = date_parser(
            call['ExpectedArrivalTime'])
    except TypeError:
        log.error(TypeError)
        fields['expected_arrival_time'] = None
    except ValueError:
        fields['expected_arrival_time'] = None
    return Prediction(**fields)
def scoring():
    """Pay out every finished-but-unscored prediction.

    For each prediction with a known outcome that is not yet resolved,
    credits every trading user with the ledger holdings of the winning
    contract, marks the prediction resolved, and returns an audit list of
    {'user': ..., 'earned': ...} dicts.
    """
    predictions = Prediction.query(
        ndb.AND(Prediction.outcome != "UNKNOWN",
                Prediction.resolved == False)).fetch()
    audit = []
    for prediction in predictions:
        winning_contract = prediction.outcome
        # Everyone who traded on this prediction gets scored.
        trades = Trade.query(Trade.prediction_id == prediction.key).fetch()
        traders = [trade.user_id.get() for trade in trades]
        for trader in traders:
            # The user's ledger entry for this prediction holds their
            # position in each contract.
            records = [
                entry for entry in trader.user_ledger
                if entry.prediction_id == prediction.key.urlsafe()
            ]
            if winning_contract == 'CONTRACT_ONE':
                earned = records[0].contract_one
            else:
                earned = records[0].contract_two
            trader.balance += earned
            audit.append({'user': trader, 'earned': earned})
            trader.put()
        prediction.resolved = True
        prediction.put()
    return audit
def process_prediction(prediction_data):
    """Parse raw prediction records and INSERT the plausible ones.

    Records whose predicted departure is more than 600 seconds after the
    prediction timestamp are skipped as bad timing data.
    """
    logging.warning("=======ARE YOU EVEN WORKING??=====")
    cur = get_cur()
    predictions = []
    # BUG FIX: `type(prediction_data == list)` was always truthy (it is the
    # type of a bool); isinstance performs the intended list check.
    if prediction_data is not None and isinstance(prediction_data, list):
        for raw in prediction_data:
            try:
                predictions.append(Prediction(raw))
            except Exception:
                # BUG FIX: the original bare `except: pass` wrapped the loop
                # and dropped everything after the first bad record.
                continue
    for prediction in predictions:
        predicted_departure_dt = datetime.strptime(
            prediction.predicted_departure, "%Y-%m-%dT%H:%M:%S")
        prediction_datetime_dt = datetime.strptime(
            prediction.prediction_datetime, "%Y-%m-%dT%H:%M:%S")
        delta = predicted_departure_dt - prediction_datetime_dt
        if delta.seconds > 600:
            continue
        # SECURITY FIX: values were interpolated via str.format (injection-
        # prone); use driver parameter binding.  Assumes a %s-paramstyle
        # DB-API driver (e.g. psycopg2) — confirm against get_cur().
        sql_string = """
            INSERT INTO predictions
            VALUES (%s, %s, %s, %s, %s, %s, %s);
        """
        cur.execute(sql_string,
                    (prediction.stop_id, prediction.trip_id,
                     prediction.vehicle_id, prediction.route_name,
                     prediction.predicted_delay,
                     prediction.predicted_departure,
                     prediction.prediction_datetime))
def _setup_initial_prediction(user, prediction, competition):
    # Create the user's prediction for the current season if they don't have
    # one yet.  *prediction* is an iterable of team primary keys in the
    # user's predicted order.
    this_year = datetime.datetime(settings.CURRENT_SEASON, 1, 1)
    if not Prediction.objects.filter(year=this_year, user=user, competition=competition)\
            .count():
        prediction_obj = Prediction(year=this_year,
                                    user=user,
                                    name=user.email,
                                    competition=competition)
        # Save before adding M2M teams (the object needs a pk first).
        prediction_obj.save()
        for t_id in prediction:
            prediction_obj.teams.add(Team.objects.get(pk=t_id))
        prediction_obj.save()
        # Seed the derived scoring fields immediately.
        prediction_obj.calculateScore()
        prediction_obj.calculateGoalDiff()
        prediction_obj.calculatePosition()
        # Also attach a running-score tracker in the meta competition.
        meta_competition = Competition.objects.get(
            pk=settings.CURRENT_META_COMPETITION_ID)
        runningscore = RunningScore.objects.create(
            name="Running score", user=user, competition=meta_competition)
def view_fixtures(request, tournament_id, user_id=None):
    """Show a tournament's fixtures with the viewed user's predictions.

    GET renders the fixture list (optionally for another user via *user_id*);
    POST (own predictions only) creates/updates predictions for fixtures that
    have not started yet, then redirects back to the same page.
    """
    # XXX: All of the following is very inefficient (hurriedly implemented to get things finished in
    # time for wc2010). I've begun work on a replacement view that uses joins instead of making
    # queries within loops; will merge in once its finished.
    if user_id:
        user = User.objects.get(id=user_id)
    else:
        user = request.user
    now = datetime.datetime.now()  # TODO: Allow pardon of 1 minute maybe?
    if request.method == "POST":
        if user_id:
            # Viewing someone else's predictions is read-only.
            raise PermissionDenied("Not allowed POST to this URL")
        # extract the pertinent POST values (i.e. those with numeric keys)
        predictions_req = dict([(k, v) for k, v in request.POST.items()
                                if str(k).isdigit()])
        for fixture_id, result_type_id in predictions_req.items():
            try:
                fixture = Fixture.objects.get(pk=fixture_id)
            except ObjectDoesNotExist:
                # No such fixture
                continue
            if now > fixture.date:
                # Not allowed to update anymore
                continue
            try:
                result_type = ResultType.objects.get(pk=result_type_id)
            except ObjectDoesNotExist:
                # No such result type.
                continue
            # TODO: add a create_or_update() method for Prediction (i.e. PUT)
            # (this kind of idiom must exist in django already surely?)
            try:
                # update if prediction exists already. TODO: Perhaps add a unique constraint on (user, prediction)
                prediction = Prediction.objects.filter(user=user).get(
                    fixture=fixture)
                prediction.result = result_type
                prediction.save()
            except ObjectDoesNotExist:
                # prediction doesn't exist, lets add it
                prediction = Prediction(user=user,
                                        fixture=fixture,
                                        result=result_type)
                # FIXME: This is quite inefficient. Generate a comma-separated insert query instead.
                prediction.save()
        return redirect(request.get_full_path())
    all_result_types = ResultType.objects.all()  # TODO: error handling if no result types found?
    knockout_result_types = ResultType.objects.filter(Q(pk=1) | Q(pk=2))  # FIXME: hack (hard-coded values)
    # TODO: move this above the POST block?
    try:
        tournament = Tournament.objects.get(pk=tournament_id)
    except ObjectDoesNotExist:
        return HttpResponse(
            'Tournament %s does not exist. <a href="/tournaments">View Tournaments</a>'
            % tournament_id)
    fixtures = Fixture.objects.filter(tournament=tournament_id).order_by("-date")
    # FIXME: This is pretty poor.. should really use a single SQL query to retrieve this data
    games = []  # this array stores dicts containing a fixture, prediction, is_disabled bool val, and
    for fixture in fixtures:
        try:
            prediction = Prediction.objects.filter(fixture=fixture.id).get(user=user)
        except ObjectDoesNotExist:
            prediction = None
        # A fixture is locked once kickoff has passed or a result is recorded.
        is_disabled = False
        if now > fixture.date or fixture.result:
            is_disabled = True
        # Drawable fixtures (group stage) offer all result types; knockout
        # fixtures only offer the two win outcomes.
        if fixture.fixture_type.is_drawable:
            result_types = all_result_types
        else:
            result_types = knockout_result_types
        games.append({
            "fixture": fixture,
            "prediction": prediction,
            "is_disabled": is_disabled,
            "result_types": result_types  # FIXME: inefficient
        })
    c = csrf(request)
    c.update(locals())
    return render_to_response("fixtures.html", c,
                              context_instance=RequestContext(request))
def test_linear_forecasting_for_the_whole_period(self):
    """Try linear forecasting over the whole period.

    (Translated from Russian.)  Some resources are consumed continuously but
    not very regularly — milk, for example, bought a few times a week — and
    consumption is not expected to change.  We approximate that consumption
    linearly.  Units were added: in our case milk is measured in litres
    ('volume': 1).

    First we compute the average daily milk consumption: total consumption
    divided by the number of days over which it happened — effectively the
    derivative.  Then we recompute the derivative excluding one purchase and
    shortening the period to the moment it was bought, and estimate how many
    days the last purchase will last if consumption stays the same.  This is
    the simple model where consumption is measured from the date of the
    first purchase to the date of the last one.  Adding the number of days
    the last purchase will last to the first-to-last-purchase interval gives
    the date on which to put the next purchase in the calendar.
    """
    delta_days = 40
    delta_days_2 = 35
    list_buy_milk = self.__list_buy_milk()
    # Flatten the purchase history into weighted Elements
    # (name, timestamp, total volume bought).
    elements = []
    for i in list_buy_milk:
        for j in i['items']:
            elements.append(
                Element(j['name'], i['dateTime'],
                        j['volume'] * j['quantity']))
    #self.assertEqual('', elements)
    # Average daily consumption over a fixed 40-day window.
    self.assertEqual(
        0.318,
        Prediction.average_weight_per_day_in_during_period(
            elements, delta_days))
    pl = PredictionLinear(elements)
    self.assertEqual(30, pl.delta_days())
    self.assertEqual(27, pl.without_last_delta_days())
    self.assertEqual(0.424, pl.average_weight_per_day_in_during_period())
    #self.assertEqual(0.3, Prediction.average_weight_per_day_in_during_period(elements[1:], delta_days_2))
    # Same derivative with the first purchase excluded and a shorter window.
    delta = Prediction.average_weight_per_day_in_during_period(
        elements[1:], delta_days_2)
    self.assertEqual(0.278, delta)
    pl_2 = PredictionLinear(elements[1:])
    delta = pl_2.average_weight_per_day_in_during_period()
    self.assertEqual(0.324, delta)
    #self.assertEqual(10.0, elements[0].get_weight() / delta)
    # How many days the most recent purchase lasts at that consumption rate.
    days_continue_for_last_buy = float("{0:.3f}".format(
        elements[0].get_weight() / delta))
    self.assertEqual(9.259, days_continue_for_last_buy)
    delta_days_future = pl.days_future()
    self.assertEqual(8.333, delta_days_future)
    # The day of the month on which the reminder should fire, i.e. when
    # everything runs out.  (Translated comment.)
    today_is = 44.259
    self.assertEqual(today_is, delta_days_2 + days_continue_for_last_buy)