def test_values(self):
    """Series.values() yields Point objects; the default key is ``close``."""
    xs = np.array([1, 2, 3, 4])
    candles = Series(
        "pair", "1h",
        time=xs, close=3 * xs, low=2 * xs, open=xs, volume=xs, high=xs,
    )

    seen = 0
    for point in candles.values():
        assert isinstance(point, Point)
        # default key is close == 3 * time, so x == y / 3
        assert point.x == point.y / 3
        seen += 1
    assert seen == xs.size

    seen = 0
    for point in candles.values("low"):
        assert isinstance(point, Point)
        # low == 2 * time, so x == y / 2
        assert point.x == point.y / 2
        seen += 1
    assert seen == xs.size
def create_item_series(user: InternalUserDTO, item_uuid: UUID, series_dto: SeriesDTO) -> SeriesDTO:
    """Create a new series under *item_uuid* on behalf of *user*.

    Raises if the user has no write access to the owning item.
    """
    parent = Item.find_writeable_or_fail(user, item_uuid)
    series_dto.item_uuid = parent.uuid  # type: ignore
    record = Series(**series_dto.to_dict())
    record.save()
    record.session.commit()
    return to_model(record, SeriesDTO)
def get_candles(
    pair: str,
    timeframe: str,
    limit: int,
    last_timestamp: Optional[int] = None,
    influx: Optional[InfluxDB] = None,
) -> Series:
    """Fetch up to *limit* candles for *pair*/*timeframe* from InfluxDB.

    Each candle is the per-interval median across the four source exchanges,
    taken strictly before *last_timestamp* (epoch seconds). Returns an empty
    Series when the query yields no rows.

    NOTE(security): pair/timeframe/limit are interpolated directly into the
    InfluxQL string; callers must only pass trusted, validated values.
    """
    measurement = pair + timeframe
    if not last_timestamp:
        # Far-future sentinel (~year 2255): effectively "no upper bound".
        last_timestamp = 9000000000

    q = f"""
    SELECT * FROM (
        SELECT
        median(close) AS close,
        median(high) AS high,
        median(low) AS low,
        median(open) AS open,
        median(volume) AS volume
        FROM {measurement}
        WHERE ("exchange" = 'binance'
            OR "exchange" = 'bitfinex'
            OR "exchange" = 'poloniex'
            OR "exchange" = 'bittrex')
        GROUP BY time({timeframe}) FILL(none)
    )
    WHERE time < {last_timestamp}s
    ORDER BY time DESC
    LIMIT {limit}
    """

    result = influx.query(q, epoch="s")
    df = pd.DataFrame(result.get_points(measurement=measurement))

    if df.shape[0] == 0:
        return Series(pair=pair, timeframe=timeframe)

    # Influx returns newest-first; reverse every column into chronological order.
    return Series(
        pair=pair,
        timeframe=timeframe,
        time=df.time.values[::-1],
        open=df.open.values[::-1],
        close=df.close.values[::-1],
        high=df.high.values[::-1],
        low=df.low.values[::-1],
        volume=df.volume.values[::-1],
    )
def test_record(database):
    """End-to-end check of Record: derived fields, relations, upload flag."""
    rec = Recorder(uid="RecorderX")
    params = RecordingParameters(uid="ParametersX", duration=100.0)
    srs = Series(uid="SeriesX", recorder_uid=rec.uid, parameters_uid=params.uid)
    database.session.add_all([rec, params, srs])

    record = Record(uid='SuperQualityRecord', start_time=time(), series_uid=srs.uid)
    database.session.add(record)
    database.session.commit()

    # Freshly created record: metadata populated, not yet uploaded.
    assert record.uid == 'SuperQualityRecord'
    assert record.created_at is not None
    assert record.start_time < time()
    assert record.stop_time == record.start_time + params.duration
    assert record.uploaded_at is None
    assert record.series == srs
    assert record.label is None
    assert not record.is_uploaded()
    assert record.filepath is not None
    assert not record.filepath.exists()

    # fake upload file
    record.filepath.parent.mkdir(parents=True, exist_ok=True)
    record.filepath.touch()
    record.uploaded_at = datetime.now()
    database.session.commit()
    assert record.is_uploaded()

    # Undo the fake upload and verify the flag flips back.
    record.filepath.unlink()
    record.uploaded_at = None
    database.session.commit()
    assert not record.is_uploaded()
def get_series_metrics(user: InternalUserDTO, series_uuid: UUID, filter_params: dict) -> List[MetricDTO]:
    """Return the metrics of a series, filtered by *filter_params*.

    The caller must have read access to the owning item.
    """
    owner = Series.find_or_fail(series_uuid)
    Item.find_readable_or_fail(user, owner.item_uuid)
    dtos = Metric.find_by_series_uuid(owner.uuid, filter_params)
    rows = [Metric(**dto.to_dict()) for dto in dtos]
    return to_models(rows, MetricDTO)
def delete_metric(user: InternalUserDTO, series_uuid: UUID, ts: datetime) -> None:
    """Delete the metric at timestamp *ts* from a series; write access required."""
    owner = Series.find_or_fail(series_uuid)
    Item.find_writeable_or_fail(user, owner.item_uuid)
    target = Metric.find_or_fail((series_uuid, ts))
    target.delete()
    target.session.commit()
def test_future_time(self):
    """The future/date axes extend the series by the configured margin."""
    base = np.array([1, 2, 3, 4])
    candles = Series("pair", "1h", 2 * base, base, base, base, base, base)

    assert candles.time.size == base.size
    assert candles.future_time.size == BaseConfig.MARGIN
    # date covers both the observed times and the future margin
    assert candles.date.size == base.size + BaseConfig.MARGIN
def update_series_by_uuid(user: InternalUserDTO, series_uuid: UUID, series_update: SeriesDTO) -> SeriesDTO:
    """Replace the data payload of a series; write access required."""
    target = Series.find_or_fail(series_uuid)
    Item.find_writeable_or_fail(user, target.item_uuid)
    target.data = series_update.data
    target.save()
    target.session.commit()
    return to_model(target, SeriesDTO)
def main():
    """CLI helper: ``delete`` drops and recreates all tables, then seeds one
    row per model so the TravisCI build has data to work with.

    Any other (or missing) argument leaves the database untouched.
    """
    # Guard explicitly instead of letting a missing argv[1] raise IndexError
    # into a broad except that would also mask real database errors.
    if len(sys.argv) < 2 or sys.argv[1] != 'delete':
        print("\nNothing deleted. Have a good day.\n")
        return
    try:
        from app.scrape.models import db
        from app.models import Character, Comic, Creator, Event, Series

        db.drop_all()
        db.create_all()

        new_character = Character(
            1009146, "Abomination (Emil Blonsky)",
            "This is a test description",
            "http://i.annihil.us/u/prod/marvel/i/mg/9/50/4ce18691cbf04.jpg",
            44, 2, 4)
        new_creator = Creator(
            621, "Adriana Melo",
            "http://i.annihil.us/u/prod/marvel/i/mg/c/40/4bb45e7cc293b.jpg",
            25, 4, 11)
        new_series = Series(
            7524, "Adam: Legend of the Blue Marvel (2008)", "Testing 123",
            2008, 2015,
            "http://i.annihil.us/u/prod/marvel/i/mg/9/20/4bb4f0966a26a.jpg",
            12, 16, 11, 8)
        new_event = Event(
            306, "Gene Colan Tribute (2008)", "This is a test description",
            "http://i.annihil.us/u/prod/marvel/i/mg/3/03/5109a0811486f.jpg",
            23, 22, 11, 1)
        new_comic = Comic(
            428, "Ant-Man (2003) #4", 4, "This is a test description",
            "12345", 0, 2.99,
            "http://i.annihil.us/u/prod/marvel/i/mg/4/20/4bc697c680890.jpg",
            2, 0, 0)

        db.session.add_all(
            [new_character, new_comic, new_creator, new_series, new_event])
        db.session.commit()
        print(
            "\nDatabase erased, reinitialized and prepped for TravisCI Build\n"
        )
    except Exception:
        # Preserve the original best-effort behaviour: any failure during the
        # reset is reported with the same neutral message.
        print("\nNothing deleted. Have a good day.\n")
def update_metric(user: InternalUserDTO, series_uuid: UUID, ts: datetime, metric_update: MetricDTO) -> MetricDTO:
    """Overwrite the data payload of one metric; write access required."""
    owner = Series.find_or_fail(series_uuid)
    Item.find_writeable_or_fail(user, owner.item_uuid)
    target = Metric.find_or_fail((series_uuid, ts))
    target.data = metric_update.data
    target.save()
    target.session.commit()
    return to_model(target, MetricDTO)
def create_series_metric(user: InternalUserDTO, series_uuid: UUID, metric_dto: MetricDTO) -> MetricDTO:
    """Attach a new metric to a series; write access required."""
    owner = Series.find_or_fail(series_uuid)
    Item.find_writeable_or_fail(user, owner.item_uuid)
    metric_dto.series_uuid = owner.uuid  # type: ignore
    created = Metric(**metric_dto.to_dict())
    created.save()
    created.session.commit()
    return to_model(created, MetricDTO)
def test_iteration(self):
    """Iterating a Series yields exactly one Candle per timestamp."""
    base = np.array([1, 2, 3, 4])
    candles = Series("pair", "1h", base, base, base, base, base, base)
    count = sum(1 for candle in candles if isinstance(candle, Candle))
    assert count == base.size
def series(db, item, request):
    """Fixture: persist one Series attached to *item*, returned as a dict."""
    from app.models import Series

    created = Series.create(
        item_uuid=item["uuid"],
        data={"name": "test-series1", "second_prop": "test-prop1"},
    )
    Series.session.commit()
    return created.to_dict()
def addSeries():
    """Render the add-series form; on a valid submit, persist and go home."""
    form = SeriesForm()
    if not form.validate_on_submit():
        # First visit or validation failure: show the form again.
        return render_template('series_form.html', form=form)

    entry = Series(
        title=form.title.data,
        link=form.link.data,
        watching=form.watching.data,
        anime=form.anime.data,
        dubbed=form.dubbed.data,
        user_id=current_user.id,
    )
    db.session.add(entry)
    db.session.commit()
    return redirect(url_for('home'))
def test_recorder(database):
    """Recorder basics: defaults, related serieses, and current-series rules."""
    rec = Recorder(uid='test_recorder', location_description='On table')
    database.session.add(rec)
    database.session.commit()

    assert rec.uid == 'test_recorder'
    assert rec.location_description == 'On table'
    assert rec.created_at is not None
    assert rec.current_series_uid is None
    assert rec.serieses == []

    # test adding related serieses
    first = Series(recorder=rec)
    second = Series(recorder=rec)
    database.session.add(first)
    database.session.add(second)
    database.session.commit()
    assert len(rec.serieses) == 2

    # test setting current series from managed serieses
    rec.current_series_uid = first.uid
    database.session.commit()
    assert rec.current_series_uid == first.uid

    # test getting current series
    assert rec.current_series.uid == first.uid

    # test setting non existing series as current series
    with pytest.raises(exc.NoResultFound):
        rec.current_series_uid = 'non-existing-series'

    # test setting existing series but not maintained by current recorder
    foreign_recorder = Recorder(uid='other_recorder')
    foreign_series = Series(recorder_uid=foreign_recorder.uid)
    database.session.add(foreign_recorder)
    database.session.add(foreign_series)
    database.session.commit()
    with pytest.raises(ValueError):
        rec.current_series_uid = foreign_series.uid
def find_series(comic, db):
    """Attach *comic* to an existing Series by fuzzy title match, or create one."""
    # assumes the last 4 chars of the comic title are a suffix to strip — TODO confirm
    series_title = comic.title[:-4]
    pattern = "%{}%".format(series_title)
    matches = [row.title for row in Series.query.filter(Series.title.like(pattern)).all()]

    if not matches:
        db.session.add(Series(title=series_title, comics=[comic]))
    else:
        scored = process.extract(series_title, matches)
        best = max(scored, key=itemgetter(1))
        if best[1] < 85:
            # Fuzzy score too low: treat as a brand-new series.
            db.session.add(Series(title=series_title, comics=[comic]))
        else:
            matched = Series.query.filter_by(title=best[0]).one()
            comic.series_id = matched.id
    db.session.commit()
def new_series():
    """Create a Series from the request JSON, reusing or creating its
    RecordingParameters.

    Expects a JSON body with ``recorder_uid``, a nested ``parameters``
    object (optionally carrying a ``uid``), and the remaining Series
    fields. Responds 404 for an unknown recorder and 400 on integrity or
    value errors.
    """
    series_data = request.get_json()
    # 404 early if the recorder does not exist; return value unused.
    get_object_or_404(Recorder, series_data['recorder_uid'])
    parameters = series_data.pop('parameters')
    try:
        try:
            # If the client supplied a parameters uid, try to reuse that row.
            uid = parameters.pop('uid')
            parameters_obj = get_object(RecordingParameters, uid)
        except orm.exc.NoResultFound:
            # uid given but unknown: create parameters under that uid.
            # (``uid`` is bound here because pop() succeeded before the lookup raised.)
            parameters_obj = RecordingParameters(uid=uid, **parameters)
        except KeyError:
            # No uid supplied at all: create anonymous parameters.
            parameters_obj = RecordingParameters(**parameters)
        db.session.add(parameters_obj)
        db.session.commit()
        series = Series(parameters_uid=parameters_obj.uid, **series_data)
        db.session.add(series)
        db.session.commit()
        return series.to_dict()
    except exc.IntegrityError as ex:
        db.session.rollback()
        flask.abort(400, str(ex))
    except ValueError as ex:
        # NOTE(review): no rollback here, unlike the IntegrityError path — confirm intended.
        flask.abort(400, str(ex))
    # Unreachable in practice (abort raises); keeps a defined return value.
    return {}
def test_series(database):
    """A persisted Series gets defaults and links back to recorder/parameters."""
    rec = Recorder()
    params = RecordingParameters()
    database.session.add_all([rec, params])
    database.session.commit()

    srs = Series(uid='SeriesX', description='Some series',
                 parameters_uid=params.uid, recorder_uid=rec.uid)
    database.session.add(srs)
    database.session.commit()

    assert srs.uid is not None
    assert srs.created_at is not None
    assert srs.parameters == params
    assert srs.recorder == rec
    assert srs.records == []
def __init__(self, pair: str, timeframe: str, limit: int, last_timestamp: int) -> None:
    """
    To return data without NaN values, indicators are calculated on a period
    of length ``limit + magic_limit``; data returned by ``prepare()`` still
    has length ``limit``.
    """
    self.magic_limit = config.BaseConfig.MAGIC_LIMIT
    self.margin = config.BaseConfig.MARGIN

    self.pair = pair
    self.timeframe = timeframe
    # Never request fewer than 20 candles.
    self.limit = max(limit, 20)
    self.last_timestamp = last_timestamp

    self.dates = []
    self.output = dict()
    self.data = Series(self.pair, self.timeframe)
def add_series(form):
    """Create a Series from *form*, guarding against duplicate names."""
    name = form.name.data.strip()
    name = text_tools.fix_string(name, recase=False)
    stripped = nt.prepFilenameForMatching(name)
    have = AlternateNames.query.filter(AlternateNames.cleanname == stripped).all()
    rel_type = form.type.data.strip()

    if len(have) == 1:
        # Exactly one alternate-name hit: point the user at the existing series.
        flash(gettext('Series exists under a different name!'))
        return redirect(url_for('renderSeriesIdWithoutSlug', sid=have[0].series))
    if have:
        # Several candidates — let the user disambiguate via search.
        flash(gettext('Have multiple candidate series that look like that name!'))
        return redirect(url_for('search', title=name))

    new = Series(
        title=name,
        tl_type=rel_type,
        changetime=datetime.datetime.now(),
        changeuser=g.user.id,
    )
    db.session.add(new)
    # session must be committed before adding alternate names,
    # or the primary key links will fail.
    db.session.commit()
    series_tools.updateAltNames(new, [name])
    flash(gettext('Series Created!'))
    return redirect(url_for('renderSeriesIdWithoutSlug', sid=new.id))
def get_create_series(seriesname, tl_type, changeuser, author_name=False):
    """Look up a Series by *seriesname* (via AlternateNames), creating it if
    no acceptable match exists.

    *author_name* may be False (unset), a string, or a list of strings; it is
    used to disambiguate between same-titled series. Retries up to 3 times on
    IntegrityError (concurrent creation).
    """
    tries = 0
    while 1:
        try:
            have = AlternateNames \
                .query \
                .filter(AlternateNames.name == seriesname) \
                .order_by(AlternateNames.id) \
                .all()

            # There's 4 options here:
            #  - Update and have item has author ->
            #        match, fail if match fails.
            #  - Update has author, have does not ->
            #        only allow matches after haves with authors exhausted.
            #  - have has author, update does not ->
            #        Glob onto series anyways.
            #  - Update and have do not have author ->
            #        do best match.
            # From the perspective of our approach, if we have a name, we try
            # for that, then look for empty items, finally return none if
            # nothing present.

            # Commit to refresh the session if any alt-name row has lost its
            # series_row link (presumably forces stale rows to reload — verify).
            if not all([tmp.series_row for tmp in have]):
                db.session.commit()

            # Only candidates of the same translation type are acceptable.
            valid_haves = [
                tmp for tmp in have
                if tmp.series_row and tmp.series_row.tl_type == tl_type
            ]

            # Try for author match first:
            if author_name:
                for item in [
                        tmp for tmp in valid_haves if tmp.series_row.author
                ]:
                    if isinstance(author_name, list):
                        if any([
                                auth_tmp.lower() in [
                                    tmp.name.lower()
                                    for tmp in item.series_row.author
                                ] for auth_tmp in author_name
                        ]):
                            return item.series_row
                    else:
                        if author_name.lower() in [
                                tmp.name.lower()
                                for tmp in item.series_row.author
                        ]:
                            return item.series_row
                # No authored candidate matched: fall back to candidates that
                # carry no author at all.
                for item in [
                        tmp for tmp in valid_haves
                        if not tmp.series_row.author
                ]:
                    return item.series_row
            else:
                # No author specified globs onto first possible match.
                for item in valid_haves:
                    return item.series_row

            # No alt-name hit at all: try a direct title match on Series.
            haveS = Series \
                .query \
                .filter(Series.title == seriesname) \
                .limit(1) \
                .scalar()

            # Title collision handling: build a disambiguated title (sName)
            # from the author list or the translation type.
            if haveS and author_name:
                if isinstance(author_name, str):
                    sName = "{} ({})".format(seriesname, author_name)
                else:
                    sName = "{} ({})".format(seriesname, ", ".join(author_name))
            elif haveS:
                if haveS.tl_type != tl_type:
                    if tl_type == "oel":
                        st = "OEL"
                    else:
                        st = tl_type.title()
                    sName = "{} ({})".format(seriesname, st)
                else:
                    # Same title, same tl_type: treat the existing row as ours.
                    return haveS
            else:
                sName = seriesname

            # We've built a new series title by appending the author/tl_type
            # Now we need to check if that exists too.
            if sName != seriesname:
                # NOTE(review): this re-queries on the *original* seriesname,
                # not sName, and returns haveS unconditionally (even if None),
                # which makes the creation path below unreachable whenever a
                # disambiguated title was built. Likely missing an
                # ``if haveS:`` guard and/or should filter on sName — confirm
                # against callers before changing.
                haveS = Series \
                    .query \
                    .filter(Series.title == seriesname) \
                    .limit(1) \
                    .scalar()
                return haveS

            print("Need to create new series entry for ", seriesname)
            new = Series(
                title=sName,
                changeuser=
                changeuser,  # Hard coded RSS user ID. Probably a bad idea.
                changetime=datetime.datetime.now(),
                tl_type=tl_type,
            )
            db.session.add(new)
            db.session.flush()

            if author_name:
                if isinstance(author_name, str):
                    author_name = [
                        author_name,
                    ]
                series_tools.setAuthorIllust(new, author=author_name)

            # Always register the plain name as an alternate name.
            altn1 = AlternateNames(
                name=seriesname,
                cleanname=nt.prepFilenameForMatching(seriesname),
                series=new.id,
                changetime=datetime.datetime.now(),
                changeuser=changeuser)
            db.session.add(altn1)

            if sName != seriesname:
                # NOTE(review): cleanname is derived from seriesname rather
                # than sName — presumably so both rows normalise identically,
                # but verify that this is intentional.
                altn2 = AlternateNames(
                    name=sName,
                    cleanname=nt.prepFilenameForMatching(seriesname),
                    series=new.id,
                    changetime=datetime.datetime.now(),
                    changeuser=changeuser)
                db.session.add(altn2)

            db.session.commit()
            return new
        except sqlalchemy.exc.IntegrityError:
            # Probably a concurrent insert of the same series; retry a few times.
            print("Concurrency issue?")
            print("'%s', '%s', '%s'" % (seriesname, tl_type, author_name))
            db.session.rollback()
            tries += 1
            if tries > 3:
                raise
        except Exception:
            print("Error!")
            raise
def import_mail(mail, project_name=None): # some basic sanity checks if 'From' not in mail: return 0 if 'Subject' not in mail: return 0 if 'Message-Id' not in mail: return 0 message_id = mail.get('Message-Id').strip() if Patch.query.filter_by(msgid=message_id).count() or \ Comment.query.filter_by(msgid=message_id).count(): print('We have already imported a message with id "%s"' % message_id) return hint = mail.get('X-Patchwork-Hint', '').lower() if hint == 'ignore': return 0 header_parser = HeaderParser(mail) submitter = Submitter.get_or_create(name=header_parser.from_name, email=header_parser.from_email) if not project_name: project_name = header_parser.project_name project = find_project(project_name) if project is None: print 'No project for %s found' % project_name dump_mail(mail, header_parser.message_id) return 0 try: content_parser = ContentParser(project, mail) except: print 'Email %s is not parsable' % (header_parser.message_id) dump_mail(mail, header_parser.message_id) return 0 patch = None if content_parser.pull_url or content_parser.patch: subject_parser = SubjectParser(mail.get('Subject'), [project.linkname]) name = subject_parser.name tags = find_or_create_tags(subject_parser.tags) patch = Patch(name=name, pull_url=content_parser.pull_url, content=content_parser.patch, date=mail_date(mail), headers=mail_headers(mail), tags=tags) match = gitsendemail_re.match(header_parser.message_id) if match: (uid, num, email) = match.groups() series = Series.get_or_create(uid) else: series = Series.get_or_create(header_parser.message_id) series.patches.append(patch) db.session.add(series) patch.submitter = submitter patch.msgid = header_parser.message_id patch.project = project ancestor = find_ancestor(project, mail, patch) if ancestor: patch.ancestors.append(ancestor) if patch is None: patch = find_patch_for_mail(project, mail) if patch is not None: patch.state = PatchState.comments if patch is not None: db.session.add(patch) if content_parser.comment and patch is 
not None: comment = Comment(patch=patch, date=mail_date(mail), content=content_parser.comment, headers=mail_headers(mail)) comment.submitter = submitter comment.msgid = header_parser.message_id db.session.add(comment) db.session.commit() return 0
def __init__(self, series: Series):
    """Capture the pair/timeframe and the close-price points of *series*."""
    self._pair = series.pair
    self._timeframe = series.timeframe
    self.points = [point for point in series.values(key="close")]
def get_metric(user: InternalUserDTO, series_uuid: UUID, ts: datetime) -> MetricDTO:
    """Fetch a single metric by (series, timestamp); read access required."""
    owner = Series.find_or_fail(series_uuid)
    Item.find_readable_or_fail(user, owner.item_uuid)
    dto = Metric.find_or_fail((series_uuid, ts))
    return to_model(Metric(**dto.to_dict()), MetricDTO)
def import_mail(mail):
    """Import one mail message as a Patch and/or Comment.

    Returns 0 in every case; unknown projects and unparsable mails are
    dumped and skipped. Note: this is Python 2 code (``print`` statements).
    NOTE(review): unlike the other import_mail variant in this file, this one
    does no duplicate-Message-Id check — confirm whether that is intended.
    """
    # some basic sanity checks
    if 'From' not in mail:
        return 0
    if 'Subject' not in mail:
        return 0
    if 'Message-Id' not in mail:
        return 0

    hint = mail.get('X-Patchwork-Hint', '').lower()
    if hint == 'ignore':
        return 0

    header_parser = HeaderParser(mail)
    submitter = Submitter.get_or_create(name=header_parser.from_name,
                                        email=header_parser.from_email)

    project = find_project(header_parser.project_name)
    if project is None:
        print 'No project for %s found' % header_parser.project_name
        dump_mail(mail, header_parser.message_id)
        return 0

    try:
        content_parser = ContentParser(project, mail)
    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
    except:
        print 'Email %s is not parsable' % (header_parser.message_id)
        dump_mail(mail, header_parser.message_id)
        return 0

    patch = None
    if content_parser.pull_url or content_parser.patch:
        subject_parser = SubjectParser(mail.get('Subject'), [project.linkname])
        name = subject_parser.name
        tags = find_or_create_tags(subject_parser.tags)
        patch = Patch(name=name,
                      pull_url=content_parser.pull_url,
                      content=content_parser.patch,
                      date=mail_date(mail),
                      headers=mail_headers(mail),
                      tags=tags)
        # git-send-email message ids encode a series uid; fall back to the
        # message id itself otherwise.
        match = gitsendemail_re.match(header_parser.message_id)
        if match:
            (uid, num, email) = match.groups()
            series = Series.get_or_create(uid)
        else:
            series = Series.get_or_create(header_parser.message_id)
        series.patches.append(patch)
        db.session.add(series)
        patch.submitter = submitter
        patch.msgid = header_parser.message_id
        patch.project = project
        ancestor = find_ancestor(project, mail, patch)
        if ancestor:
            patch.ancestors.append(ancestor)

    if patch is None:
        # Not a patch mail: see if it belongs to an existing patch thread.
        patch = find_patch_for_mail(project, mail)
        if patch is not None:
            patch.state = PatchState.comments

    comment = None
    if content_parser.comment:
        if patch is not None:
            comment = Comment(patch=patch,
                              date=mail_date(mail),
                              content=content_parser.comment,
                              headers=mail_headers(mail))

    if patch is not None:
        # we delay the saving until we know we have a patch.
        db.session.add(patch)

    if comment is not None:
        # looks like the original constructor for Comment takes the pk
        # when the Comment is created. reset it here.
        if patch:
            comment.patch = patch
        comment.submitter = submitter
        comment.msgid = header_parser.message_id
        db.session.add(comment)

    db.session.commit()
    return 0
def get_item_series(user: InternalUserDTO, item_uuid: UUID) -> List[SeriesDTO]:
    """List every series belonging to an item the user can read."""
    Item.find_readable_or_fail(user, item_uuid)
    dtos = Series.find_by_item_uuid(item_uuid)
    rows = [Series(**dto.to_dict()) for dto in dtos]
    return to_models(rows, SeriesDTO)
def delete_series_by_uuid(user: InternalUserDTO, series_uuid: UUID) -> None:
    """Delete a series; the user must have write access to the owning item."""
    target = Series.find_or_fail(series_uuid)
    Item.find_writeable_or_fail(user, target.item_uuid)
    target.delete()
    target.session.commit()
def get_series(user: InternalUserDTO, series_uuid: UUID) -> SeriesDTO:
    """Fetch one series by uuid; the user must be able to read its item."""
    found = Series.find_or_fail(series_uuid)
    Item.find_readable_or_fail(user, found.item_uuid)
    return to_model(found, SeriesDTO)