def test_create_activity_on_not_existing_offer_saves_an_insert_activity( self, app):
    """Activating an activity whose entityIdentifier matches no offer
    creates the offer and records exactly one 'insert' activity for it."""
    # Given
    entity_identifier = uuid4()
    payload = {'name': 'bar', 'type': 'foo'}
    activity = Activity(
        dateCreated=datetime.utcnow(),
        entityIdentifier=entity_identifier,
        patch=payload,
        tableName='offer',
    )

    # When
    ApiHandler.activate(activity)

    # Then
    activities = Activity.query.all()
    offer = Offer.query.filter_by(
        activityIdentifier=entity_identifier).one()
    related_activities = offer.__activities__
    first_activity = related_activities[0]
    assert len(activities) == 1
    assert len(related_activities) == 1
    assert first_activity.entityIdentifier == offer.activityIdentifier
    assert first_activity.verb == 'insert'
    assert payload.items() <= first_activity.datum.items()
    assert payload.items() <= first_activity.patch.items()
    assert first_activity.datum['id'] == humanize(offer.id)
    assert first_activity.patch['id'] == humanize(offer.id)
def test_create_offer_saves_an_insert_activity(self, app):
    """Saving a brand-new offer records exactly one 'insert' activity whose
    patch/datum mirror the saved fields and whose oldDatum is empty."""
    # Given
    offer_dict = {'name': 'bar', 'type': 'foo'}
    offer = Offer(**offer_dict)

    # When
    ApiHandler.save(offer)

    # Then
    all_activities = Activity.query.all()
    offer_activities = offer.__activities__
    insert_offer_activity = offer_activities[0]
    insert_offer_activity_from_query = Activity.query \
        .filter_by(entityIdentifier=offer.activityIdentifier) \
        .one()
    assert len(all_activities) == 1
    assert len(offer_activities) == 1
    assert insert_offer_activity_from_query == insert_offer_activity
    assert offer.activityIdentifier == insert_offer_activity.entityIdentifier
    assert insert_offer_activity.oldDatum == {}
    # idiom fix: compare with None by identity (was `== None`)
    assert insert_offer_activity.transaction is None
    assert insert_offer_activity.verb == 'insert'
    assert offer_dict.items() <= insert_offer_activity.patch.items()
    assert offer_dict.items() <= insert_offer_activity.datum.items()
    assert insert_offer_activity.datum['id'] == humanize(offer.id)
    assert insert_offer_activity.patch['id'] == humanize(offer.id)
def claim_verdicts_from_airtable(verdicts_to_sync=None, max_verdicts=None, sync_async=False):
    """Attach conclusion tags to verdicts from their Science Feedback claim reviews.

    Args:
        verdicts_to_sync: explicit verdicts to process; when None, all verdicts
            with a scienceFeedbackUrl are queried.
        max_verdicts: optional cap on the number of verdicts processed.
        sync_async: when True, fetch claim reviews concurrently.

    Returns:
        The list of verdicts considered (whether or not a review was found).
    """
    if verdicts_to_sync is None:
        # `!= None` is intentional: SQLAlchemy translates it to SQL IS NOT NULL
        query = Verdict.query.filter(Verdict.scienceFeedbackUrl != None)
        if max_verdicts is not None:
            query = query.limit(max_verdicts)
        verdicts = query.all()
    else:
        verdicts = verdicts_to_sync

    # bug fix: the caller-provided cap was previously overwritten with
    # len(verdicts) when verdicts_to_sync was given, defeating the limit.
    # A None cap slices harmlessly ([:None] keeps everything).
    urls = [verdict.scienceFeedbackUrl for verdict in verdicts][:max_verdicts]

    if sync_async:
        claim_reviews = map_asynchronous(claim_review_from_url, urls)
    else:
        claim_reviews = [claim_review_from_url(url) for url in urls]

    # zip stops at the shorter sequence, so verdicts beyond the cap are
    # skipped instead of raising IndexError on claim_reviews[index]
    for (verdict, claim_review) in zip(verdicts, claim_reviews):
        if not claim_review:
            continue
        for conclusion in claim_review['conclusions']:
            try:
                tag = Tag.create_or_modify({
                    '__SEARCH_BY__': ['label', 'type'],
                    'label': conclusion,
                    'type': TagType.CONCLUSION
                })
                if tag.id is None:
                    logger.info('Saving tag {}'.format(as_dict(tag)))
                    ApiHandler.save(tag)
                verdict_tag = VerdictTag.create_or_modify({
                    '__SEARCH_BY__': ['tagId', 'verdictId'],
                    'tagId': humanize(tag.id),
                    'verdictId': humanize(verdict.id)
                })
                verdict.verdictTags = verdict.verdictTags + [verdict_tag]
            except IntegrityError as e:
                logger.error('IntegrityError: {}, Conclusion: {}'.format(
                    e, conclusion))
            except InvalidRequestError as e:
                logger.error('InvalidRequestError: {}, Conclusion: {}'.format(
                    e, conclusion))
            except NotNullViolation as violation:
                # NOTE(review): if NotNullViolation subclasses the caught
                # IntegrityError (as in psycopg2), this branch is unreachable
                # — confirm which IntegrityError is imported
                logger.error('NotNullViolation: {}, Conclusion: {}'.format(
                    violation, conclusion))
    return verdicts
def author_from_row(row, index=None):
    """Create or update the author User described by an Airtable row.

    In development, name and email are synthesized from COMMAND_NAME and
    `index`; otherwise they come from the row's 'Name' column. A password is
    set only for brand-new users, and an AUTHOR role is always attached.
    """
    name_parts = row.get('Name', '').split(' ')
    if IS_DEVELOPMENT:
        first_name = '{}test'.format(COMMAND_NAME).title()
        last_name = 'Author{}'.format(index)
    else:
        first_name = name_parts[0]
        last_name = ' '.join(name_parts[1:]).replace('\'', '')

    user = User.create_or_modify({
        '__SEARCH_BY__': 'email',
        'email': '{}.{}@{}.{}'.format(
            first_name.lower(), last_name.lower(), APP_NAME, TLD),
        'firstName': first_name,
        'lastName': last_name,
        'scienceFeedbackIdentifier': row['airtableId']
    })

    # only brand-new users (no id yet) get a password assigned
    if not user.id:
        user.set_password(DEFAULT_USER_PASSWORD if IS_DEVELOPMENT
                          else create_random_password())

    role = Role.create_or_modify({
        '__SEARCH_BY__': ['type', 'userId'],
        'type': RoleType.AUTHOR,
        'userId': humanize(user.id)
    })
    user.roles = user.roles + [role]
    return user
def get_thumb_storage_id(self, index):
    """Return the storage key for this object's thumbnail at `index`.

    The key is '<plural-model-name>/<humanized-id>' with a '_<index>' suffix
    appended for every index after the first.

    Raises:
        ValueError: if the object has not been saved yet (no id).
    """
    if self.id is None:
        raise ValueError(
            "Trying to get get_thumb_storage_id for an unsaved object")
    plural_name = inflect.engine().plural(self.__class__.__name__.lower())
    suffix = '_' + str(index) if index > 0 else ''
    return plural_name + "/" + humanize(self.id) + suffix
def store_public_object_from_sandbox_assets(folder, public_object, thumb_id, index=0):
    """Load a sandbox asset file for `public_object` and store it.

    Assets live two directories above this module, under
    '<folder>/<plural-model-name>/<thumb_id>'. Files from the "thumbs"
    folder go through save_thumb; anything else is stored as a public
    object with the MIME type configured for that folder. The object is
    saved afterwards.
    """
    here = Path(os.path.dirname(os.path.realpath(__file__)))
    plural_model_name = get_model_plural_name(public_object)
    asset_path = here / '..' / '..' / folder / plural_model_name / str(thumb_id)

    with open(asset_path, mode='rb') as asset_file:
        payload = asset_file.read()
        if folder == "thumbs":
            save_thumb(public_object,
                       payload,
                       index,
                       convert=False,
                       symlink_path=asset_path)
        else:
            store_public_object(folder,
                                plural_model_name + '/' + humanize(public_object.id),
                                payload,
                                MIMES_BY_FOLDER[folder],
                                symlink_path=asset_path)

    ApiHandler.save(public_object)
def reviewer_from_row(row, index=None):
    """Create or update the reviewer User described by an Airtable row.

    In development, identity and email are synthesized from COMMAND_NAME and
    `index`; otherwise they come from the row's 'First name', 'Last name' and
    'Email' columns. A password is set only for brand-new users, and a
    REVIEWER role is always attached.
    """
    if IS_DEVELOPMENT:
        first_name = '{}test'.format(COMMAND_NAME).title()
        last_name = 'Reviewer{}'.format(index)
        email = '{}.{}@{}.{}'.format(
            first_name.lower(), last_name.lower(), APP_NAME, TLD)
    else:
        first_name = row['First name']
        last_name = row['Last name']
        email = row['Email']

    user = User.create_or_modify({
        '__SEARCH_BY__': 'email',
        'email': email,
        'firstName': first_name,
        'lastName': last_name,
        'scienceFeedbackIdentifier': row['airtableId']
    })

    # only brand-new users (no id yet) get a password assigned
    if not user.id:
        user.set_password(DEFAULT_USER_PASSWORD if IS_DEVELOPMENT
                          else create_random_password())

    role = Role.create_or_modify({
        '__SEARCH_BY__': ['type', 'userId'],
        'type': RoleType.REVIEWER,
        'userId': humanize(user.id)
    })
    user.roles = user.roles + [role]
    return user
def users_from_scrap(users_max=3):
    """Scrape reviewers and persist them as Users with their publications.

    Each scraped publication becomes a peer-publication-tagged Content linked
    to the user through an AuthorContent association.

    Args:
        users_max: maximum number of reviewers to scrape.

    Returns:
        The list of created/updated User entities.
    """
    reviewers = scrap_reviewers(reviewers_max=users_max)
    users = []
    for reviewer in reviewers:
        user = User.create_or_modify({
            '__SEARCH_BY__': ['firstName', 'lastName'],
            **reviewer
        })
        for publication in reviewer['publications']:
            content = content_from_url(publication['url'])
            content.tags = 'isValidatedAsPeerPublication'
            author_content = AuthorContent.create_or_modify({
                '__SEARCH_BY__': ['authorId', 'contentId'],
                'authorId': humanize(user.id),
                'contentId': humanize(content.id)
            })
            user.authorContents = user.authorContents + [author_content]
        # bug fix: `users` was built but never populated, so the function
        # always returned an empty list
        users.append(user)
    return users
def test_create_activity_on_not_existing_offer_with_model_name(self, app):
    """Activating an activity that references a missing offer via modelName
    creates the offer and records the insert."""
    # Given
    entity_identifier = uuid4()
    payload = {'name': 'bar', 'type': 'foo'}
    activity = Activity(
        dateCreated=datetime.utcnow(),
        entityIdentifier=entity_identifier,
        modelName='Offer',
        patch=payload,
    )

    # When
    ApiHandler.activate(activity)

    # Then
    offer = Offer.query.filter_by(
        activityIdentifier=entity_identifier).one()
    first_activity = offer.__activities__[0]
    assert first_activity.entityIdentifier == offer.activityIdentifier
    assert first_activity.verb == 'insert'
    assert payload.items() <= first_activity.datum.items()
    assert payload.items() <= first_activity.patch.items()
    assert first_activity.datum['id'] == humanize(offer.id)
    assert first_activity.patch['id'] == humanize(offer.id)
def test_return_only_not_soft_deleted_stocks(self, app):
    """Soft-deleted stocks are excluded from query results; the remaining
    stocks come back in id order."""
    # Given
    offer = Offer(name="foo", type="ThingType.JEUX_ABO")
    ApiHandler.save(offer)
    stocks = [Stock(price=price) for price in (1, 2, 3, 4)]
    for stock in stocks:
        stock.offer = offer
    stocks[0].isSoftDeleted = True
    ApiHandler.save(*stocks)

    # When
    result = get_result(Stock)
    data = result['data']

    # Then
    assert data[0]['id'] == humanize(stocks[1].id)
    assert data[1]['id'] == humanize(stocks[2].id)
    assert data[2]['id'] == humanize(stocks[3].id)
def content_verdicts_from_scrap(verdicts_max=3):
    """Scrape feedbacks and persist the corresponding verdicts with their
    reviewers, reviews and verdict/user associations.

    Args:
        verdicts_max: maximum number of feedbacks to scrape.

    Returns:
        The list of processed Verdict entities (bug fix: the list was
        previously never populated, so the function always returned []).
    """
    feedbacks = scrap_feedbacks(feedbacks_max=verdicts_max)
    verdicts = []
    for feedback in feedbacks:
        verdict = Verdict.create_or_modify(feedback)
        content = content_from_url(feedback['article']['url'])
        # NOTE(review): subscripting the Verdict entity — presumably the model
        # supports item access; verify this should not be feedback['reviewer']
        for reviewer_item in verdict['reviewer']:
            # renamed to avoid shadowing the loop variable with the fetched
            # reviewer payload (the original rebound `reviewer` in place)
            reviewer = reviewer_from_url(reviewer_item['url'])
            user = User.create_or_modify({
                '__SEARCH_BY__': ['firstName', 'lastName'],
                **reviewer
            })
            role = Role.create_or_modify({
                '__SEARCH_BY__': ['type', 'userId'],
                'type': 'reviewer',
                'userId': humanize(user.id)
            })
            user.roles = user.roles + [role]
            review = Review.create_or_modify({
                '__SEARCH_BY__': ['contentId', 'userId'],
                'contentId': humanize(content.id),
                'userId': humanize(user.id),
                **reviewer['review']
            })
            user.reviews = user.reviews + [review]
            verdict_user = VerdictUser.create_or_modify({
                '__SEARCH_BY__': ['verdictId', 'userId'],
                'verdictId': humanize(verdict.id),
                'userId': humanize(user.id)
            })
            verdict.verdictUsers = verdict.verdictUsers + [verdict_user]
        # bug fix: collect each processed verdict so callers get them back
        verdicts.append(verdict)
    return verdicts
def _(article, column=None, includes: Iterable = ()): article_dict = as_dict.registry[ApiHandler](article, includes=includes) # REMOVE OTHER REVIEWERS REVIEWS # TODO: This will never enable to see all reviews. Remove. if 'reviews' in article_dict and\ current_user.is_authenticated and\ RoleType.reviewer in map(lambda role: role.type, current_user.roles): humanized_user_id = humanize(current_user.id) reviews = article_dict['reviews'] article_dict['reviews'] = [ review for review in article_dict['reviews'] if review['userId'] == humanized_user_id ] if len(article_dict['reviews']) == 1: article_dict['reviews'] = reviews return article_dict
def appearance_from_row(row, unused_index=None):
    """Build (create or modify) an Appearance from an Airtable row.

    Returns None when the row lacks reviewed items, when neither a quoted
    claim nor quoted content can be resolved, or when no testifier is found.
    """
    reviewed_items = row.get('Item reviewed')
    if not reviewed_items:
        return None

    quoting_content = Content.create_or_modify({
        '__SEARCH_BY__': 'url',
        'url': row['url'].strip()
    })

    medium_science_feedback_ids = row.get('Outlet')
    if medium_science_feedback_ids:
        medium = Medium.query.filter_by(
            scienceFeedbackIdentifier=medium_science_feedback_ids[0]).first()
        # robustness: .first() may return None for an unknown outlet;
        # previously this raised AttributeError on medium.id
        if medium:
            quoting_content.mediumId = medium.id

    author_science_feedback_ids = row.get('Authors')
    if author_science_feedback_ids:
        for author_science_feedback_id in author_science_feedback_ids:
            author = User.query.filter_by(
                scienceFeedbackIdentifier=author_science_feedback_id).first()
            # robustness: skip authors that are not (yet) in the database
            if author is None:
                continue
            author_content = AuthorContent.create_or_modify({
                '__SEARCH_BY__': ['authorId', 'contentId'],
                'authorId': humanize(author.id),
                'contentId': humanize(quoting_content.id)
            })
            quoting_content.authorContents = \
                quoting_content.authorContents + [author_content]

    quoted_claim = Claim.query.filter_by(
        scienceFeedbackIdentifier=reviewed_items[0]).first()
    quoted_content = None
    if not quoted_claim:
        quoted_content = Content.query.filter_by(
            scienceFeedbackIdentifier=reviewed_items[0]).first()
    if not quoted_claim and not quoted_content:
        return None

    testifier_science_feedback_ids = row.get('Verified by')
    if not testifier_science_feedback_ids:
        return None
    testifier = User.query.filter_by(
        scienceFeedbackIdentifier=testifier_science_feedback_ids[0]).first()
    if not testifier:
        return None

    if IS_DEVELOPMENT:
        # simplification: the trailing `if IS_DEVELOPMENT else None` ternary
        # was redundant inside this IS_DEVELOPMENT block
        quoting_content.externalThumbUrl = API_URL + '/static/logo.png'
        quoting_content.title = "/".join(
            quoting_content.url
            .replace('http://', '')
            .replace('https://', '')
            .split('/')[-2:]
        ).replace('-', ' ')

    appearance_dict = {
        '__SEARCH_BY__': 'scienceFeedbackIdentifier',
        'quotedClaim': quoted_claim,
        'quotedContent': quoted_content,
        'quotingContent': quoting_content,
        'scienceFeedbackIdentifier': row['airtableId'],
        'testifier': testifier
    }
    return Appearance.create_or_modify(appearance_dict)
def _tokenify(indexes): return "".join([humanize(index) for index in indexes])
def thumbUrl(self):
    """Public URL of this entity's thumbnail under the storage base URL."""
    storage_root = get_storage_base_url()
    plural_name = get_model_plural_name(self)
    return '{}/{}/{}'.format(storage_root + "/thumbs",
                             plural_name,
                             humanize(self.id))
def assert_update_activity_match_model(patch, activity, offer):
    """Assert `activity` records an 'update' of `offer` carrying `patch`,
    with the previous datum keyed by the offer's humanized id."""
    _assert_activity_match_model(patch, activity, offer)
    assert activity.verb == 'update'
    expected_old_id = humanize(offer.id)
    assert activity.oldDatum['id'] == expected_old_id
def assert_insert_activity_match_model(patch, activity, offer):
    """Assert `activity` records an 'insert' of `offer` carrying `patch`,
    with the patch keyed by the offer's humanized id."""
    _assert_activity_match_model(patch, activity, offer)
    assert activity.verb == 'insert'
    expected_patch_id = humanize(offer.id)
    assert activity.patch['id'] == expected_patch_id
def graph_from_entity(entity, depth=0, graph=None, limit=None, node_ids=None,
                      shortcutted_types=None, source_entity=None):
    """Recursively build a node/edge graph from an entity and its SQLAlchemy
    relationships.

    The top-level call (depth == 0) returns the graph dict; recursive calls
    return (graph, has_added) so the parent knows whether to draw an edge.
    `graph` and `node_ids` are shared mutable accumulators threaded through
    the recursion. Types listed in `shortcutted_types` are traversed but not
    added as nodes; their children attach to `source_entity`'s node instead.

    NOTE(review): `limit` counts appended nodes; once reached, recursion
    stops adding (but edges for already-visited children are still possible).
    """
    if shortcutted_types is None:
        shortcutted_types = []
    if not graph:
        # first call: initialize the shared accumulators and seed the PRNG
        # so node x/y positions are reproducible across runs
        graph = {
            'collectionName': inflect_engine.plural_noun(
                entity.__class__.__name__.lower()),
            'entityId': humanize(entity.id),
            'nodes': [],
            'edges': []
        }
        node_ids = []
        seed(1)
    node_id = node_id_from(entity)
    has_added = False
    if limit and len(node_ids) >= limit:
        # node budget exhausted — return in the shape the caller expects
        if not depth:
            return graph
        return graph, has_added
    if node_id not in node_ids:
        has_added = True
        node_type = node_type_from(entity)
        # shortcutted types are traversed but not rendered as nodes
        is_appended = node_type not in shortcutted_types
        if is_appended:
            node = {
                'datum': as_dict(entity),
                'label': label_from(entity),
                'id': node_id,
                'type': node_type,
                'x': random(),
                'y': random(),
                'size': 3
            }
            node_ids.append(node_id)
            graph['nodes'].append(node)
        # walk every relationship; scalar relations are wrapped in a list
        for key in entity.__mapper__.relationships.keys():
            sub_entities = getattr(entity, key)
            if not isinstance(sub_entities, list):
                sub_entities = [sub_entities]
            for sub_entity in sub_entities:
                if sub_entity:
                    (unused_graph, has_added_sub_entity) = graph_from_entity(
                        sub_entity,
                        depth=depth + 1,
                        graph=graph,
                        limit=limit,
                        node_ids=node_ids,
                        shortcutted_types=shortcutted_types,
                        source_entity=entity)
                    if has_added_sub_entity:
                        sub_node_id = node_id_from(sub_entity)
                        # when this entity was shortcutted, the edge starts
                        # from the nearest rendered ancestor instead
                        source = node_id if is_appended else node_id_from(
                            source_entity)
                        edge = {
                            'id': '{}_{}'.format(source, sub_node_id),
                            'source': source,
                            'target': sub_node_id
                        }
                        graph['edges'].append(edge)
    if not depth:
        return graph
    return graph, has_added