def create(self, validated_data):
    user_ip = self.context['request'].user_ip
    short_url = get_random_string(length=6)
    link = Link(
        short_url=short_url,
        created=datetime.now(),
        author_ip=user_ip,
        redirect_location=validated_data["redirect_location"],
    )
    link.save()
    return link

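# A minimal usage sketch, assuming this create() lives on a DRF serializer
# (here called LinkSerializer, an illustrative name) and that middleware has
# already attached user_ip to the request:
#
#     serializer = LinkSerializer(
#         data={"redirect_location": "https://example.com"},
#         context={"request": request},
#     )
#     serializer.is_valid(raise_exception=True)
#     link = serializer.save()  # DRF dispatches to create() above
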
def handle(self, *args, **options):
    filename = "inputs.csv"
    # Start from a clean slate before re-importing.
    Activity.objects.all().delete()
    Category.objects.all().delete()
    Link.objects.all().delete()
    with open(filename, newline="") as csvfile:
        csvreader = csv.DictReader(csvfile)
        for row in csvreader:
            if row["is_published"].lower() == "false":
                continue
            print(row["name"])
            activity = Activity(
                name=row["name"],
                description=row["description"],
                date_created=datetime.datetime.strptime(
                    row["date_created"], "%m/%d/%Y").date(),
                min_cost=row["min_cost"],
                max_cost=row["max_cost"],
                min_participants=row["min_participants"],
                max_participants=row["max_participants"],
                requirements=row["requirements"],
            )
            activity.save()
            links = row["links"].split("\n")
            for link in links:
                if len(link):
                    name, url = link.split(",")
                    link = Link(name=name.strip(), url=url.strip(), activity=activity)
                    link.save()
            categories_names = [
                category.strip().lower()
                for category in row["categories"].split(",")
                if len(category)
            ]
            for category_name in categories_names:
                try:
                    category = Category.objects.get(name=category_name)
                except Category.DoesNotExist:
                    # create() already persists the row; no extra save() needed.
                    category = Category.objects.create(name=category_name)
                activity.categories.add(category)

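# For reference, a row of inputs.csv that this command accepts might look
# like the sketch below (values are illustrative; "links" holds
# newline-separated "name, url" pairs and "categories" a comma-separated list):
#
#     name,description,date_created,min_cost,max_cost,min_participants,max_participants,requirements,is_published,links,categories
#     Hiking,Day hike,01/15/2023,0,20,2,10,Boots,True,"Trail map, https://example.com/map","outdoors, fitness"
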
def make_group(group_id: str) -> Group:
    return Group(
        id=group_id,
        links=[Link(link='0.3', occurrences=2)],
        members={
            oneUrl: make_page(oneUrl),
            twoUrl: make_page(twoUrl),
        },
        categories=[Category(name='Social', occurrence=2)],
    )

def make_custom_page(url: str,
                     links: Optional[List[str]] = None,
                     category: str = 'Social',
                     content: str = 'Social page content') -> Page:
    # Avoid a mutable default argument: a shared [] would leak state
    # between calls.
    links = links or []
    return Page(
        url=url,
        categories=[Category(name=category, occurrence=1)],
        content=content,
        links=[Link(link=lin, occurrences=1) for lin in links],
    )

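# Why the default-argument change above matters: with a mutable default the
# same list object is shared across calls, so state leaks. A minimal
# self-contained demonstration:

def _broken(items=[]):  # one list object, created once at definition time
    items.append(1)
    return items

assert _broken() == [1]
assert _broken() == [1, 1]  # the first call's mutation is still visible
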
def make_meta_group(group_id: str) -> MetaGroup:
    return MetaGroup(
        id=group_id,
        links=[Link(link='0.3', occurrences=2)],
        first_members=[make_page(oneUrl), make_page(twoUrl)],
        categories=[Category(name='Social', occurrence=2)],
        members_count=2,
        domains_count=2,
    )

def convert_link_aliases_to_keys(groups: List[Group]) -> List[Group]:
    for group in groups:
        converted_links = set()
        for link in group.links:
            reversed_link = labels_index[int(link.link)]
            converted_link = Link(link=reversed_link, occurrences=link.occurrences)
            converted_links.add(converted_link)
        group.links = list(converted_links)
    return groups

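# The set-based de-duplication above relies on Link being hashable by value.
# A compatible shape (an assumption, not the project's actual model) would
# be a frozen dataclass:

from dataclasses import dataclass

@dataclass(frozen=True)
class _HashableLink:  # illustrative name; stands in for the real Link
    link: str
    occurrences: int
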
def factory_method(cls, **kwargs) -> Link:
    translation = {
        "url": "link_content",
        "url_md5": "link_md5",
        "identifier": "message_id",
    }
    translated_args = cls.translate_kwargs(cls, translation=translation, args=kwargs)
    return Link(**translated_args)

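# translate_kwargs is not shown here; a plausible sketch of its contract
# (purely an assumption, matching the explicit cls at the call site) is a
# simple key remapper:
#
#     @staticmethod
#     def translate_kwargs(owner, translation, args):
#         # Rename each key per `translation`; unknown keys pass through.
#         return {translation.get(key, key): value for key, value in args.items()}
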
def get_links_from_json(json_links) -> List[Link]:
    links = {}
    if json_links:
        for json_link in json_links:
            link_url = json_link.get("link")
            if is_url_valid(link_url) and not link_url.startswith("/") and link_url not in links:
                link = Link(link=link_url, name=json_link.get("link_name"), occurrences=1)
                links[link_url] = link
    return list(links.values())

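# Example input and output for get_links_from_json (shapes inferred from the
# code above; values illustrative):
#
#     json_links = [
#         {"link": "http://example.onion/a", "link_name": "A"},
#         {"link": "/relative", "link_name": "skipped: relative URL"},
#         {"link": "http://example.onion/a", "link_name": "skipped: duplicate"},
#     ]
#     get_links_from_json(json_links)
#     # -> [Link(link='http://example.onion/a', name='A', occurrences=1)]
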
def post(self):
    title = request.json.get('title')
    desc = request.json.get('desc')
    url = request.json.get('url')
    schema = LinkSchema()
    # Note: schema.dump(...).data relies on marshmallow 2.x, where dump()
    # returns a MarshalResult; in 3.x dump() returns the dict directly.
    has = schema.dump(Link.query.filter(or_(Link.title == title, Link.url == url)).first())
    if has.data is None or len(has.data) == 0:
        db.session.add(Link(title=title, url=url, desc=desc))
        db.session.commit()
    return success()

def view_past_records():
    current_user = get_username_jwt()
    if current_user:
        try:
            startPage = int(request.args.get('start'))
        except (TypeError, ValueError):
            return bad_request("Error, invalid parameters!")
        past_submissions = Link.get_user_past_records(username=current_user, start=startPage)
        return make_response(
            jsonify(logged_in_as=current_user,
                    past_submissions=past_submissions,
                    page=startPage),
            200)
    else:
        return bad_request('Please login to view this module!')

def get_past_content():
    platform = request.args.get('platform')
    search_string = request.args.get('search_string')
    current_platforms = ['All', 'Facebook', 'Twitter', 'Instagram', 'LinkedIn', 'User']
    if platform is None or search_string is None:
        return bad_request('Invalid input!')
    elif platform not in current_platforms:
        return bad_request('Platform is not supported!')
    else:
        results = Link().get_past_content(platform, search_string)
        return make_response(jsonify(results=results), 200)

def get_records():
    try:
        startPage = int(request.args.get('start'))
        return make_response(jsonify(results=Link.get_past_records(start=startPage), page=startPage), 200)
    except Exception:
        # We raise an error when the parameters or results are invalid. We
        # could add custom messages for other scenarios (e.g. checking that
        # the input includes the start parameter, or that the Link module
        # returned results), but for brevity this simple read operation uses
        # a single catch-all; the logfile covers the rest.
        return bad_request('Parameters are invalid!')

def create_link(request, from_id, to_id):
    from_cap = Capsule.objects.get(pk=from_id)
    to_cap = Capsule.objects.get(pk=to_id)
    if request.method == "POST":
        # request.POST.iteritems() was Python 2 only; items() is the
        # Python 3 form.
        link = Link(capsule=to_cap, **{k: v for k, v in request.POST.items()})
        if not Capsule.objects.filter(pk=from_cap.pk, links__capsule=to_cap):
            link.save()
            from_cap.links.add(link)
            return HttpResponse(json.dumps({'data': 'success'}), content_type="application/json")
        return HttpResponse(json.dumps({'data': 'link already exists'}), content_type='application/json')
    elif request.method == "DELETE":
        link = from_cap.links.get(capsule=to_cap)
        if link:
            from_cap.links.remove(link)
            link.delete()
            return HttpResponse(json.dumps({'data': 'success'}), content_type="application/json")
        return HttpResponse(json.dumps({'data': 'link does not exist'}), content_type='application/json')

def get_results():
    results = Link.get_summarised_records()
    try:
        return make_response(jsonify(results=results), 200)
    except Exception:
        # Since there have not been any errors with such a simple function,
        # we keep a global catch-all; in the unlikely event of an error we
        # consult the logfile. Checking the result for None is unnecessary:
        # this is a plain fetch of all records, and an empty result set is
        # valid. The same reasoning applies to every simple get function here.
        return bad_request('There has been an error, please try again')

def evaluate_link():
    results = None
    url = request.get_json()
    try:
        url = str(url.get('search')).strip()
        if not url:
            # The URL in the search parameter is blank.
            return bad_request('Please enter a URL!')
        else:
            # Otherwise, scrape the URL.
            results = scraper(url)
    except Exception:
        # The search parameter is missing from the request body.
        return bad_request('Please check your parameters!')
    if results["is_valid"] is not True:
        return bad_request(results["text"])
    # JWT-based user details; default to 'Guest' when not logged in.
    current_user = get_username_jwt() or 'Guest'
    if 'platform' in results and results["is_valid"]:
        # Only load the models from disk once per session to keep overhead
        # minimal; the globals() check ensures a single initialisation.
        if 'sentiment' not in globals():
            global sentiment, log_model
            sentiment, log_model = load_models()
        try:
            # Remove noise and tokenise the text.
            _text = remove_noise(word_tokenize(results['text']))
            # Classify sentiment with the trained model.
            results['sentiment_result'] = sentiment.classify(dict([token, True] for token in _text))
            _fraud_result = log_model.predict([results['text']])
            # This is a binary classifier; scikit-learn's predict_proba gives
            # the numerical probability of the fake/real label.
            results['fraud_probab'] = max(log_model.predict_proba([results['text']])[0])
            results['fraud_result'] = 'Fake' if _fraud_result[0] == 'fake' else 'Real'
            # Persist the scraper results to the DB.
            added_link = Link.add_link(url=url, platform=results['platform'], text=results['text'],
                                       sentiment=results['sentiment_result'], fraud=results['fraud_result'],
                                       fraud_probability=results['fraud_probab'], username=current_user)
            return make_response(jsonify(results=results['text'], url=url,
                                         sentiment=results['sentiment_result'],
                                         fraud=results['fraud_result'],
                                         fraud_probability=results['fraud_probab'],
                                         id=added_link.id), 200)
        except Exception as e:
            return bad_request(f'There has been an error evaluating your link, please validate your links again. Error: {e}')

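# A tidier alternative to the globals()-based model cache above (a sketch,
# assuming load_models() is importable and side-effect free):

from functools import lru_cache

@lru_cache(maxsize=1)
def get_models():
    # load_models() runs only on the first call; later calls return the
    # cached (sentiment, log_model) pair.
    return load_models()
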
def give_feedback():
    current_user = get_username_jwt()
    if current_user is None:
        return bad_request('Please login to provide your feedback!')
    data = request.get_json()
    allowed_feedbacks = ['Neutral', 'Great', 'Poor']
    if 'id' not in data or 'feedback_string' not in data:
        return bad_request('Feedback ID or feedback is missing.')
    elif data['feedback_string'] not in allowed_feedbacks:
        return bad_request('Invalid feedback!')
    else:
        feedback = Link().add_feedback(username=current_user,
                                       feedback_string=data['feedback_string'],
                                       id=data['id'])
        if feedback is None:
            return bad_request('Invalid feedback ID!')
        else:
            # Return 201 since a record was created.
            return make_response(jsonify(message=f'Thank you for your feedback. {feedback.url} has been rated.'), 201)

def test_get_linked_groups_from_ids(self):
    """Returns groups with links to pages"""
    url_zero = 'zero.onion'
    url_one = 'one.onion'
    url_two = 'two.onion'
    url_three = 'three.onion'
    links_zero = [url_zero, url_one, url_two, url_three]
    links_one = [url_zero, url_three]
    links_two = [url_two]
    links_three = []
    page_one = make_custom_page(url=url_one, links=links_one)
    page_zero = make_custom_page(url=url_zero, links=links_zero)
    page_two = make_custom_page(url=url_two, links=links_two)
    page_three = make_custom_page(url=url_three, links=links_three)
    pages = {url_zero: page_zero, url_one: page_one,
             url_two: page_two, url_three: page_three}
    group_id_one = 1
    group_id_two = 2
    partition = {url_zero: group_id_one, url_one: group_id_one,
                 url_two: group_id_two, url_three: group_id_one}
    parent_group_id = '0.2'
    expected = [
        make_custom_group(
            group_id=parent_group_id + '.' + str(group_id_one),
            links=[Link(link='0.2.2', name='')],
            categories=[Category(name='Social', occurrence=3)],
            pages={url_zero: page_zero, url_one: page_one, url_three: page_three}
        ),
        make_custom_group(
            group_id=parent_group_id + '.' + str(group_id_two),
            links=[],
            categories=[Category(name='Social', occurrence=1)],
            pages={url_two: page_two}
        )]
    actual = get_linked_groups_from_ids(pages, partition, parent_group_id)
    self.assertEqual(len(expected), len(actual))
    self.assertEqual(expected[0].id, actual[0].id)
    self.assertEqual(expected[0].categories[0].name, actual[0].categories[0].name)
    self.assertEqual(expected[0].categories[0].occurrence, actual[0].categories[0].occurrence)
    self.assertEqual(len(expected[0].links), len(actual[0].links))
    self.assertEqual(len(expected[0].members), len(actual[0].members))
    self.assertEqual(expected[1].id, actual[1].id)
    self.assertEqual(expected[1].categories[0].name, actual[1].categories[0].name)
    self.assertEqual(expected[1].categories[0].occurrence, actual[1].categories[0].occurrence)
    self.assertEqual(len(expected[1].links), len(actual[1].links))
    self.assertEqual(len(expected[1].members), len(actual[1].members))

def get_linked_groups_from_ids(pages: Dict[str, Page],
                               partition: Dict[str, int],
                               parent_group_id: str = None) -> List[Group]:
    """
    :param pages: the original pages from db
    :type pages: Dict[str, Page]
    :param partition: each page as the original page key with its respective group key
    :type partition: Dict[str, int]
    :param parent_group_id: the parent group key whose pages the subgroups are created from
    :type parent_group_id: str or None
    :return: a list of groups with links to original page keys
    :rtype: List[Group]
    """
    reversed_partition = reverse_partition(partition)
    groups_with_links, nodes_of_groups = get_links_and_nodes_of_groups(
        pages, reversed_partition)
    groups = []
    for group_id, g_nodes in nodes_of_groups.items():
        group_links: Dict[str, Link] = {}
        links = groups_with_links[group_id]
        parent_key_prefix = parent_group_id + "." if parent_group_id else ""
        if links is not None:
            for link in links:
                link_to_group = partition.get(link.link)
                whole_id = str(parent_key_prefix) + str(link_to_group)
                if link_to_group is not None and link_to_group != group_id:
                    # The accumulator is keyed by the fully prefixed id, so the
                    # membership check must use whole_id as well.
                    if whole_id not in group_links:
                        new_link = Link(link=whole_id, occurrences=link.occurrences)
                        group_links[whole_id] = new_link
                    else:
                        group_links[whole_id].occurrences += link.occurrences
        whole_group_id = str(parent_key_prefix) + str(group_id)
        group_members = {node.url: node for node in g_nodes}
        categories = create_categories_for_nodes(g_nodes)
        group = Group(id=whole_group_id,
                      links=[group_links[key] for key in group_links],
                      members=group_members,
                      categories=categories)
        groups.append(group)
    return groups

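# A tiny usage sketch (values assumed): pages partitioned into groups 1 and
# 2 under parent '0.2'; cross-group page links become group-level Links:
#
#     pages = {'a.onion': page_a, 'b.onion': page_b}  # page_a links to b.onion
#     partition = {'a.onion': 1, 'b.onion': 2}
#     groups = get_linked_groups_from_ids(pages, partition, '0.2')
#     # -> group ids '0.2.1' and '0.2.2'; the first group's links contain
#     #    Link(link='0.2.2', occurrences=1)
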
def get_links_and_nodes_of_groups(
        pages: Dict[str, Page], reversed_partition: Dict[int, List[str]]
) -> Tuple[Dict[int, List[Link]], Dict[int, List[Page]]]:
    """
    :param pages: the original pages from db
    :type pages: Dict[str, Page]
    :param reversed_partition: each partition key with all of its nodes as original page keys
    :type reversed_partition: Dict[int, List[str]]
    :return: links to other nodes and pages of a group
    :rtype: Tuple[Dict[int, List[Link]], Dict[int, List[Page]]]
    """
    groups_with_links_searchable = {}
    nodes_of_groups = defaultdict(list)
    for group_key, node_keys in reversed_partition.items():
        for node_key in node_keys:
            full_node = pages.get(node_key)
            if full_node:
                if group_key not in groups_with_links_searchable:
                    groups_with_links_searchable[group_key] = {}
                nodes_of_groups[group_key].append(full_node)
                links = set(full_node.links)
                for full_link in links:
                    link = full_link.link
                    if link not in groups_with_links_searchable[group_key]:
                        groups_with_links_searchable[group_key][link] = full_link.occurrences
                    else:
                        groups_with_links_searchable[group_key][link] += full_link.occurrences
    groups_with_links = {}
    for group_key, group_links in groups_with_links_searchable.items():
        groups_with_links[group_key] = [
            Link(link=link_key, occurrences=occurrence)
            for link_key, occurrence in group_links.items()
        ]
    return groups_with_links, nodes_of_groups

def create_link(created_by, parent_link_id):
    new_link = Link()
    if parent_link_id is not None:
        link = Link.query.filter_by(id=parent_link_id).first()
        new_link.parent_link_id = link.id
        if link.root_link_id is None:
            # The parent link is itself the root of the chain.
            new_link.root_link_id = link.id
            original_user = link.created_by
        else:
            root_link = Link.query.filter_by(id=link.root_link_id).first()
            new_link.root_link_id = root_link.id
            original_user = root_link.created_by
    else:
        # No parent: this new link becomes a root.
        new_link.parent_link = None
        new_link.root_link = None
        original_user = created_by
    url = Link.url_from_username(original_user, created_by)
    new_link.url = url
    new_link.desc = f"af link by {created_by}"
    new_link.created_by = created_by
    db.session.add(new_link)
    db.session.commit()
    # Re-fetch by url so the returned instance reflects the committed row.
    new_link = Link.query.filter_by(url=url).first()
    return new_link

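# Usage sketch (assuming the chain semantics implied above): a call without
# a parent creates a root link; a call with a parent id chains off it and
# credits the chain's original creator in the generated URL:
#
#     root = create_link(created_by='alice', parent_link_id=None)
#     child = create_link(created_by='bob', parent_link_id=root.id)
#     # child.root_link_id == root.id; child.url is built from
#     # Link.url_from_username('alice', 'bob')
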
def get_trending():
    try:
        trending = Link().get_trending()
        return make_response(jsonify(results=trending), 200)
    except Exception:
        return bad_request('Parameters are invalid!')

import datetime

from api.models import Link
from api.views import random_string

for i in range(10):
    l = Link(
        short_url=random_string(),
        redirect_location=f"http://127.0.0.1:8000/{i}",
        expiration_date=datetime.datetime.now(),
        created=datetime.datetime.now(),
        author_ip=f"127.0.0.{int(i / 2)}",
        is_active=True,
    )
    l.save()

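# This seed snippet needs Django's settings configured, so a typical way to
# run it (seed_links.py is an illustrative filename) is via the management
# shell:
#
#     python manage.py shell < seed_links.py
#
# Note that expiration_date is set to now(), so every seeded link starts out
# already expired; add a timedelta if live links are wanted:
#
#     expiration_date=datetime.datetime.now() + datetime.timedelta(days=7)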