def setup():
    """Serve the first-run setup page, or bounce to the index.

    While the user collection is still empty, grant the session a
    site_admin role so the very first account can be created.
    """
    common.connect_db()
    # Setup is only reachable before any user exists.
    if len(authentication.User.objects()) != 0:
        return redirect("/")
    session["user"] = {"role": "site_admin"}
    return render_template("setup.html")
def user_id(username):
    """Serve a user's certificate PDF (POST) or their project page (GET).

    POST returns the cached certificate for the user; GET renders the
    user's projects, dropping any project whose studio cannot be found
    in the database.
    """
    if request.method == "POST":
        return send_from_directory(
            f"{CACHE_DIRECTORY}/certificates",
            filename="{}.pdf".format(username.lower()))
    common.connect_db()
    projects = list(scrape.Project.objects(author=username.lower()))
    studios = dict()
    keep_projects = list()
    # Was `for i, project in enumerate(projects)` with an unused index.
    for project in projects:
        if project["studio_id"] not in studios:
            studio = scrape.Studio.objects(
                studio_id=project["studio_id"]).first()
            if studio is None:
                # Studio is missing from the database: drop the project.
                continue
            studios[project["studio_id"]] = studio
        keep_projects.append(project)
    return render_template("username.html",
                           projects=keep_projects,
                           studios=studios,
                           username=username)
def get_image_urls(studio_ids=None,
                   credentials_file=settings.DEFAULT_CREDENTIALS_FILE):
    """Gets image URLs from database.

    Args:
        studio_ids (array-like): a set of studios from which to gather
            project images. Defaults to None, in which case will get all
            studios' project images.
        credentials_file (str): path to the database credentials file.

    Returns:
        A set of image URLs.
    """
    common.connect_db(credentials_file)
    if studio_ids is None:
        studio_ids = scrape.Studio.objects(
            public_show=True).values_list("studio_id")
    projects = set(
        scrape.Project.objects(studio_id__in=studio_ids).values_list("image"))
    # discard() is a no-op when None is absent; replaces the bare
    # `try: remove ... except: pass`, which swallowed every exception.
    projects.discard(None)
    return projects
def studio():
    """Render the studio form (GET) or queue a studio scrape (POST)."""
    if request.method == "GET":
        common.connect_db()
        schemas = list(schema.Challenge.objects().order_by("-modified"))
        return render_template("studio.html", schemas=schemas)
    scraper = Scraper()
    sid = scraper.get_id(request.form["studio"])
    chosen_schema = None
    if request.form["schema"] != "__none__":
        chosen_schema = request.form["schema"]
    # "__all__" triggers a full rescrape instead of a single-studio add.
    if request.form["studio"] == "__all__":
        scrape.rescrape_all.delay(cache_directory=CACHE_DIRECTORY)
        return "Started"
    if sid is not None:
        scrape.add_studio.delay(sid,
                                schema=chosen_schema,
                                show=("show" in request.form),
                                cache_directory=CACHE_DIRECTORY)
        return redirect("/studio/{0}".format(sid))
    return render_template(
        "studio.html", message="Please enter a valid studio ID or URL.")
def studio_id(sid):
    """Show a single studio's page with its projects and scrape status."""
    if sid == "":
        return redirect("/prompts")
    common.connect_db()
    studio = scrape.Studio.objects(studio_id=sid).first()
    # Hide missing or non-public studios from logged-out visitors.
    if studio is None or not (studio["public_show"]
                              or authentication.session_active()):
        return redirect("/prompts")
    projects = list(scrape.Project.objects(studio_id=sid).order_by("author"))
    info = {"authors": [], "project_ids": [], "titles": []}
    for proj in projects:
        info["authors"].append(proj["author"].lower())
        info["project_ids"].append(proj["project_id"])
        info["titles"].append(proj["title"].lower())
    message = None
    # A None status means the scrape has not recorded completion yet.
    if studio["status"] == "in_progress" or studio["status"] is None:
        message = "This studio is currently in the process of being downloaded and analyzed. <a href=''>Refresh page.</a>"
    return render_template("studio_id.html",
                           info=info,
                           projects=projects,
                           studio=studio,
                           message=message)
def feedback_owner(pid):
    """Return the gu_uid of the newest reflection for a project.

    Best-effort lookup: returns "" when no reflection exists, the field
    is missing, or the database is unreachable.
    """
    try:
        common.connect_db()
        reflection = scrape.ProjectReflection.objects(
            project_id=pid).order_by("-timestamp").first()
        return reflection["gu_uid"]
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; keep the best-effort "" fallback otherwise.
        return ""
def init(log_name):
    """Initialize the main logger, database connectors, and networks.

    Sets the module globals ``main_logger``, ``oracle_db`` and
    ``mysql_db``. Each starts as False, so False doubles as the
    "not initialized" sentinel used throughout this function.

    Args:
        log_name: name passed to common.get_logger for the main logger.

    Returns:
        True if every global was set up successfully, otherwise False
        (any database connection already opened is closed first).
    """
    global main_logger, oracle_db, mysql_db
    main_logger = False
    oracle_db = False
    mysql_db = False
    main_logger = common.get_logger(log_name)
    # Initialize the darknet (YOLO) network.
    yolo_handler.init()
    # Initialize openpose.
    openpose_handler.init()
    # Connect to the Oracle database.
    main_logger.info('Connecting to Oracle database')
    # conn_rt = [True/False, log_info, database object]
    oracle_conn_rt = common.connect_db('oracle')
    if oracle_conn_rt[0] is False:  # was "== False"
        main_logger.error(oracle_conn_rt[1])
    else:
        main_logger.info(oracle_conn_rt[1])
        oracle_db = oracle_conn_rt[2]
    # Connect to the MySQL database.
    main_logger.info('Connecting to Mysql database')
    mysql_conn_rt = common.connect_db('mysql')
    if mysql_conn_rt[0] is False:
        main_logger.error(mysql_conn_rt[1])
    else:
        main_logger.info(mysql_conn_rt[1])
        mysql_db = mysql_conn_rt[2]
    if (main_logger is not False and oracle_db is not False
            and mysql_db is not False):
        main_logger.info('Initializing process successfully')
        return True
    main_logger.error('Initializing process Failed!')
    # Close whichever connections did succeed before reporting failure.
    if oracle_db is not False:
        common.close_db(oracle_db, 'oracle')
    if mysql_db is not False:
        common.close_db(mysql_db, 'mysql')
    return False
def schema_editor(id):
    """Render the schema editor, pre-filled for an existing challenge.

    ``id`` is either "__new__" (blank template) or a Challenge document
    id; a nonexistent id raises NotFound.
    """
    # Blank template used when creating a brand-new schema.
    data = {
        "min_instructions_length": 0,
        "min_description_length": 0,
        "min_comments_made": 0,
        "min_blockify": {
            "comments": 0,
            "costumes": 0,
            "sounds": 0,
            "sprites": 0,
            "variables": 0
        },
        "required_text": [],
        "required_block_categories": {},
        "required_blocks": [],
        "stats": [],
        "text": {},
        "comparison_basis": {
            "basis": "__none__",
            "priority": None
        }
    }
    if id != "__new__":
        common.connect_db()
        try:
            data = schema.Challenge.objects(id=id).first().to_mongo()
        except AttributeError:
            # .first() returned None -- no such challenge.
            raise NotFound()
    blocks = parser.block_data
    block_list = []
    block_dict = {}
    for category in blocks:
        block_list.extend(blocks[category].keys())
        for opcode in blocks[category]:
            # Map the squashed lower-case label back to its opcode.
            squashed = blocks[category][opcode].lower().replace(" ", "")
            block_dict[squashed] = opcode
    return render_template("admin/edit_schema.html",
                           blocks=blocks,
                           block_dict=block_dict,
                           block_list=block_list,
                           categories=list(blocks.keys()),
                           data=data,
                           schema_id=id,
                           stats=scrape.get_default_studio_stats())
def project_feedback(pid):
    """Save a project reflection posted by the client.

    Requires the _gu_uid cookie plus "feelings" and "minutes" keys in
    the JSON body. Returns the strings "True"/"False" (existing API
    contract for the front end).
    """
    if ("_gu_uid" in request.cookies and "feelings" in request.json
            and "minutes" in request.json):
        try:
            common.connect_db()
            reflection = scrape.ProjectReflection(
                project_id=pid,
                gu_uid=request.cookies.get("_gu_uid"),
                minutes=int(request.json["minutes"]),
                feelings=request.json["feelings"])
            reflection.save()
            return "True"
        except Exception:
            # Was a bare `except:`; keep best-effort semantics but stop
            # swallowing KeyboardInterrupt/SystemExit.
            return "False"
    else:
        return "False"
def studio_list(sid):
    """Return a JSON listing of a studio's projects, paged and ordered.

    Query args: page (int, default 0), order (one of author, title, id,
    project_id; default author), limit (int <= 100, default 8).
    Returns 400 on bad arguments and 404 for an unknown studio.
    """
    if sid == "":
        return "Must include a studio ID.", 400
    common.connect_db()
    studio = scrape.Studio.objects(studio_id=sid).first()
    if studio is None:
        return "Studio does not exist.", 404
    limit = 8
    page = 0
    order = "author"
    try:
        if "page" in request.args:
            page = int(request.args["page"])
        if "order" in request.args:
            if request.args["order"] in {
                    "author", "title", "id", "project_id"
            }:
                order = request.args["order"]
        if "limit" in request.args:
            # Cap the page size at 100 items.
            if int(request.args["limit"]) <= 100:
                limit = int(request.args["limit"])
    except ValueError:
        # Was a bare `except:`; only the int() conversions can raise here.
        return "Invalid arguments", 400
    skip = page * limit
    projects = scrape.Project.objects(
        studio_id=sid).order_by(order).skip(skip).limit(limit)
    info = {"projects": list()}
    # Was `for i, project in enumerate(projects)` with an unused index.
    for project in projects:
        info["projects"].append({
            "project_id": project["project_id"],
            "title": project["title"],
            "author": project["author"],
            "image": (project["image"] if "image" in project else ""),
            "modified": project["history"]["modified"]
        })
    return Response(json.dumps(info), mimetype="application/json")
def get_unique_authors(studio_ids,
                       credentials_file=settings.DEFAULT_CREDENTIALS_FILE):
    """Gets the unique authors of projects across studios.

    Args:
        studio_ids (array-like): the list of studio IDs for studios for
            which a set of unique authors is desired.
        credentials_file (str): path to the database credentials file.

    Returns:
        A set of unique authors of projects.
    """
    common.connect_db(credentials_file)
    author_values = scrape.Project.objects(
        studio_id__in=studio_ids).values_list("author")
    return set(author_values)
def init(log_name):
    """Initialize the main logger and the database connector objects.

    Sets the module globals ``main_logger``, ``oracle_db`` and
    ``mysql_db``; each starts as False, so False doubles as the
    "not initialized" sentinel checked below.

    Args:
        log_name: name passed to common.get_logger for the main logger.

    Returns:
        True if every global was set up successfully, otherwise False
        (any database connection already opened is closed first).
    """
    print('get_video initializing')
    global main_logger, oracle_db, mysql_db
    main_logger = False
    oracle_db = False
    mysql_db = False
    main_logger = common.get_logger(log_name)
    main_logger.info('Connecting to Oracle database')
    # conn_rt = [True/False, log_info, database object]
    oracle_conn_rt = common.connect_db('oracle')
    if oracle_conn_rt[0] is False:  # was "== False"
        main_logger.error(oracle_conn_rt[1])
    else:
        main_logger.info(oracle_conn_rt[1])
        oracle_db = oracle_conn_rt[2]
    main_logger.info('Connecting to Mysql database')
    mysql_conn_rt = common.connect_db('mysql')
    if mysql_conn_rt[0] is False:
        main_logger.error(mysql_conn_rt[1])
    else:
        main_logger.info(mysql_conn_rt[1])
        mysql_db = mysql_conn_rt[2]
    if (main_logger is not False and oracle_db is not False
            and mysql_db is not False):
        main_logger.info('Initializing process successfully')
        return True
    main_logger.error('Initializing process Failed!')
    # Close whichever connections did succeed before reporting failure.
    if oracle_db is not False:
        common.close_db(oracle_db, 'oracle')
    if mysql_db is not False:
        common.close_db(mysql_db, 'mysql')
    return False
def get_total_engagement(studio_ids,
                         credentials_file=settings.DEFAULT_CREDENTIALS_FILE):
    """Gets engagement for projects within a studio.

    Args:
        studio_ids (array-like): a list of studio IDs for which to
            retrieve project engagement.
        credentials_file (str): path to the database credentials file.

    Returns:
        A dictionary mapping {views, loves, favorites} to integers
        representing the total counts in the studios chosen.
    """
    common.connect_db(credentials_file)
    engagement = scrape.Project.objects(
        studio_id__in=studio_ids).values_list("engagement")
    stats = {"views": 0, "loves": 0, "favorites": 0}
    # Sum each engagement metric across every project found.
    for entry in engagement:
        for key in stats:
            stats[key] += entry[key]
    return stats
def get_ordered_studios(credentials_file=settings.DEFAULT_CREDENTIALS_FILE):
    """Gets the studios ordered by short label and title.

    Args:
        credentials_file (str): path to the database credentials file.

    Returns:
        An ordered list of studios. Studios without schemas will be
        excluded.
    """
    common.connect_db(credentials_file)
    # Get studios and their schema ids. (An unused `studio_ids` local
    # was removed here.)
    studios = scrape.Studio.objects(public_show=True)
    schema_ids = set(studios.values_list("challenge_id"))
    # discard() is a no-op when None is absent; replaces the bare
    # `try: remove ... except: pass`.
    schema_ids.discard(None)
    schemas = schema.Challenge.objects(id__in=schema_ids).order_by(
        "short_label", "title")
    schema_order = [str(v) for v in schemas.values_list("id")]
    # Order studios by schema label and title.
    # NOTE(review): two studios sharing a schema would collide on the
    # same slot and only the last would survive -- confirm studios map
    # 1:1 to schemas.
    studio_order = [None] * len(studios)
    for studio in studios:
        schema_id = str(studio["challenge_id"])
        if schema_id != "None":
            studio_order[schema_order.index(schema_id)] = studio
    studio_order = [s for s in studio_order if s is not None]
    return studio_order
def prompts():
    """Render the prompts page: public studios grouped under their schemas."""
    common.connect_db()
    # Keep only studios that have a challenge schema attached.
    # BUG FIX: the original removed items from `studios` while iterating
    # it and then `break`-ed after the first studio missing a
    # challenge_id, which skipped every remaining studio (their schema
    # ids were never collected).
    studios = [
        s for s in scrape.Studio.objects(public_show=True)
        if "challenge_id" in s
    ]
    schema_ids = {studio["challenge_id"] for studio in studios}
    schemas = schema.Challenge.objects(id__in=schema_ids).order_by(
        "short_label", "title")
    id_order = [str(schema_id) for schema_id in schemas.values_list("id")]
    # as_pymongo() yields plain dicts keyed by "_id".
    new_schemas = dict()
    for sc in schemas.as_pymongo():
        new_schemas[str(sc["_id"])] = sc
    # Order the studios to match the schema ordering.
    ordered_studios = [None] * len(studios)
    for studio in studios:
        studio["challenge_id"] = str(studio["challenge_id"])
        try:
            ordered_studios[id_order.index(studio["challenge_id"])] = studio
        except ValueError:
            # Studio references a schema not present in id_order.
            pass
    return render_template("prompts.html",
                           challenges=ordered_studios,
                           schemas=new_schemas)
def cleanup(request):
    """Test teardown helper: drop the test database using stored creds."""
    with open("tests/fixtures/secure/db.json") as handle:
        credentials = json.load(handle)
    connection = common.connect_db(credentials)
    connection.drop_database("test_db")
def generate_certificate():
    """Queue certificate generation for every project author."""
    common.connect_db()
    # De-duplicate authors before handing them to the async task.
    unique_authors = set(scrape.Project.objects().values_list("author"))
    certificate.generate_certs.delay(list(unique_authors))
    return redirect("/admin/utilities")