Ejemplo n.º 1
0
    def from_dict(self, dic):
        """Rebuild self.submissions from the serialized entries in *dic*."""
        rebuilt = []
        for entry in dic['submissions']:
            item = Submission()
            item.from_dict(entry, self.reddit)
            rebuilt.append(item)
        self.submissions = rebuilt
Ejemplo n.º 2
0
    def submit(self, revision, data, mode_info, log=None):
        """Create an AWFY Treeherder job for *revision* and submit it as
        completed, linking to an uploaded log item.

        Args:
            revision: revision hash the results belong to.
            data: perf data payload passed to submit_completed_job.
            mode_info: dict with "repo", "job_name", "job_symbol",
                "tier" and "platform" keys.
            log: optional extra log info stored in the uploaded log item.
        """
        # FIX: `log=None` replaces the original mutable default `log=[]`,
        # which would have been shared across calls.
        if log is None:
            log = []
        repo = mode_info["repo"]
        settings = {
            "treeherder": {
                'group_symbol': 'AWFY',
                'group_name': 'AWFY',
                'job_name': mode_info["job_name"],
                'job_symbol': mode_info["job_symbol"],
                "tier": mode_info["tier"],
                "platform": mode_info["platform"]
            }
        }

        logfile = create_log_item({
            "repo": repo,
            "revision": revision,
            "settings": settings,
            "perf_data": data,
            "extra_log_info": log
        })
        loglink = "https://arewefastyet.com/data.php?file=treeherder-logs/"+logfile

        th = Submission(repo, revision,
                        treeherder_url = awfy.th_host,
                        treeherder_client_id = awfy.th_user, 
                        treeherder_secret = awfy.th_secret,
                        settings = settings)

        job = th.create_job(None)
        th.submit_completed_job(job, data, loglink = loglink)
Ejemplo n.º 3
0
 def test_blank_name(self):
     # Saving a Submission with only an id set should be rejected; the
     # model is expected to raise KeyError for the missing fields.
     # (Python 2 snippet: old `except X, e` and print-statement syntax.)
     sub = Submission()
     sub.id = 1
     try:
         sub.save()
     except KeyError, e:
         print 'Adding blank submission failed: %s' % e
Ejemplo n.º 4
0
 def test_creation(self):
     """A Submission built from a dict must mirror id/completed/score/answers."""
     payload = {'id': 45, 'completed': datetime.now(), 'score': 40, 'answers':
             {'q1': 'Correct', 'q2': 'Wrong', 'q3': 'Wrong', 'q4': 'Correct', 'q5': 'Wrong'}
         }
     sub = Submission(payload)
     assert sub.uid == payload['id']
     assert sub.completed == payload['completed']
     assert sub.score == payload['score']
     assert sub.answers == payload['answers']
     sub.save()
Ejemplo n.º 5
0
 def loading_data():
     '''
     Creates objects from DB and then a list of all users to loop through for login and password validation.
     '''
     # Note the 'assignements' table name is misspelled in the DB schema.
     Assignments.assignments_list = Assignments.create_objects_list_from_database('assignements')
     Attendance.attendances_list = Attendance.create_objects_list_from_database('attendance')
     Submission.create_objects_list_from_database('submission')
     # Attendance must be loaded before it can be attached to students,
     # and teams are built only after students carry their attendance.
     Student.add_attendance_to_student(Attendance.attendances_list) # add attendance obj to a specific student
     Student.create_teams_list()
Ejemplo n.º 6
0
    def __init__(self, reddit_settings=None):
        """Create a praw Reddit client and optionally pre-load self.submissions.

        reddit_settings keys:
            mode: 'find' (search by topic) or 'url' (explicit URLs);
                defaults to 'find' when missing.
            topic, amount, sortby: used in 'find' mode (amount defaults
                to 10, sortby to 'hot'; topic is mandatory).
            submission_urls: mandatory in 'url' mode.
        """
        # NOTE(review): client credentials are hard-coded in source; they
        # should be moved to configuration / environment variables.
        self.reddit = praw.Reddit(
            client_id='Vyw-20ZFtH4msA',
            client_secret='-vZkEG8s6qlRvbTcuGxmJOnpAds',
            user_agent='ubuntu:arguing-agents:v1 (by /u/HolzmindenScherfede)')

        # FIX: None default replaces the original mutable default `{}`.
        if reddit_settings is None:
            reddit_settings = {}

        if reddit_settings == {}:
            print(
                "Reddit: __init__: No settings given. Creating empty object.")
            return

        if 'mode' not in reddit_settings:
            print(
                "Reddit: __init__: Mode not provided. Initializing it to 'find'."
            )

        # BUG FIX: the original read reddit_settings['mode'] unconditionally,
        # raising KeyError right after announcing the 'find' fallback.
        mode = reddit_settings.get('mode', 'find')

        if mode == 'find':
            if 'topic' in reddit_settings:
                topic = reddit_settings['topic']
            else:
                print("Reddit: provide topic")
                exit(-1003)

            amount = reddit_settings.get('amount', 10)
            sortby = reddit_settings.get('sortby', 'hot')

            praw_submissions = list(
                self.reddit.subreddit('ChangeMyView').search(topic,
                                                             sortby))[:amount]

            self.submissions = [
                Submission(praw_submission)
                for praw_submission in praw_submissions
            ]
        elif mode == 'url':
            if 'submission_urls' not in reddit_settings:
                print('Reddit: provide submission_urls!')
                exit(-12031)

            self.submissions = [
                Submission(
                    praw.models.Submission(self.reddit, url=submission_url))
                for submission_url in reddit_settings['submission_urls']
            ]
Ejemplo n.º 7
0
 def show_full_statistics_about_students():
     '''
     Collects per-student averages and prints the full statistics table.
     '''
     averages_by_id = Submission.get_students_average_grades(Submission.submission_list)
     named_averages = Submission.get_name_by_id(averages_by_id, Student.student_list)
     rows = Student.get_full_statistics_about_students(Student.student_list, named_averages)
     headers = ['ID', 'Name', 'Surname', 'Email', 'Team', 'Average', 'Card']
     Ui.print_table(rows, 'FULL STATISTICS ABOUT STUDENTS', headers)
Ejemplo n.º 8
0
 def show_average_of_grades():
     '''
     Builds and prints the table of students' average grades.
     '''
     averages_by_id = Submission.get_students_average_grades(Submission.submission_list)
     named_averages = Submission.get_name_by_id(averages_by_id, Student.student_list)
     headers = ['Id', 'Name', 'Surname', 'Average of grades']
     rows = Ui.create_average_grades_table_to_print(named_averages)
     Ui.print_table(rows, 'Students average grades', headers)
Ejemplo n.º 9
0
 def test_duplicate(self):
     # Saving the same Submission twice should be rejected; the model is
     # expected to signal the duplicate with a KeyError.
     # (Python 2 snippet: old `except X, e` and print-statement syntax.)
     s = {'id': 30, 'completed': datetime.now(), 'score': 40, 'answers':
             {'q1': 'Correct', 'q2': 'Wrong', 'q3': 'Wrong', 'q4': 'Correct', 'q5': 'Wrong'}
         }
     sub = Submission(s)
     sub.save()
     try:
         sub.save()
     except KeyError, e:
         print 'Failed trying to save duplicate: %s' % e
Ejemplo n.º 10
0
def import_old():
    """One-shot importer: seeds resources and tracks, then converts the
    sections of schedule2013.html into Submission rows.

    Sections are <div class="section"> nodes whose children hold, in order:
    title, tag list, and a presenter/description node. Everything is added
    to the session and committed once at the end.
    """
    existing_tags = {}
    for tag in Tag.query.all():
        # BUG FIX: was `exisiting_tags[...]` (typo), which raised NameError
        # on the first pre-existing tag and broke the import.
        existing_tags[tag.name] = tag
    existing_people = {}
    for person in Person.query.all():
        existing_people[person.name] = person
    for resource in ['Projector', 'Microphone', 'Sound system', 'Drinking water', 'Quiet (no airwalls)']:
        penguicontrax.db.session.add(Resource(resource))
    for track in ['literature', 'tech', 'music', 'food', 'science']:
        penguicontrax.db.session.add(Track(track,None))
    with penguicontrax.app.open_resource('schedule2013.html', mode='r') as f:
        tree = ET.fromstring(f.read())
        events = tree.find('document')
        for section in events:
            if section.tag == 'div' and section.attrib['class'] == 'section':
                name = section[0].text
                tag_list = section[1].text # Tag doesn't seem to be in the DB yet
                person = section[3][0].text
                description = section[3][0].tail
                submission = Submission()
                submission.title = name
                submission.description = description
                submission.duration = 1
                submission.setupTime = 0
                submission.repetition = 0
                submission.followUpState = 0
                submission.eventType = 'talk'
                #Load presenters
                submission.personPresenters= []
                for presenter in [presenter.strip() for presenter in person.split(',')]:
                    # renamed from `person` so the comma-separated presenter
                    # string above is no longer shadowed mid-loop
                    presenter_obj = None
                    if not presenter in existing_people:
                        presenter_obj = Person(presenter)
                        penguicontrax.db.session.add(presenter_obj)
                        existing_people[presenter] = presenter_obj
                    else:
                        presenter_obj = existing_people[presenter]
                    submission.personPresenters.append(presenter_obj)
                #Load Tags
                submission.tags = []
                for tag in tag_list.split(','):
                    tag = normalize_tag_name(tag)
                    db_tag = None
                    if not tag in existing_tags:
                        db_tag = Tag(tag)
                        penguicontrax.db.session.add(db_tag)
                        existing_tags[tag] = db_tag
                    else:
                        db_tag = existing_tags[tag]
                    submission.tags.append(db_tag)
                penguicontrax.db.session.add(submission)
        penguicontrax.db.session.commit()
Ejemplo n.º 11
0
def submit():
    """Accept posted code, store it under a fresh 8-character id, queue it
    for judging, and redirect to the submission view."""
    code = request.form['code']

    # Generate a unique short ID for the submission.
    # FIX: renamed the local from `id` — it shadowed the builtin.
    submission_id = str(uuid.uuid4())[:8]
    while Submission.objects(id=submission_id).count() > 0:
        submission_id = str(uuid.uuid4())[:8]

    # save the submission and submit it for judging
    submission = Submission(submission_text=code, id=submission_id)
    submission.save()
    judge.submit(submission)

    return redirect(url_for('view_submission', id=submission_id))
Ejemplo n.º 12
0
    def handleSubmission(id, lang, code):
        """Compile and run one submission under a 5-second wall-clock limit.

        Args:
            id: submission identifier (note: shadows the builtin `id`).
            lang: language key used to select a compiler.
            code: source text to execute.
        """
        # Maximum time 
        try:
            with tlim(5):
                logger.info(f'[PROCESSING] {multiprocessing.current_process().name}')

                # Get compiler
                # NOTE(review): `Client.__compiler` relies on name mangling,
                # so this def presumably lives inside class Client — confirm.
                c = Client.__compiler.getCompiler(lang)
                s = Submission(id, lang, code, c)
                s.start()

        except TimeoutException:
            # Timed-out submissions are abandoned silently (best effort).
            return
        finally:
            # No cleanup needed; kept as an explicit no-op placeholder.
            pass
Ejemplo n.º 13
0
    def evaluate(self) -> bool:
        """Run every testcase through both the reference Solution and the
        user's Submission and compare their outputs.

        Raises:
            ImplementationError: if outputs differ for any testcase.
            Any exception raised by the submission function propagates.

        Thank God for stackoverflow
        """

        solution = Solution()
        submission = Submission()

        # retrieve the running functions in the Solution and Submission classes
        # The names of these functions are defined in inputs.json
        solution_func = getattr(solution, self.func_name)
        submission_func = getattr(submission, self.func_name)

        for testcase in self.inputs:
            # unpack each testcase and execute
            solution_res = solution_func(*testcase)
            submission_res = submission_func(*testcase)

            # Iterable results are compared order-insensitively via sorted
            # copies. NOTE(review): strings are also Iterable, so anagram
            # strings would compare equal here — confirm that is intended.
            if isinstance(solution_res, Iterable):
                matched = sorted(solution_res) == sorted(submission_res)
            else:
                matched = solution_res == submission_res

            if not matched:
                # FIX: single raise site replaces the two byte-identical
                # raise bodies the original duplicated in both branches.
                raise ImplementationError(
                    f"Error for testcase {testcase}. "
                    f"Expected {solution_res}, "
                    f"got {submission_res}")
Ejemplo n.º 14
0
Archivo: base.py Proyecto: ymc/hue
 def start_job(self, job):
     """POST a start action for *job* and return the resulting Submission.

     Raises SqoopSubmissionException when the server reports a bad status.
     """
     url = '%s/submission/action/%d/' % (API_VERSION, job.id)
     resp_dict = self._root.post(url, headers=self.headers)
     first_status = resp_dict['all'][0]
     if first_status['status'] in SqoopClient.STATUS_BAD:
         raise SqoopSubmissionException.from_dict(first_status)
     return Submission.from_dict(first_status)
Ejemplo n.º 15
0
def submission_json(id):
    """Return the id and status of one submission as a JSON response."""
    record = Submission.objects(id=id).first()
    return jsonify({
        'id': record.id,
        'status': record.status,
    })
Ejemplo n.º 16
0
 def get_submissions(self):
     """Fetch all submissions from the API and wrap each in a Submission."""
     response = self._root.get('%s/submissions' % API_VERSION,
                               headers=self.headers)
     return [Submission.from_dict(entry)
             for entry in response['submissions']]
Ejemplo n.º 17
0
 def load_data(self):
     """Initialize blocchain + open submissions data from a file.

     The node file 'blocchain-<node_id>.bit' holds three JSON lines:
     line 0 = chain, line 1 = open submissions, line 2 = peer nodes.
     A missing or too-short file is silently ignored (fresh node).
     """
     try:
         with open('blocchain-{}.bit'.format(self.node_id), mode='r') as f:
             # file_content = pickle.loads(f.read())
             file_content = f.readlines()
             # blocchain = file_content['chain']
             # open_submissions = file_content['ot']
             # [:-1] strips the trailing newline before json.loads
             blocchain = json.loads(file_content[0][:-1])
             # We need to convert  the loaded data because submissions
             # should use OrderedDict
             updated_blocchain = []
             for bloc in blocchain:
                 converted_tx = [Submission(
                     tx['voter'],
                     tx['candidate'],
                     tx['zero'],         #added to hold day zero, countdown until final vote
                     tx['signature'],
                     tx['amount']) for tx in bloc['submissions']]
                 updated_bloc = Bloc(
                     bloc['index'],
                     bloc['previous_hash'],
                     converted_tx,
                     bloc['proof'],
                     bloc['timestamp'])
                 updated_blocchain.append(updated_bloc)
             self.chain = updated_blocchain
             open_submissions = json.loads(file_content[1][:-1])
             # We need to convert  the loaded data because submissions
             # should use OrderedDict
             updated_submissions = []
             for tx in open_submissions:
                 updated_submission = Submission(
                     tx['voter'],
                     tx['candidate'],
                     tx['zero'],
                     tx['signature'],
                     tx['amount'])
                 updated_submissions.append(updated_submission)
             self.__open_submissions = updated_submissions
             peer_nodes = json.loads(file_content[2])
             self.__peer_nodes = set(peer_nodes)
     except (IOError, IndexError):
         # No data file yet, or it has fewer than three lines: start empty.
         pass
     finally:
         # Runs on success and failure alike.
         print('Cleanup!')
def pixivSubmissionsFromJson(bookmarks):
    """Convert a pixiv bookmarks response into a list of Submission objects.

    Albums (meta_pages) yield one Submission per page with the page index
    appended to the title; single images yield one Submission. Types other
    than 'illust'/'manga' are skipped.
    """
    submissions = []
    for illustration in bookmarks.illusts:
        if illustration.type not in ['illust', 'manga']:
            # TODO: Add more format support
            logger.log("Skipping " + illustration.type)
            continue

        # Album
        if illustration.meta_pages:
            # FIX: enumerate replaces the manual imageIndex counter.
            for imageIndex, imagePage in enumerate(illustration.meta_pages):
                newSubmission = Submission()
                fillPixivSubmission(illustration, newSubmission)

                newSubmission.title = '{}_{}'.format(newSubmission.title,
                                                     imageIndex)
                newSubmission.bodyUrl = imagePage.image_urls.original

                submissions.append(newSubmission)

        # Single image
        elif illustration.meta_single_page:
            newSubmission = Submission()
            fillPixivSubmission(illustration, newSubmission)

            # The image that will be downloaded
            newSubmission.bodyUrl = illustration.meta_single_page.original_image_url

            submissions.append(newSubmission)

    logger.log("Got {} Pixiv bookmarks".format(len(submissions)))
    return submissions
Ejemplo n.º 19
0
def add_submission_by_user(user: User, subm: str, cat: str,
                           lang: str) -> Submission:
    """Persist and return a new Submission for *user*, tied to today's MWE."""
    new_submission = Submission(user=user,
                                value=subm,
                                category=cat,
                                language=lang,
                                mwe=mwe_helper.get_todays_mwe(lang))
    session.add(new_submission)
    session.commit()
    return new_submission
Ejemplo n.º 20
0
    def massageData(self, inflate):
        """Add a 'contributors' list (unique originating device ids) to
        *inflate*, derived from its attached media submissions.

        A missing key anywhere in the chain aborts the enrichment and the
        dict is returned unchanged. (Python 2 snippet: print statements.)
        """
        try:
            submissions = inflate['attached_media']
            from submission import Submission

            contributors = []
            for s in submissions:
                submission = Submission(_id=s)
                print submission.emit()

                contributors.append(
                    submission.j3m.genealogy['createdOnDevice'])

            # de-duplicate; insertion order is discarded
            inflate['contributors'] = list(set(contributors))

        except KeyError as e:
            print e
            pass

        return inflate
Ejemplo n.º 21
0
	def massageData(self, inflate):
		"""Add a 'contributors' list (unique originating device ids) to
		*inflate*, derived from its attached media submissions.

		A missing key anywhere in the chain aborts the enrichment and the
		dict is returned unchanged. (Python 2 snippet: print statements.)
		"""
		try:
			submissions = inflate['attached_media']
			from submission import Submission
			
			contributors = []
			for s in submissions:
				submission = Submission(_id=s)
				print submission.emit()
				
				contributors.append(
					submission.j3m.genealogy['createdOnDevice'])
			
			# de-duplicate; insertion order is discarded
			inflate['contributors'] = list(set(contributors))
			
		except KeyError as e:
			print e
			pass
		
		return inflate
Ejemplo n.º 22
0
def submit_handler_2(user: User, update: Update, context: CallbackContext):
    """Store the user's submitted text and prompt for its category."""
    todays_mwe = mwe_helper.get_todays_mwe(user.language)
    context.user_data["submission"] = Submission(value=update.message.text)
    context.user_data["state"] = "submit_example_type_1"
    prompt = get_language_token(user.language,
                                Tokens.SPECIAL_MEANING) % todays_mwe.name
    keyboard = get_submit_category_1_keyboard_markup(user.language, todays_mwe)
    update.message.reply_text(prompt,
                              parse_mode=telegram.ParseMode.MARKDOWN,
                              reply_markup=keyboard)
Ejemplo n.º 23
0
def run(pid, title, lang, code):
    """Build a Submission and hand it to the controller; return its sid.

    Raises:
        Failed: re-raised on a known failure (after logging), or wrapping
            any unexpected error.
    """
    controller = Controller.controller
    try:
        submission = Submission(pid, title, lang, code)
        controller.execute(submission)
        return submission.sid
    except Failed as e:
        logging.warning(e.message)
        raise
    except Exception as e:
        traceback.print_exc()
        # FIX: chain the original exception so the real cause isn't lost
        raise Failed('unknown error') from e
    def submit(self, revision, data, mode_info, log=None):
        """Upload a completed AWFY Treeherder job with its log link and,
        when machine/mode ids are available, a retrigger link.

        Args:
            revision: revision hash the results belong to.
            data: perf data payload.
            mode_info: dict with "repo", "job_name", "job_symbol",
                "tier" and "platform" keys.
            log: optional extra log info; when it carries "machine_id"
                and "mode_id" a retrigger link is built from them.
        """
        # FIX: `log=None` replaces the original mutable default `log=[]`.
        if log is None:
            log = []
        repo = mode_info["repo"]
        settings = {
            "treeherder": {
                'group_symbol': 'AWFY',
                'group_name': 'AWFY',
                'job_name': mode_info["job_name"],
                'job_symbol': mode_info["job_symbol"],
                "tier": mode_info["tier"],
                "platform": mode_info["platform"]
            }
        }

        logfile = create_log_item({
            "repo": repo,
            "revision": revision,
            "settings": settings,
            "perf_data": data,
            "extra_log_info": log
        })
        loglink = "https://arewefastyet.com/data.php?file=treeherder-logs/" + logfile

        retriggerlink = None
        # NOTE(review): `log` is indexed like a mapping below, so callers
        # wanting a retrigger link must pass a dict — confirm at call sites.
        if "mode_id" in log and "machine_id" in log:
            retriggerlink = "https://arewefastyet.com/retrigger/?machine_id=" + str(
                log["machine_id"]) + "&mode_id=" + str(
                    log["mode_id"]) + "&revision=" + revision

        th = Submission(repo,
                        revision,
                        treeherder_url=awfy.th_host,
                        treeherder_client_id=awfy.th_user,
                        treeherder_secret=awfy.th_secret,
                        settings=settings)

        job = th.create_job(None)
        th.submit_completed_job(job,
                                data,
                                loglink=loglink,
                                retriggerlink=retriggerlink)
Ejemplo n.º 25
0
	def __init__(self, inflate=None, _id=None, submission=None):
		"""Load a derivative record, bind its parent Submission, and parse
		the submission's j3m metadata.

		Invalidates self (and possibly the submission) instead of raising
		when the submission id is missing or the j3m is not valid JSON.
		"""
		super(Derivative, self).__init__(
			inflate=inflate,
			_id=_id,
			db="derivatives",
			extra_omits=['submission','source','j3m']
		)
		
		if not hasattr(self, 'submission_id'):
			self.invalidate(
				invalidate['codes']['submission_undefined'],
				invalidate['reasons']['submission_undefined']
			)
			return
		
		from submission import Submission
		if submission is None:
			self.submission = Submission(_id=self.submission_id)
		else:
			self.submission = submission
		
		# FIX: read the j3m through a context manager; the original
		# open(...).read() leaked the file handle.
		with open(
			os.path.join(self.submission.asset_path, self.submission.j3m), 'r'
		) as j3m_file:
			j3m = j3m_file.read()
		
		try:
			self.j3m = json.loads(j3m)
		except ValueError:
			# FIX: narrowed from a bare `except:`; json.loads signals bad
			# input with ValueError (JSONDecodeError subclasses it).
			self.submission.invalidate(
				invalidate['codes']['submission_invalid_j3m'],
				invalidate['reasons']['submission_invalid_j3m']
			)
			
			self.invalidate(
				invalidate['codes']['submission_invalid_j3m'],
				invalidate['reasons']['submission_invalid_j3m']
			)
			return
		
		self.getSource()
		
		if _id is None:
			# First save: stamp creation date and mime type, persist, then
			# derive the location/annotation entries.
			setattr(self, 'date_created', self.j3m['genealogy']['dateCreated'])
			setattr(self, 'mime_type', self.submission.mime_type)
			self.save()
			
			self.parseLocationEntries()
			self.parseAnnotationEntries()
Ejemplo n.º 26
0
    def submit(self, revision, data, mode_info):
        """Create and submit a completed AWFY Treeherder job for *revision*."""
        th_settings = {
            "treeherder": {
                "group_symbol": "AWFY",
                "group_name": "AWFY",
                "job_name": mode_info["job_name"],
                "job_symbol": mode_info["job_symbol"],
                "tier": mode_info["tier"],
                "platform": mode_info["platform"],
            }
        }

        submitter = Submission(
            mode_info["repo"],
            revision,
            treeherder_url=awfy.th_host,
            treeherder_client_id=awfy.th_user,
            treeherder_secret=awfy.th_secret,
            settings=th_settings,
        )

        new_job = submitter.create_job(None)
        submitter.submit_completed_job(new_job, data)
Ejemplo n.º 27
0
    def post(self):
        """Create a Submission from posted form fields.

        Writes a JSON response: an error code for the first missing
        required field or invalid session, or code 0 with the new key.
        """
        subName = self.request.get('name')
        subCategory = self.request.get('category')
        subDescription = self.request.get('description')
        subLocation = self.request.get('location')
        subImage = str(self.request.get('image'))
        subKeywords = self.request.get('keywords')
        cookie = self.request.get('cookie')
        subTfrom = self.request.get('from')
        subTto = self.request.get('to')

        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'

        # Name, Category, Location, Image and Cookie are the required
        # fields; each maps to its own error code, checked in this order.
        required_fields = [(cookie, 5), (subName, 1), (subCategory, 2),
                           (subLocation, 3), (subImage, 4)]
        missing_code = next(
            (code for value, code in required_fields if not value), None)

        if missing_code is not None:
            self.response.write(json_response(missing_code, 0))
            return

        session = Session.query(Session.cookie == cookie).get()
        if not session:
            self.response.write(json_response(6, 0))
            return

        submission = Submission(name = subName,lowerName = subName.lower(), category = subCategory, description = subDescription, location = subLocation, image = subImage, 
                                keywords = subKeywords, rating = 0, submitter = session.user)
        submission.put()
        # ndb.Key.urlsafe(), generates a url safe version of the Key
        self.response.write(json_response(0, submission.key.urlsafe()))
Ejemplo n.º 28
0
    def get(self):
        """Search submissions by (lower-cased) name for a valid session.

        Writes a JSON error for a missing cookie, session, name, or empty
        result; otherwise a JSON array of up to 40 matching submissions.
        """
        cookie = self.request.get('cookie')

        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'

        if not cookie:
            error = json_error('failure', 'cookie')
            self.response.write(json.dumps(error))
            return

        session = Session.query(Session.cookie == cookie).get()
        if not session:
            # NOTE(review): unlike the other branches this wrote the raw
            # dict, not json.dumps(error); original behavior preserved.
            error = json_error('failure', 'session')
            self.response.write(error)
            return

        name = self.request.get('name')
        if not name:
            error = json_error('failure', 'name')
            self.response.write(json.dumps(error))
            return

        submissions_number = 40
        submissions = Submission.query(
            Submission.lowerName == name.lower()).fetch(submissions_number)

        if not submissions:
            error = json_error('failure', 'no result')
            self.response.write(json.dumps(error))
            return

        # FIX: the original duplicated two byte-identical loop bodies for
        # the "full page" and "short page" cases; slicing covers both.
        submissions_array = []
        for submission in submissions[:submissions_number]:
            json_submission = json_string(submission.key.urlsafe(),
                                          submission.name, submission.image,
                                          submission.rating)
            submissions_array.append(json_submission)

        self.response.write(json.dumps(submissions_array))
Ejemplo n.º 29
0
    def updateSolvedProblems(self, html):
        """Parse the 'Solved problems' table in *html* and rebuild
        self.submissions from its rows.

        Each matched row yields Submission(group(1), group(2)): group(1)
        is the problem id; group(2) captures the fourth table column
        (presumably the Date header — confirm against live markup).
        """
        #TODO: compile
        submissionPattern = b'\t<tr class="sectiontableentry\d+">\n' \
                            b'\t<td align="center"><a href="[^"]*">(\d+)</a></td>\n' \
                            b'\t<td align="center">\d+</td>\n' \
                            b'\t<td align="center">\d+</td>\n' \
                            b'\t<td align="center">([^<]*)</td>\n' \
                            b'\t<td align="center">[^<]*</td>\n\t'
        tablePattern = b'<div class="contentheading">Solved problems</div>\n\t<table border="0" cellspacing="0" cellpadding="4" style="width:70%" align="center">\n\t<tr class="sectiontableheader"><th align="center">Problem</th><th align="center">Ranking</th><th align="center">Submission</th><th align="center">Date</th><th align="center">Run time</th></tr>\n\t' \
                       b'(' + submissionPattern + b')*' \
                                                  b'\t</table>'
        # Locate the whole table first, then iterate its rows; raises
        # AttributeError if the table is absent from the page.
        tableHTML = re.search(tablePattern, html).group(0)

        self.submissions = list()
        for match in re.finditer(submissionPattern, tableHTML):
            self.submissions.append(Submission(match.group(1).decode('utf-8'), match.group(2).decode('utf-8')))
Ejemplo n.º 30
0
    def add_submission(self,
                        candidate,
                        voter,
                        zero,
                        signature,
                        amount=1.0,
                        is_receiving=False):
        """ Append a new value as well as the last blocchain value to the blocchain.

        Arguments:
            :voter: The person voting.
            :candidate: The candidate recieving the votes.
            :zero: Day-zero countdown marker carried by the submission.
            :signature: Signature authorising this submission.
            :amount: The amount of votes sent with the submission
            (default = 1.0)
            :is_receiving: True when this node mirrors a submission that a
            peer broadcast, so it must not re-broadcast it.

        Returns:
            True if the submission was verified and stored, else False.
        """
        # (dead commented-out dict/public_key code removed)
        submission = Submission(voter, candidate, zero, signature, amount)
        if not Verification.verify_submission(submission, self.get_balance):
            return False
        self.__open_submissions.append(submission)
        self.save_data()
        if not is_receiving:
            # Broadcast to peers; unreachable nodes are skipped, but an
            # explicit rejection (400/500) aborts with failure.
            for node in self.__peer_nodes:
                url = 'http://{}/broadcast-submission'.format(node)
                try:
                    response = requests.post(url,
                                             json={
                                                 'voter': voter,
                                                 'candidate': candidate,
                                                 'zero': zero,
                                                 'amount': amount,
                                                 'signature': signature
                                             })
                    if (response.status_code == 400 or
                            response.status_code == 500):
                        print('Submission declined, needs resolving')
                        return False
                except requests.exceptions.ConnectionError:
                    continue
        return True
Ejemplo n.º 31
0
 def resolve(self):
     """Checks all peer nodes' blocchains and replaces the local one with
     longer valid ones.

     Returns True when the local chain was replaced (open submissions are
     then discarded), False otherwise. Unreachable peers are skipped.
     """
     # Initialize the winner chain with the local chain
     winner_chain = self.chain
     replace = False
     for node in self.__peer_nodes:
         url = 'http://{}/chain'.format(node)
         try:
             # Send a request and store the response
             response = requests.get(url)
             # Retrieve the JSON data as a dictionary
             node_chain = response.json()
             # Convert the dictionary list to a list of bloc AND
             # submission objects
             node_chain = [
                 Bloc(bloc['index'],
                       bloc['previous_hash'],
                       [
                     Submission(
                         tx['voter'],
                         tx['candidate'],
                         tx['zero'],
                         tx['signature'],
                         tx['amount']) for tx in bloc['submissions']
                 ],
                     bloc['proof'],
                     bloc['timestamp']) for bloc in node_chain
             ]
             node_chain_length = len(node_chain)
             local_chain_length = len(winner_chain)
             # Store the received chain as the current winner chain if it's
             # longer AND valid
             if (node_chain_length > local_chain_length and
                     Verification.verify_chain(node_chain)):
                 winner_chain = node_chain
                 replace = True
         except requests.exceptions.ConnectionError:
             # Peer unreachable: silently try the next one
             continue
     # Conflict resolution is done, clear the pending flag
     self.resolve_conflicts = False
     # Replace the local chain with the winner chain
     self.chain = winner_chain
     if replace:
         # Local open submissions no longer match the adopted chain
         self.__open_submissions = []
     self.save_data()
     return replace
Ejemplo n.º 32
0
 def add_bloc(self, bloc):
     """Add a bloc received via broadcasting to the local blocchain.

     Returns False when the proof of work or the previous-hash link does
     not validate; True once the bloc is appended and any matching open
     submissions have been removed.
     """
     # Create a list of submission objects
     submissions = [Submission(
         tx['voter'],
         tx['candidate'],
         tx['zero'],
         tx['signature'],
         tx['amount']) for tx in bloc['submissions']]
     # Validate the proof of work of the bloc and store the result (True
     # or False) in a variable
     # NOTE(review): the proof is checked against all but the last
     # submission — presumably the reward/final tx is excluded; confirm.
     proof_is_valid = Verification.valid_proof(
         submissions[:-1], bloc['previous_hash'], bloc['proof'])
     # Check if previous_hash stored in the bloc is equal to the local
     # blocchain's last bloc's hash and store the result in a bloc
     hashes_match = hash_bloc(self.chain[-1]) == bloc['previous_hash']
     if not proof_is_valid or not hashes_match:
         return False
     # Create a bloc object
     converted_bloc = Bloc(
         bloc['index'],
         bloc['previous_hash'],
         submissions,
         bloc['proof'],
         bloc['timestamp'])
     self.__chain.append(converted_bloc)
     stored_submissions = self.__open_submissions[:]
     # Check which open submissions were included in the received bloc
     # and remove them
     # This could be improved by giving each submission an ID that would
     # uniquely identify it
     for itx in bloc['submissions']:
         for opentx in stored_submissions:
             if (opentx.voter == itx['voter'] and
                     opentx.candidate == itx['candidate'] and
                     opentx.zero == itx['zero'] and
                     opentx.amount == itx['amount'] and
                     opentx.signature == itx['signature']):
                 try:
                     self.__open_submissions.remove(opentx)
                 except ValueError:
                     print('Item was already removed')
     self.save_data()
     return True
Ejemplo n.º 33
0
 def criaSubmission(self, id):
     """Scrape a URI Online Judge run page and build a Submission from it."""
     page = self.sess.get(
         "https://www.urionlinejudge.com.br/judge/pt/runs/code/" + id)
     soup = BeautifulSoup(page.text, features="html.parser")
     fields = soup.find("div", {
         "class": "st-big-box"
     }).find_all("dd")
     idEnome = fields[0].text.split("-")
     resultado = fields[1].text.strip()
     language = fields[2].text.split("(")[0].strip()
     data = fields[5].text.strip()
     id = idEnome[0].strip()
     nome = idEnome[1].strip()
     code = soup.find("pre", {"id": "code"}).text
     return Submission(id, nome, resultado, data, language, code)
Ejemplo n.º 34
0
    def get(self):
        """Delete the submission with the given name, replying with a JSON
        status payload.

        FIX: the original mixed tabs and spaces in the inner branches,
        which is a SyntaxError under Python 3; indentation normalized.
        """
        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
        name = self.request.get('name')

        if not name:
            error = json_error('delete submission', 'failure', 'name')
            self.response.write(json.dumps(error))

        else:
            submission = Submission.query(Submission.name == name).get()

            if not submission:
                error = json_error('delete submission', 'failure', 'no such submission')
                self.response.write(json.dumps(error))

            else:
                submission.key.delete()
                response = {'delete submission': {'status': 'ok'}}
                self.response.write(json.dumps(response))
Ejemplo n.º 35
0
    def get(self):
        """Delete every submission in the given category, replying with a
        JSON status payload.

        FIX: the original mixed tabs and spaces in the inner branches,
        which is a SyntaxError under Python 3; indentation normalized and
        the index loop replaced by direct iteration.
        """
        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
        category = self.request.get('category')

        if not category:
            error = json_error('delete category', 'failure', 'category')
            self.response.write(json.dumps(error))

        else:
            submissions = Submission.query(Submission.category == category).fetch()

            if not submissions:
                error = json_error('delete category', 'failure', 'no such submission')
                self.response.write(json.dumps(error))

            else:
                for submission in submissions:
                    submission.key.delete()

                response = {'delete category': {'status': 'ok'}}
                self.response.write(json.dumps(response))
Ejemplo n.º 36
0
def getTumblrUserLikedSubmissions(clientId,
                                  clientSecret,
                                  tokenId,
                                  tokenSecret,
                                  likeRequestLimit=100,
                                  requestOnlyNewCache=None):
    """Fetch the authenticated Tumblr user's liked photo posts as Submissions.

    Pages through the likes feed (newest page first, posts processed oldest
    first within a page), converts each photo of each photo-post into a
    Submission, and stops early when the previously cached newest post
    (requestOnlyNewCache[0]) is reached.

    Returns (submissions, newEarlyOut) where newEarlyOut is the first
    (newest) submission found, or None when nothing was fetched.
    """
    tumblrClient = pytumblr.TumblrRestClient(clientId, clientSecret, tokenId,
                                             tokenSecret)

    # This is an annoying limit the api seems to impose
    POSTS_PER_PAGE = 50

    oldestPageTimestamp = 0
    # NOTE: despite the name, totalRequests accumulates *posts* fetched
    # (see the += numPostsThisPage at the bottom), so likeRequestLimit
    # effectively caps total posts, not HTTP calls.
    totalRequests = 0
    submissions = []

    foundOldSubmission = False

    while totalRequests < likeRequestLimit:
        # NOTE(review): oldestPageTimestamp is recorded below but never sent
        # to the API; paging relies on 'offset' instead — confirm intended.
        if oldestPageTimestamp:
            tumblrLikes = tumblrClient.likes(**{
                'limit': POSTS_PER_PAGE,
                'offset': totalRequests
            })
        else:
            tumblrLikes = tumblrClient.likes(**{'limit': POSTS_PER_PAGE})

        numPostsThisPage = len(tumblrLikes['liked_posts'])

        if not numPostsThisPage:
            break

        logger.log(
            str(numPostsThisPage) + ' Tumblr likes requested. Total likes: ' +
            str(tumblrLikes['liked_count']))

        # Walk the page in reverse so posts are handled oldest-to-newest.
        for postIndex, post in reversed(
                list(enumerate(tumblrLikes['liked_posts']))):
            if 'photos' in post:
                for photoIndex, photo in enumerate(post['photos']):
                    newSubmission = Submission()

                    newSubmission.source = u'Tumblr'

                    # Tumblr submissions don't have titles, so make one
                    # This'll look ugly in the file browser, unfortunately
                    if len(post['photos']) > 1:
                        newSubmission.title = str(
                            signedCrc32(post['short_url'].encode()))
                        newSubmission.title += u'_'
                        newSubmission.title += str(photoIndex)
                    else:
                        newSubmission.title = str(
                            signedCrc32(post['short_url'].encode()))
                    """logger.log(post)
					return"""
                    newSubmission.author = post['blog_name']

                    # Reddit-shaped fields are repurposed for Tumblr data.
                    newSubmission.subreddit = post['short_url']
                    newSubmission.subredditTitle = post['blog_name'] + '_Tumblr'

                    newSubmission.body = post['caption']
                    newSubmission.bodyUrl = photo['original_size']['url']

                    newSubmission.postUrl = post['short_url']

                    submissions.append(newSubmission)

                    # Early-out: we reached the newest post seen last run.
                    if (requestOnlyNewCache and requestOnlyNewCache[0]
                            and newSubmission.postUrl
                            == requestOnlyNewCache[0].postUrl):
                        logger.log(
                            'Found early out point after ' +
                            str(len(submissions)) + ' new submissions.'
                            ' If you e.g. changed your total requests value and want to go deeper, set'
                            ' Tumblr_Try_Request_Only_New to False in your settings.txt'
                        )
                        foundOldSubmission = True
                        break

            else:
                logger.log('Skipped ' + post['short_url'] +
                           ' (does not have images)')

            if foundOldSubmission:
                break

        if foundOldSubmission:
            break

        oldestPageTimestamp = tumblrLikes['liked_posts'][-1]['liked_timestamp']

        # If we didn't get a full page's worth of posts, we're on the last page
        # Sometimes pages don't have POSTS_PER_PAGE, they're a little under
        RANDOM_PAGE_TOLERANCE = 10
        if numPostsThisPage < POSTS_PER_PAGE - RANDOM_PAGE_TOLERANCE:
            break

        totalRequests += numPostsThisPage

    newEarlyOut = submissions[0] if len(submissions) else None
    return submissions, newEarlyOut
Ejemplo n.º 37
0
def view_submission(id):
    """Render the detail template for the submission with the given id."""
    found = Submission.objects(id=id).first()
    return render_template('submission.html', submission=found)
Ejemplo n.º 38
0
def import_old():
    """Seed resources/tracks and import the 2013 schedule into the database.

    Parses schedule2013.html, creating one Submission per 'section' div
    (first presenter only) and attaching Tag rows, which are created on
    first use.  Commits everything in a single transaction at the end.
    """
    existing_tags = {}
    for tag in Tag.query.all():
        # BUG FIX: was misspelled 'exisiting_tags', a NameError at runtime.
        existing_tags[tag.name] = tag
    for resource in ['Projector', 'Microphone', 'Sound system', 'Drinking water', 'Quiet (no airwalls)']:
        penguicontrax.db.session.add(Resource(resource))
    for track in ['literature', 'tech', 'music', 'food', 'science']:
        penguicontrax.db.session.add(Track(track,None))
    with penguicontrax.app.open_resource('schedule2013.html', mode='r') as f:
        tree = ET.fromstring(f.read())
        events = tree.find('document')
        for section in events:
            if section.tag == 'div' and section.attrib['class'] == 'section':
                name = section[0].text
                tag_list = section[1].text # Tag doesn't seem to be in the DB yet
                person = section[3][0].text
                # Only one presenter is supported so far
                firstPerson = person.split(',')[0].split(' ')
                description = section[3][0].tail
                submission = Submission()
                submission.email = '*****@*****.**'
                submission.title = name
                submission.description = description
                submission.duration = 1
                submission.setupTime = 0
                submission.repetition = 0
                submission.firstname = firstPerson[0]
                submission.lastname = firstPerson[1] if len(firstPerson) > 1 else ''
                submission.followUpState = 0
                submission.tags = []
                for tag in tag_list.split(','):
                    tag = normalize_tag_name(tag)
                    db_tag = None
                    if not tag in existing_tags:
                        db_tag = Tag(tag)
                        penguicontrax.db.session.add(db_tag)
                        existing_tags[tag] = db_tag
                    else:
                        db_tag = existing_tags[tag]
                    submission.tags.append(db_tag)
                penguicontrax.db.session.add(submission)
        penguicontrax.db.session.commit()
Ejemplo n.º 39
0
class Derivative(Asset):
	"""An Asset derived from a Submission's j3m metadata (Python 2 code).

	Loads and parses the submission's j3m JSON, resolves the originating
	Source by PGP fingerprint, and extracts location evidence (GPS, wifi
	BSSIDs, cell tower ids, Bluetooth MACs) and user annotations into
	persisted fields on this document.
	"""

	def __init__(self, inflate=None, _id=None, submission=None):
		"""Inflate a new derivative or load an existing one.

		arguments:
		* inflate (dict) - initial data for a new derivative
		* _id (str) - id of an existing derivative to load
		* submission (Submission) - parent submission; loaded via
		  self.submission_id when omitted

		Invalidates itself and returns early when no submission_id is
		attached or the submission's j3m does not parse as JSON.  For a
		brand-new derivative (_id is None) it also parses location and
		annotation entries and saves.
		"""
		super(Derivative, self).__init__(
			inflate=inflate,
			_id=_id,
			db="derivatives",
			extra_omits=['submission','source','j3m']
		)
		
		if not hasattr(self, 'submission_id'):
			self.invalidate(
				invalidate['codes']['submission_undefined'],
				invalidate['reasons']['submission_undefined']
			)
			return
		
		from submission import Submission
		if submission is None:
			self.submission = Submission(_id=self.submission_id)
		else:
			self.submission = submission
			
		# NOTE(review): file handle is never explicitly closed.
		j3m = open(
			os.path.join(self.submission.asset_path, self.submission.j3m), 'r'
		).read()
		
		try:
			self.j3m = json.loads(j3m)
		except:
			# Unparseable j3m invalidates both the submission and this asset.
			self.submission.invalidate(
				invalidate['codes']['submission_invalid_j3m'],
				invalidate['reasons']['submission_invalid_j3m']
			)
			
			self.invalidate(
				invalidate['codes']['submission_invalid_j3m'],
				invalidate['reasons']['submission_invalid_j3m']
			)
			return
		
		self.getSource()
			
		if _id is None:
			setattr(self, 'date_created', self.j3m['genealogy']['dateCreated'])
			setattr(self, 'mime_type', self.submission.mime_type)
			self.save()
			
			self.parseLocationEntries()
			self.parseAnnotationEntries()
			
	def getSource(self):
		"""Resolve and attach the Source that produced this derivative.

		Loads by source_id when already set; otherwise looks the source up
		by the j3m's PGP key fingerprint, creating (and flagging invalid) a
		new Source when none is found.  Returns True when a source_id ends
		up set on this object, else False.
		"""
		from source import Source
		if hasattr(self, 'source_id'):
			self.source = Source(_id=self.source_id)
		else:
			fingerprint = None
			try:
				fingerprint = self.j3m['intent']['pgpKeyFingerprint'].lower()
			except KeyError as e:
				print "NO FINGERPRINT???"
				self.source = Source(inflate={
					'invalid': {
						'error_code' : invalidate['codes']['source_missing_pgp_key'],
						'reason' : invalidate['reasons']['source_missing_pgp_key']
					}
				})
				
			if fingerprint is not None:
				source = self.submission.db.query(
					'_design/sources/_view/getSourceByFingerprint', 
					params={
						'fingerprint' : fingerprint
					}
				)[0]
			
				if source:
					self.source = Source(_id=source['_id'])
				else:
					# we didn't have the pgp key.  
					# so init a new source and set an invalid flag about that.
				
					inflate = {
						'fingerprint' : fingerprint
					}
				
					## TODO: ACTUALLY THIS IS CASE-SENSITIVE!  MUST BE UPPERCASE!
					self.source = Source(inflate=inflate)
					self.source.invalidate(
						invalidate['codes']['source_missing_pgp_key'],
						invalidate['reasons']['source_missing_pgp_key']
					)
			
			
			setattr(self, 'source_id', self.source._id)
			self.save()
		
		if hasattr(self, 'source_id'):
			return True
		else:
			return False
			
	def parseLocationEntries(self):
		"""Extract BSSIDs, cell ids, and BT MACs from j3m sensorCapture data.

		Consolidates each sighting into the shared location databases via
		the parse* helpers, then persists de-duplicated lists on this
		derivative (known_locations, known_mac_addresses, known_bssids,
		known_cell_ids).
		"""
		print "parsing locations, bssids and cell tower ids; mac addresses (BT)"
		
		# NOTE(review): capture_min/capture_max are computed but never used
		# in this method — confirm whether range filtering was intended.
		capture_min = self.j3m['genealogy']['dateCreated']
		capture_max = capture_min + self.j3m['data']['exif']['duration']
		
		self.known_locations = []
		known_mac_addresses = []
		known_bssids = []
		known_cell_ids = []
		
		for entry in self.j3m['data']['sensorCapture']:
			# Each sensorPlayback key may be absent; missing keys are skipped.
			try:
				known_bssids.extend(self.parseBSSIDs(
					entry['sensorPlayback']['visibleWifiNetworks'], 
					entry['timestamp']
				))
			except KeyError as e: 
				pass
				
			try:
				known_cell_ids.extend(self.parseCellIDs(
					entry['sensorPlayback']['cellTowerId'],
					entry['timestamp']
				))
			except KeyError as e:
				pass
				
			try:
				known_mac_addresses.extend(self.parseMACs(
					entry['sensorPlayback']['bluetoothDeviceAddress'],
					entry['timestamp']
				))
			except KeyError as e:
				pass
		
		if len(self.known_locations) > 0:
			# Manual de-dup: the entries are dicts, which set() can't hold.
			known_locations_ = []
			for known_location in self.known_locations:
				if known_location not in known_locations_:
					known_locations_.append(known_location)
					
			setattr(self, 'known_locations', known_locations_)
			self.save()
		
		if len(known_mac_addresses) > 0:
			setattr(self, 'known_mac_addresses', list(set(known_mac_addresses)))
			self.save()
			
		if len(known_bssids) > 0:
			setattr(self, 'known_bssids', list(set(known_bssids)))
			self.save()
			
		if len(known_cell_ids) > 0:
			setattr(self, 'known_cell_ids', list(set(known_cell_ids)))
			self.save()
		
	def parseAnnotationEntries(self):
		"""Collect user-appended form annotations from the j3m.

		Builds one annotation dict per associated form (content, addedBy,
		dateAdded, plus optional index/regionBounds) and persists the list
		as self.annotations when non-empty.
		"""
		print "parsing annotation entries"
		annotations = []
		
		try:
			if 'userAppendedData' in self.j3m['data']:
				pass
			else:
				return
		except KeyError as e:
			return
			
		for entry in self.j3m['data']['userAppendedData']:
			try:
				for f, form_data in enumerate(entry['associatedForms']):
					annotation = None
					try:
						annotation = {
							'content' : self.translateFormValue(form_data['answerData']),
							'addedBy' : self.source._id,
							'dateAdded' : entry['timestamp']
						}
					except:
						continue
						
					try:
						annotation['index'] = entry['index']
					except KeyError as e:
						print "no index for annotation: %s" % e
						pass
						
					try:
						annotation['regionBounds'] = entry['regionBounds']
						
						# Display-only geometry is stripped before persisting.
						del annotation['regionBounds']['displayWidth']
						del annotation['regionBounds']['displayHeight']
						del annotation['regionBounds']['displayTop']
						del annotation['regionBounds']['displayLeft']
						
						# -1 time bounds are dropped (presumably "unset") — confirm.
						if annotation['regionBounds']['startTime'] == -1:
							del annotation['regionBounds']['startTime']
							
						if annotation['regionBounds']['endTime'] == -1:
							del annotation['regionBounds']['endTime']
					except KeyError as e:
						print "no region bounds for annotation: %s" %e
						pass
					
					if annotation is not None:
						print annotation	
						annotations.append(annotation)
			except:
				continue
		
		if len(annotations) > 0:
			setattr(self, 'annotations', annotations)		
			self.save()
		
	def setDescription(self, description):
		"""Set derivative description.
		
		arguments:
		* description (str)
		"""
		
		setattr(self, 'description', description)
		self.save()
		
	def setAlias(self, alias):
		"""Set derivative alias.
		
		arguments:
		* alias (str)
		"""
		
		setattr(self, 'alias', alias)
		self.save()
		
	def setTier(self, tier):
		"""Set derivative tier.
		
		arguments:
		* tier (str)
		"""
		
		setattr(self, 'tier', tier)
		self.save()
		
	def setStatus(self, status):
		"""Set derivative status.
		
		arguments:
		* status (str)
		"""
		
		setattr(self, 'status', status)
		self.save()
		
	def addCustom(self, custom_key, custom_value):
		"""add custom field and corresponding value to derivative
		
		arguments:
		* custom_key (str)
		* custom_value (dict)
		"""
		
		if not hasattr(self, 'custom'):
			setattr(self, 'custom', {})
			
		self.custom[custom_key] = custom_value
		self.save()
	
	def getClosestLocation(self, timestamp):
		"""Return the first usable {'latitude','longitude'} for *timestamp*, or None.

		NOTE(review): isWithinRange is tested against *timestamp* itself,
		which always lies inside a range centered on timestamp — the
		entry's own time is probably what was intended; confirm.
		"""
		validity_range = [
			timestamp - validity_buffer['location'], 
			timestamp + validity_buffer['location']
		]
		
		for entry in self.j3m['data']['sensorCapture']:
			try:
				coords = AsTrueValue(entry['sensorPlayback']['gps_coords'])
				# Skips (0,0) coordinates (presumably "no GPS fix").
				if coords != [0.0, 0.0] and coords != ['0.0','0.0']:
					if isWithinRange(validity_range, timestamp):
						return {
							'latitude' : coords[0],
							'longitude' : coords[1]
						}

			except KeyError as e:
				#print "OOPS: %s" % e
				continue
			
		return None
	
	def translateFormValue(self, form_data):
		"""Map numeric form answers to labels via form_fields['translate'] in place."""
		if len(form_fields['translate']) > 0:
			for key, value in form_data.iteritems():
				for ff in form_fields['translate']:
					if ff['key'] == str(key):
						# TODO: THIS MIGHT BE AN ARRAY!
						form_data[key] = ff['values'][int(value) - 1]
	
		return form_data
		
	def parseLocationObject(self, location_object, timestamp):
		"""Attach the location nearest *timestamp* to a BSSID/CellID/BTDevice doc.

		Updates an existing nearby (< 1 km) location's sighting metadata,
		or appends a new one, saves the object, and returns the bare
		{'latitude','longitude'} dict.

		NOTE(review): when getClosestLocation returns None this still falls
		through to `del new_location[...]`, which raises TypeError —
		confirm callers can never hit that path.
		"""
		new_location = self.getClosestLocation(timestamp)
		if new_location is not None:
			new_location['dateReported'] = time.time() * 1000
			new_location['reportedBy'] = self.source._id
			
			add_new_location = True
		
			if not hasattr(location_object, 'location'):
				setattr(location_object, 'location', [])
								
			# cycle through locations to see if they need updating:
			# is this location relevant to our j3m (within 1 km)?
			# if yes, then we don't have to add it, just update.
			for location in location_object.location:
				if isNearby(new_location, location, 1):
					add_new_location = False
					location['dateLastSeen'] = timestamp
					location['lastSeenBy'] = self.source._id
					
					# NOTE(review): 'coorboration' misspells corroboration,
					# but it is a persisted data key — renaming would orphan
					# existing documents.
					try:
						location['coorboration'].append(self.source._id)
						location['coorboration'] = list(set(location['coorboration']))
					except KeyError as e:
						location['coorboration'] = [self.source._id]
						
					break
			
			if add_new_location:
				location_object.location.append(new_location)
			
		location_object.save()
		
		# Strip per-sighting metadata before handing the coords back.
		del new_location['reportedBy']
		del new_location['dateReported']
		return new_location
		
	def parseCellIDs(self, cell_info, timestamp):
		"""Consolidate cell tower sightings; return the list of cell ids seen."""
		cell_info_list = cell_info
		
		if type(cell_info) != list:
			cell_info_list = []
			cell_info_list.append(cell_info)
			
		consolidated = DB(db="consolidated")
		found_cell_ids = []
				
		for cell_info in cell_info_list:			
			if type(cell_info) != int:
				cell_info = int(cell_info)
				
			# Reuse an existing CellID document or create a new one.
			inflate = consolidated.query(
				"_design/locations/_view/getByCellID",
				params={
					'cellId' : AsTrueValue(cell_info)
				}
			)[0]
			
			if not inflate:
				inflate = {
					'cellId' : AsTrueValue(cell_info)
				}
				cell_id = CellID(inflate=inflate)
			else:
				cell_id = CellID(_id=inflate['_id'])
			
			if cell_id.cellId not in found_cell_ids:
				found_cell_ids.append(cell_id.cellId)
							
			parsed_location = self.parseLocationObject(cell_id, timestamp)
			if parsed_location not in self.known_locations:
				self.known_locations.append(parsed_location)
				
		return found_cell_ids
	
	def parseBSSIDs(self, network_info, timestamp):
		"""Consolidate wifi network sightings; return the list of BSSIDs seen."""
		network_info_list = network_info
		
		if type(network_info) != list:
			network_info_list = []
			network_info_list.append(network_info)
		
		consolidated = DB(db="consolidated")
		found_bssids = []
			
		for network_info in network_info_list:
			inflate = consolidated.query(
				"_design/locations/_view/getByBSSID", 
				params={
					'bssid' : network_info['bssid']
				}
			)[0]
			
			if not inflate:
				# create a new entry
				inflate = {
					'bssid' : network_info['bssid']
				}
			
				bssid = BSSID(inflate=inflate)
			else:
				bssid = BSSID(_id=inflate['_id'])				
			
			if bssid.bssid not in found_bssids:
				found_bssids.append(bssid.bssid)
							
			parsed_location = self.parseLocationObject(bssid, timestamp)
			if parsed_location not in self.known_locations:
				self.known_locations.append(parsed_location)
		return found_bssids
		
	def parseMACs(self, bt_info, timestamp):
		"""Consolidate Bluetooth device sightings; return the list of MACs seen."""
		bt_info_list = bt_info
		
		if type(bt_info) != list:
			bt_info_list = []
			bt_info_list.append(bt_info)
			
		consolidated = DB(db="consolidated")
		found_macs = []
		
		for bt_info in bt_info_list:
			inflate = consolidated.query(
				"_design/locations/_view/getByMACAddress",
				params = {
					'MAC' : bt_info
				}
			)[0]
			
			if not inflate:
				inflate = {
					'MAC' : bt_info
				}
				mac = BTDevice(inflate=inflate)
			else:
				mac = BTDevice(_id=inflate['_id'])
			
			if mac.MAC not in found_macs:
				found_macs.append(mac.MAC)
							
			parsed_location = self.parseLocationObject(mac, timestamp)
			if parsed_location not in self.known_locations:
				self.known_locations.append(parsed_location)
		return found_macs
Ejemplo n.º 40
0
 def stop_job(self, job):
     """Ask the server to stop *job*; return the resulting Submission."""
     stop_url = '%s/job/%d/stop' % (API_VERSION, job.id)
     response = self._root.put(stop_url, headers=self.headers)
     return Submission.from_dict(response['submission'])
Ejemplo n.º 41
0
 def get_submission_by_id(self, submission_id: str):
     """Look up one submission row by primary key.

     Returns a Submission built from the row, or None when nothing matches.
     """
     self.cursor.execute("SELECT * FROM submissions where id = ?",
                         (submission_id,))
     row = self.cursor.fetchone()
     return Submission(*row) if row else None
Ejemplo n.º 42
0
 def get_recent_submissions(self, start_time_utc: int) -> list[Submission]:
     """Return all submissions created at or after *start_time_utc*.

     FIX: always returns a list (empty when nothing matches) as the return
     annotation promises; the original implicitly returned None when
     fetchall() came back empty, forcing callers to None-check.
     """
     self.cursor.execute("SELECT * FROM submissions WHERE created_utc >= ?",
                         (start_time_utc,))
     return [Submission(*row) for row in self.cursor.fetchall()]
Ejemplo n.º 43
0
        sub = Submission()
        sub.id = 1
        try:
            sub.save()
        except KeyError, e:
            print 'Adding blank submission failed: %s' % e

    def test_duplicate(self):
        # Saving the same submission twice should raise KeyError on the
        # second save, leave a detectable duplicate, and keep the original
        # record's fields intact.  (Python 2 except syntax.)
        s = {'id': 30, 'completed': datetime.now(), 'score': 40, 'answers':
                {'q1': 'Correct', 'q2': 'Wrong', 'q3': 'Wrong', 'q4': 'Correct', 'q5': 'Wrong'}
            }
        sub = Submission(s)
        sub.save()
        try:
            sub.save()
        except KeyError, e:
            print 'Failed trying to save duplicate: %s' % e
        assert Submission.hasDuplicate(sub.uid)
        assert sub.uid == s['id'] and sub.completed == s['completed'] and sub.score == s['score'] and sub.answers == s['answers']

    def test_getSubmissions(self):
        # getSubmissions() must report exactly as many rows as Mongo holds.
        fetched_count = len(Submission.getSubmissions())
        mongo_count = Connection().testing.subs.find().count()
        assert mongo_count == fetched_count


if __name__ == '__main__':
    # Discover and run all TestCase classes in this module when executed directly.
    unittest.main()
Ejemplo n.º 44
0
 def test_getSubmissions(self):
     """getSubmissions() must agree with a direct Mongo row count."""
     reported = len(Submission.getSubmissions())
     collection = Connection().testing.subs
     assert collection.find().count() == reported
Ejemplo n.º 45
0
def import_old():
    """Import the 2013 schedule from schedule2013.html into the database.

    Parses each 'section' div, creating one Submission per event (first
    presenter only) and attaching Tag rows, which are created on first
    use.  Commits everything in a single transaction at the end.
    """
    existing_tags = {}
    for tag in Tag.query.all():
        # BUG FIX: was misspelled 'exisiting_tags', a NameError at runtime.
        existing_tags[tag.name] = tag
    with penguicontrax.app.open_resource('schedule2013.html', mode='r') as f:
        tree = ET.fromstring(f.read())
        events = tree.find('document')
        for section in events:
            if section.tag == 'div' and section.attrib['class'] == 'section':
                name = section[0].text
                tag_list = section[1].text # Tag doesn't seem to be in the DB yet
                person = section[3][0].text
                # Only one presenter is supported so far
                firstPerson = person.split(',')[0].split(' ')
                description = section[3][0].tail
                submission = Submission()
                submission.email = '*****@*****.**'
                submission.title = name
                submission.description = description
                submission.duration = 1
                submission.setupTime = 0
                submission.repetition = 0
                submission.firstname = firstPerson[0]
                submission.lastname = firstPerson[1] if len(firstPerson) > 1 else ''
                submission.followUpState = 0
                submission.tags = []
                for tag in tag_list.split(','):
                    tag = tag.strip()
                    db_tag = None
                    if not tag in existing_tags:
                        db_tag = Tag(tag)
                        penguicontrax.db.session.add(db_tag)
                        existing_tags[tag] = db_tag
                    else:
                        db_tag = existing_tags[tag]
                    submission.tags.append(db_tag)
                penguicontrax.db.session.add(submission)
        penguicontrax.db.session.commit()
Ejemplo n.º 46
0
 def stop_job(self, job):
   """Stop *job* via the submission action endpoint; return its Submission."""
   action_url = '%s/submission/action/%d/' % (API_VERSION, job.id)
   payload = self._root.delete(action_url, headers=self.headers)
   return Submission.from_dict(payload['all'][0])
Ejemplo n.º 47
0
def getPinterestUserPinnedSubmissions(email, username, password,
                                      cacheFileName):
    """Fetch the user's not-yet-seen Pinterest pins as Submission objects.

    Logs in, walks every board's feed, skips pins whose ids are already in
    the cache file, and converts the rest into Submissions.  The updated
    seen-id cache is written back when cacheFileName is given.
    """

    submissions = []

    # NOTE(review): updatedLastIds aliases lastIds (no copy) — marking a pin
    # seen also mutates lastIds.  Harmless here because each id is checked
    # before being marked, but confirm the aliasing is intended.
    lastIds = {} if not cacheFileName else loadPinterestCache(cacheFileName)
    updatedLastIds = lastIds

    pinterest = Pinterest(email=email,
                          password=password,
                          username=username,
                          cred_root='pinterest_creds')

    logger.log("Logging in to Pinterest...")
    pinterest.login()

    boards = pinterest.boards(username=username)

    for board in boards:
        # Get all pins for the board
        board_pins = []
        pin_batch = pinterest.board_feed(board_id=board['id'])

        # NOTE(review): assumes board_feed() pages internally and eventually
        # returns an empty batch — confirm, otherwise this loops forever.
        while len(pin_batch) > 0:
            for pin in pin_batch:
                if pin['id'] not in lastIds:
                    # Only using the dict for its key lookup
                    updatedLastIds[pin['id']] = 1
                    board_pins.append(pin)

            pin_batch = pinterest.board_feed(board_id=board['id'])

        for pin in board_pins:

            # I'm not sure how important it is to support these
            if pin['type'] == 'story':
                continue

            newSubmission = Submission()
            newSubmission.source = u'Pinterest'
            # While pins do have titles, 90% of the time they seem useless
            newSubmission.title = pin['id']
            # There is probably a way to figure out who the original pinner is, but oh well
            newSubmission.author = 'N/A'
            newSubmission.subreddit = board['url']
            newSubmission.subredditTitle = board['name'] + '_Pinterest'
            # Pins lacking rich_summary leave .body/.postUrl unset —
            # presumably handled downstream; verify against consumers.
            if 'rich_summary' in pin and pin['rich_summary']:
                if 'display_description' in pin['rich_summary']:
                    newSubmission.body = pin['rich_summary'][
                        'display_description']
                else:
                    newSubmission.body = 'N/A'
                newSubmission.postUrl = pin['rich_summary']['url']

            # What is actually downloaded
            newSubmission.bodyUrl = pin['images']['orig']['url']
            submissions.append(newSubmission)

    if cacheFileName:
        savePinterestCache(cacheFileName, updatedLastIds)

    logger.log("Found {} new Pinterest submissions".format(len(submissions)))
    return submissions
Ejemplo n.º 48
0
 def get_job_status(self, job):
     """Fetch the current status of *job* as a Submission object."""
     status_url = '%s/job/%d/status' % (API_VERSION, job.id)
     response = self._root.get(status_url, headers=self.headers)
     return Submission.from_dict(response['submission'])
Ejemplo n.º 49
0
def import_old(path, as_convention = False, random_rsvp_users = 0, submission_limit = sys.maxint, timeslot_limit = sys.maxint):
    """Import a legacy Penguicon schedule HTML dump (Python 2: sys.maxint, viewvalues).

    arguments:
    * path - resource path of the schedule HTML file
    * as_convention - when True, build Events inside a new Convention with
      rooms, timeslots, and a generated schedule; when False, build bare
      Submissions
    * random_rsvp_users - number of synthetic users to create and RSVP
    * submission_limit / timeslot_limit - caps on imported items
    """

    if as_convention == True:
        convention = Convention()
        convention.name = 'Penguicon 2013'
        convention.url = '2013'
        convention.description = 'Penguicon 2013 schedule imported from schedule2013.html'
        convention.start_dt = datetime.datetime(year=2013, month=4, day=26, hour=16)
        convention.end_dt = datetime.datetime(year=2013, month=4, day=28, hour=16)
        convention.timeslot_duration = datetime.timedelta(hours=1)
        penguicontrax.db.session.add(convention)
        current_day = convention.start_dt.date()
        current_time = None

    # Cache existing DB rows so re-imports reuse rather than duplicate them.
    existing_tags = {}
    for tag in Tag.query.all():
        existing_tags[tag.name] = tag
        
    existing_people = {}
    for person in Person.query.all():
        existing_people[person.name] = person

    existing_tracks = {}
    for track in Track.query.all():
        existing_tracks[track.name] = track

    existing_rooms = {}
    existing_submissions = []

    submission_count = 0
    with penguicontrax.app.open_resource(path, mode='r') as f:
        tree = ET.fromstring(f.read())
        events = tree.find('document')
        for section in events:
            if submission_count == submission_limit:
                break
            if as_convention == True and section.tag == 'time':
                # '4 PM'-style headings: convert to 24h, advancing the day
                # whenever the clock wraps past midnight.
                time_text= section.text.split(' ')
                hour = int(time_text[0])
                if time_text[1] == 'PM' and hour != 12:
                    hour += 12
                elif time_text[1] == 'AM' and hour == 12:
                    hour = 0
                new_time = datetime.time(hour = hour)
                if not current_time is None and new_time.hour < current_time.hour:
                    current_day = current_day + datetime.timedelta(days=1)
                current_time = new_time
            elif section.tag == 'div' and section.attrib['class'] == 'section':
                name = section[0].text
                tag_list = section[1].text # Tag doesn't seem to be in the DB yet
                room = section[2].text
                person = section[3][0].text
                description = section[3][0].tail
                submission = Submission() if as_convention == False else Events()
                submission.title = name
                submission.description = description
                submission.duration = 1
                submission.setupTime = 0
                submission.repetition = 0
                submission.followUpState = 0
                submission.eventType = 'talk'
                #Load presenters
                submission.personPresenters= []
                for presenter in [presenter.strip() for presenter in person.split(',')]:
                    if presenter == 'Open':
                        continue #"Open" person will cause the schedule to become infesible
                    person = None
                    if not presenter in existing_people:
                        person = Person(presenter)
                        penguicontrax.db.session.add(person)
                        existing_people[presenter] = person
                    else:
                        person = existing_people[presenter]
                    submission.personPresenters.append(person)
                #Load Tags
                submission.tags = []
                for tag in tag_list.split(','):
                    tag = normalize_tag_name(tag)
                    db_tag = None
                    if not tag in existing_tags:
                        db_tag = Tag(tag, tag, True)
                        penguicontrax.db.session.add(db_tag)
                        existing_tags[tag] = db_tag
                    else:
                        db_tag = existing_tags[tag]
                    # Set track -- pick any tag that is also a track
                    if submission.track is None:
                        if tag in existing_tracks:
                            submission.track = existing_tracks[tag]
                    submission.tags.append(db_tag)
                #Load rooms
                if as_convention == True:
                    submission.convention = convention
                    db_room = None
                    if not room in existing_rooms:
                        db_room = Rooms()
                        db_room.room_name = room
                        db_room.convention = convention
                        penguicontrax.db.session.add(db_room)
                        existing_rooms[room] = db_room
                    else:
                        db_room = existing_rooms[room]
                    if not current_day is None and not current_time is None:
                        submission.rooms.append(db_room)
                        submission.start_dt = datetime.datetime(year=current_day.year, month=current_day.month, day=current_day.day,\
                            hour = current_time.hour, minute=current_time.minute)
                        submission.duration = 4 #1 hour
                existing_submissions.append(submission)
                penguicontrax.db.session.add(submission)
                submission_count = submission_count + 1
        penguicontrax.db.session.commit()

    if random_rsvp_users > 0:
        # Synthetic users spend all their points RSVPing to distinct
        # randomly-chosen submissions.
        for user_index in range(random_rsvp_users):
            user = User()
            user.name = 'Random User %d' % user_index
            user.email = '*****@*****.**' % user_index
            user.public_rsvps = True
            user.staff = False
            user.special_tag = None
            user.superuser = False
            generate_account_name(user)
            gravatar_image_update(user)
            for rsvp_index in range(user.points):
                rand = random.randint(0, len(existing_submissions) - 1)
                while user in existing_submissions[rand].rsvped_by:
                    rand = random.randint(0, len(existing_submissions) - 1)
                existing_submissions[rand].rsvped_by.append(user)
            user.points = 0
            penguicontrax.db.session.add(user)
        penguicontrax.db.session.commit()
        
    if as_convention == True:
        # Partition rooms into hackerspace/food/general pools, constrain
        # each event to its pool, then run the scheduler.
        from event import generate_schedule, generate_timeslots
        generate_timeslots(convention, timeslot_limit)
        all_rooms = [room for room in existing_rooms.viewvalues()]
        hackerspace = [existing_rooms['Hackerspace A'], existing_rooms['Hackerspace B']]
        food = [existing_rooms['Food']]
        from copy import copy
        general_rooms = copy(all_rooms)
        general_rooms.remove(hackerspace[0])
        general_rooms.remove(hackerspace[1])
        general_rooms.remove(food[0])
        timeslots = [timeslot for timeslot in convention.timeslots]
        for submission in existing_submissions:
            if food[0] in submission.rooms:
                submission.suitable_rooms = food
            elif hackerspace[0] in submission.rooms or hackerspace[1] in submission.rooms:
                submission.suitable_rooms = hackerspace
            else:
                submission.suitable_rooms = general_rooms
        for room in all_rooms:
            room.available_timeslots = timeslots
        generate_schedule(convention)
Ejemplo n.º 50
0
 def get_job_status(self, job):
   """Fetch *job*'s status via the submission action endpoint."""
   action_url = '%s/submission/action/%d/' % (API_VERSION, job.id)
   payload = self._root.get(action_url, headers=self.headers)
   return Submission.from_dict(payload['all'][0])
Ejemplo n.º 51
0
Archivo: base.py Proyecto: Lt-Pone/hue
 def get_job_status(self, job):
   """Return the current status of *job* as a Submission object."""
   status_url = '%s/job/%d/status' % (API_VERSION, job.id)
   response = self._root.get(status_url, headers=self.headers)
   return Submission.from_dict(response['submission'])
Ejemplo n.º 52
0
Archivo: base.py Proyecto: Lt-Pone/hue
 def start_job(self, job):
   """Start *job*; raise SqoopSubmissionException when the server reports a bad status."""
   response = self._root.put('%s/job/%d/start' % (API_VERSION, job.id), headers=self.headers)
   submission_dict = response['submission']
   if submission_dict['status'] in SqoopClient.STATUS_BAD:
     raise SqoopSubmissionException.from_dict(submission_dict)
   return Submission.from_dict(submission_dict)
Ejemplo n.º 53
0
Archivo: base.py Proyecto: Lt-Pone/hue
 def stop_job(self, job):
   """Request a stop for *job*; return the server's Submission state."""
   stop_url = '%s/job/%d/stop' % (API_VERSION, job.id)
   payload = self._root.put(stop_url, headers=self.headers)
   return Submission.from_dict(payload['submission'])
Ejemplo n.º 54
0
def view_submission_output(id):
    """Render a side-by-side view of a submission's expected vs. actual output."""
    # NOTE(review): .first() returns None for an unknown id, which would raise
    # AttributeError below -- confirm callers only pass existing ids.
    sub = Submission.objects(id=id).first()
    return render_template('output.html',
                           expected=newline_to_br(sub.expected_out),
                           actual=newline_to_br(sub.actual_out),
                           id=sub.id)
Ejemplo n.º 55
0
Archivo: base.py Proyecto: Lt-Pone/hue
 def get_submissions(self):
   """Return every Submission currently reported by the server."""
   payload = self._root.get('%s/submissions' % API_VERSION, headers=self.headers)
   return [Submission.from_dict(entry) for entry in payload['submissions']]
def _submissionFromRedditPost(redditPost):
    """Convert a praw post (praw.models.Submission) into our Submission type."""
    newSubmission = Submission()

    newSubmission.source = u'reddit'

    newSubmission.title = redditPost.title
    newSubmission.author = redditPost.author.name if redditPost.author else u'no_author'

    newSubmission.subreddit = redditPost.subreddit.url
    newSubmission.subredditTitle = redditPost.subreddit.title

    newSubmission.body = redditPost.selftext
    newSubmission.bodyUrl = redditPost.url

    newSubmission.postUrl = redditPost.permalink

    return newSubmission


def _submissionFromRedditComment(redditComment):
    """Convert a saved praw comment into our Submission type."""
    # See https://praw.readthedocs.io/en/latest/getting_started/quick_start.html
    # (very bottom) for how to enumerate what information praw objects provide.
    newSubmission = Submission()

    newSubmission.source = u'reddit'

    newSubmission.title = u'Comment on ' + redditComment.link_title
    newSubmission.author = redditComment.author.name if redditComment.author else u'no_author'

    newSubmission.subreddit = redditComment.subreddit.url
    newSubmission.subredditTitle = redditComment.subreddit.title

    newSubmission.body = redditComment.body
    newSubmission.bodyUrl = redditComment.permalink

    newSubmission.postUrl = redditComment.link_permalink

    return newSubmission


def getSubmissionsFromRedditList(redditList,
                                 source,
                                 earlyOutPoint=None,
                                 unlikeUnsave=False):
    """Convert a list of praw posts/comments into (submissions, comments).

    redditList    -- iterable of praw.models.Submission and saved-comment objects
    source        -- 'liked' clears votes when unlikeUnsave is set; anything
                     else unsaves instead
    earlyOutPoint -- optional one-element container; when the current post's
                     postUrl matches earlyOutPoint[0].postUrl, stop early
    unlikeUnsave  -- when True, clear the vote / unsave each processed post
    """
    submissions = []
    comments = []

    numTotalSubmissions = len(redditList)
    for currentSubmissionIndex, singleSubmission in enumerate(redditList):
        if currentSubmissionIndex and currentSubmissionIndex % 100 == 0:
            logger.log('Got {} submissions...'.format(currentSubmissionIndex))

        # isinstance instead of `type(...) is` so praw subclasses also count
        if isinstance(singleSubmission, praw.models.Submission):
            newSubmission = _submissionFromRedditPost(singleSubmission)
            submissions.append(newSubmission)

            logger.log(
                percentageComplete(currentSubmissionIndex,
                                   numTotalSubmissions))

            if unlikeUnsave:
                if source == 'liked':
                    singleSubmission.clear_vote()
                else:
                    singleSubmission.unsave()

                logger.log('Unsaved/cleared vote on submission ' +
                           singleSubmission.permalink)

            # Check to see if we've already downloaded this submission; if so, early out
            if (earlyOutPoint and earlyOutPoint[0]
                    and newSubmission.postUrl == earlyOutPoint[0].postUrl):
                logger.log(
                    'Found early out point after ' + str(len(submissions)) +
                    ' new submissions.'
                    ' If you e.g. changed your total requests value and want to go deeper, set'
                    ' Reddit_Try_Request_Only_New to False in your settings.txt'
                )
                break

        # The submission is actually a saved comment
        else:
            comments.append(_submissionFromRedditComment(singleSubmission))

    return submissions, comments
Ejemplo n.º 57
0
    def get(self):
        """Serve submission-retrieval requests as JSON.

        Dispatches on the 'flag' query parameter:
          '1' -- a single submission, looked up by its urlsafe ndb key ('id')
          '2' -- up to 20 recent submissions ("what's new" / around you)
          '4' -- up to 20 submissions in the requested 'category'
        A missing cookie, flag, or session yields a JSON error object.
        """
        cookie = self.request.get('cookie')

        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'

        # Guard clauses replace the original deeply nested if/else pyramid.
        if not cookie:
            self.response.write(json.dumps(json_error('retrieve', 'failure', 'cookie')))
            return

        flag = self.request.get('flag')
        if not flag:
            self.response.write(json.dumps(json_error('retrieve', 'failure', 'no flag')))
            return

        session = Session.query(Session.cookie == cookie).get()
        if not session:
            # Bug fix: this branch wrote the raw error dict (its Python repr)
            # instead of JSON like every other branch.
            self.response.write(json.dumps(json_error('retrieve', 'failure', 'session')))
            return

        if flag == '1':
            self._retrieve_single(session)
        elif flag == '2':
            self._retrieve_whats_new()
        elif flag == '4':
            self._retrieve_category()
        # every other flag generates an error
        else:
            self.response.write(json.dumps(json_error('retrieve', 'failure', 'flag')))

    def _retrieve_single(self, session):
        """flag == '1': write one submission plus this user's last vote on it."""
        try:
            id = self.request.get('id')
            if not id:
                self.response.write(json.dumps(json_error('single request', 'failure', 'id')))
                return

            # Key construction now happens after the emptiness check; the
            # original built the key first and relied on the except below.
            key = ndb.Key(urlsafe=id)
            submission = key.get()
            if not submission:
                error = json_error('single request', 'failure', 'no corresponding submission')
                self.response.write(json.dumps(error))
                return

            last_vote = Vote.query(ndb.AND(Vote.user == session.user,
                                           Vote.sub_id == submission.key.urlsafe())).get()
            vote_val = last_vote.value if last_vote else 0

            string_submission = json_string(
                submission.key.urlsafe(), submission.name, submission.category,
                submission.description, submission.location, submission.image,
                submission.keywords, submission.submitter, submission.tfrom,
                submission.tto, submission.rating, vote_val)
            self.response.write(json.dumps(string_submission))
        except Exception:
            # e.g. a malformed urlsafe id; narrowed from the original bare except
            self.response.write(json.dumps(json_error('single request', 'failure', 'id')))

    def _retrieve_whats_new(self):
        """flag == '2': write recent submissions, including location so the
        client can also plot them on a map ("around you")."""
        time_range = 1447786800
        # NOTE(review): /1e3 treats time_range as milliseconds, but the constant
        # looks like a seconds-since-epoch timestamp (Nov 2015); as written the
        # cutoff resolves to Jan 1970. Preserved as-is -- confirm intent.
        date = datetime.datetime.fromtimestamp(time_range/1e3)
        submissions_number = 20

        submissions = Submission.query(Submission.submitted >= date).fetch(submissions_number)
        if not submissions:
            self.response.write(json.dumps(json_error('what is new', 'failure', 'nothing in range')))
            return

        # fetch(n) returns at most n entities, so one loop replaces the two
        # duplicated index-based loops of the original.
        submissions_array = [
            json_array_with_location(s.key.urlsafe(), s.name, s.image, s.rating, s.location)
            for s in submissions]
        self.response.write(json.dumps(submissions_array))

    def _retrieve_category(self):
        """flag == '4': write submissions belonging to the requested category."""
        category = self.request.get('category')
        submissions_number = 20

        if not category:
            self.response.write(json.dumps(json_error('retrieve category', 'failure', 'no category')))
            return

        submissions = Submission.query(Submission.category == category).fetch(submissions_number)
        if not submissions:
            self.response.write(json.dumps(json_error('retrieve category', 'failure', 'empty category')))
            return

        submissions_array = [
            json_array(s.key.urlsafe(), s.name, s.image, s.rating)
            for s in submissions]
        self.response.write(json.dumps(submissions_array))
Ejemplo n.º 58
0
 def test_fail(self):
     """A half-correct answer set should grade to exactly 50."""
     answers = {"q1": "c", "q3": "b", "q2": "a", "q5": "d", "q4": "b", "q7": "w", "q6": "w", "q9": "w", "q8": "w", "q10": "w"}
     submission = Submission()
     submission.uid = 324
     graded = gradeTest(submission, answers)
     assert graded.score == 50.0
Ejemplo n.º 59
0
    def mine_bloc(self):
        global VOTE_WINDOW
        # NOTE(review): the string below sits after the `global` statement, so
        # Python does not treat it as this method's docstring.
        """Create a new bloc and add open submissions to it."""
        # update your ip (only if your publickey is registered) so that mining can be shared with all nodes
        # Mining requires a registered key; bail out when none is configured.
        if self.public_key is None:
            return None

        # Announce this node's public key to the central registration endpoint.
        publickey = {"publickey": self.public_key}
        requests.post('https://blocbit.net/kitty.php' ,params=publickey)
        # Fetch the currently last bloc of the blocchain
        last_bloc = self.__chain[-1]
        #last_pf = last_bloc.proof
        #window = self.load_window_data()
        # Hash the last bloc (=> to be able to compare it to the stored hash
        # value)
        hashed_bloc = hash_bloc(last_bloc)
        proof = self.proof_by_vote()
        # Added to avoid blocchain startup error after genesis bloxk as it contains no submission i.e. no zero
        # last_pf = last_bloc.proof
        # if last_pf != 86400:
        #     zero = self.submission_zero()           
        # else:
        #     zero = 365.0
        zero = self.submission_zero()
        # Voters have the right to vote daily, so let's create a window submission
        # reward_submission = {
        #     'voter': 'STATION',
        #     'candidate': owner,
        #     'amount': 0 or 1
        # }
        # Two candidate STATION submissions: amount 1 when the vote window is
        # open, amount 0 when it is closed; exactly one is appended below.
        Station_open = Submission(
            'STATION', self.public_key, zero, '', 1)
        Station_closed = Submission(
            'STATION', self.public_key, zero, '', 0)
        # Copy submission instead of manipulating the original
        # open_submissions list
        # This ensures that if for some reason the mining should fail,
        # we don't have the reward submission stored in the open submissions
        copied_submissions = self.__open_submissions[:]
        # Abort mining entirely if any pending submission fails verification.
        for tx in copied_submissions:
            if not Ballot.verify_submission(tx):
                return None
        
        # if global var is set to true award right and then set back to false
        if VOTE_WINDOW is False:
            copied_submissions.append(Station_closed)
        else:
            copied_submissions.append(Station_open)
            VOTE_WINDOW = False
        # Append the new bloc and clear the open-submission pool, then persist.
        bloc = Bloc(len(self.__chain), hashed_bloc,
                      copied_submissions, proof)
        self.__chain.append(bloc)
        self.__open_submissions = []
        self.save_data()
        # Broadcast the new bloc to every known peer; submissions are sent as
        # plain dicts. Unreachable peers are skipped; a 409 marks this node as
        # needing conflict resolution.
        for node in self.__peer_nodes:
            url = 'http://{}/broadcast-bloc'.format(node)
            converted_bloc = bloc.__dict__.copy()
            converted_bloc['submissions'] = [
                tx.__dict__ for tx in converted_bloc['submissions']]
            try:
                response = requests.post(url, json={'bloc': converted_bloc})
                if response.status_code == 400 or response.status_code == 500:
                    print('Bloc declined, needs resolving')
                if response.status_code == 409:
                    self.resolve_conflicts = True
            except requests.exceptions.ConnectionError:
                continue
        return bloc