def run(self):
    """Periodically read tag values and POST them as a Work to the superior.

    Runs until ``self.lock`` (an Event) is set; each cycle waits
    ``trigger['timer']`` seconds. Orders without a 'timer' trigger are
    ignored. On any POST error the soldier is shut down and the thread
    exits.
    """
    if 'timer' not in self.order.trigger:
        # Only timer-triggered orders are supported by this thread.
        return
    interval = self.order.trigger['timer']
    while not self.lock.wait(timeout=interval):
        raw = self.soldier.tag.get_values(self.order.values)
        for v in raw.values():
            if v == (None, None):
                # Kill the whole process if communication with the device
                # failed. NOTE(review): crude — the leader's accept side
                # needs rework before this can fail more gracefully.
                os._exit(1)
        # `vtype` instead of `type` to avoid shadowing the builtin.
        values = [{"type": vtype, "value": v[0], "unit": v[1]}
                  for vtype, v in raw.items()]
        timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()
        work = Work(timestamp, self.order.purpose, values)
        url = "{0}subordinates/{1}/work".format(
            self.soldier.superior_ep, self.soldier.id)
        _, err = rest.post(url, json=work.to_dict())
        if err is not None:
            self.soldier.shutdown()
            logger.fatal('in WorkingThread, failed to post work: {0}', err)
            return
def test_submit_work(self):
    """A work POSTed for a registered soldier is accepted and echoed back."""
    # Register the soldier the work will be attributed to.
    sol = SoldierInfo(id='sxxx0', name='sol_http',
                      place="left", weapons=[], orders=[])
    self.app.post('/leader/subordinates',
                  data=dumps(sol.to_dict()),
                  content_type='application/json')
    # Submit a work for that soldier.
    submitted = Work(purpose="some app",
                     time=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                     values="some values")
    res = self.app.post('/leader/subordinates/sxxx0/work',
                        data=dumps(submitted.to_dict()),
                        content_type='application/json')
    self.assertEqual(res.status_code, 200)
    # The response must report success and echo the accepted work.
    body = loads(res.data.decode("utf-8"))
    self.assertEqual(body, {
        "_status": {'success': True, 'msg': "status is ok"},
        "accepted": submitted.to_dict(),
    })
def run(self):
    """Periodically read tag values and POST them as a Work to the superior.

    Runs until ``self.lock`` (an Event) is set; each cycle waits
    ``trigger['timer']`` seconds. Orders without a 'timer' trigger are
    ignored. On any POST error the soldier is shut down and the thread
    exits.
    """
    if 'timer' not in self.order.trigger:
        # Only timer-triggered orders are supported by this thread.
        return
    interval = self.order.trigger['timer']
    while not self.lock.wait(timeout=interval):
        raw = self.soldier.tag.get_values(self.order.values)
        for v in raw.values():
            if v == (None, None):
                # Kill the whole process if communication with the device
                # failed. NOTE(review): crude — the leader's accept side
                # needs rework before this can fail more gracefully.
                os._exit(1)
        # `vtype` instead of `type` to avoid shadowing the builtin.
        values = [{"type": vtype, "value": v[0], "unit": v[1]}
                  for vtype, v in raw.items()]
        timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()
        work = Work(timestamp, self.order.purpose, values)
        url = "{0}subordinates/{1}/work".format(
            self.soldier.superior_ep, self.soldier.id)
        _, err = rest.post(url, json=work.to_dict())
        if err is not None:
            self.soldier.shutdown()
            logger.fatal('in WorkingThread, failed to post work: {0}', err)
            return
def test_submit_work(self):
    """Submitting a work for a known soldier returns 200 with the work echoed."""
    soldier_info = SoldierInfo(id='sxxx0', name='sol_http', place="left",
                               weapons=[], orders=[])
    self.app.post('/leader/subordinates',
                  data=dumps(soldier_info.to_dict()),
                  content_type='application/json')
    # POST the work itself.
    the_work = Work(purpose="some app",
                    time=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                    values="some values")
    response = self.app.post('/leader/subordinates/sxxx0/work',
                             data=dumps(the_work.to_dict()),
                             content_type='application/json')
    self.assertEqual(response.status_code, 200)
    actual = loads(response.data.decode("utf-8"))
    expected = {
        "_status": {'success': True, 'msg': "status is ok"},
        "accepted": the_work.to_dict(),
    }
    self.assertEqual(actual, expected)
def get(self):
    """Handle a work-creation GET request (visible fragment: duplicate and
    author checks only).

    Expects 'url' and 'name' (optionally 'contentlink' and 'author') in
    the query string. Responds with JSON status "dup" when a Work with
    the same link exists, or "fail" when the author id is missing.
    NOTE: Python 2 syntax (`except KeyError,strerror`).
    """
    data = self.request.GET
    # Duplicate detection: is there already a Work with this link?
    found = Work.gql("WHERE link = :1",data["url"])
    if found.count(1) > 0:
        work = found.fetch(1)[0]
        # Refresh the mutable fields on the existing record before
        # reporting the duplicate.
        work.name = data['name']
        if "contentlink" in data:
            work.contentlink = data['contentlink']
        work.put()
        self.jsonout(status="dup",
                     msg="%s already existed for %s with id %d",
                     format=(data['name'],work.author.name,work.key().id()),
                     key=str(work.key()),
                     id = work.key().id()
                     )
        return
    try:
        # `a` pre-set to None so the error message can show it even when
        # int()/lookup fails before assignment completes.
        a = None
        a = int(data['author'])
        author = A.get_author(a)
    except KeyError,strerror:
        self.jsonout(status="fail",
                     msg="Author not found(%s:%s). Use /author/create" ,
                     format=(a,strerror))
        return
def accept_work(sub_id):
    """
    Accept new work
    ---
    parameters:
      - name: sub_id
        description: The work's author-id
        in: path
        type: string
      - name: work
        description: A work to be accepted
        in: body
        required: true
        schema:
          $ref: '#/definitions/Work'
    responses:
      200:
        description: The work is accepted
        schema:
          properties:
            _status:
              description: Response status
              $ref: '#/definitions/ResponseStatus'
            accepted:
              description: The accepted work
              $ref: '#/definitions/Work'
    """
    # NOTE: the docstring above is a swagger spec parsed at runtime;
    # keep its YAML intact when editing.
    accepted = Work.make(request.json)
    leader.accept_work(sub_id, accepted)
    return jsonify(_status=ResponseStatus.Success,
                   accepted=accepted.to_dict())
def all(self):
    """Return every stored Work, ordered by id."""
    works = [
        Work(record['work_name'], record['starting_date'],
             record['ending_date'], record['work_status'], str(key))
        for key, record in self._db.items()
    ]
    return sorted(works, key=lambda w: w.id)
def get(self,id):
    """Serve the stored PDF bytes of the Work with the given id (admin only)."""
    self.enforce_admin()
    self.response.headers['Content-Type'] = 'application/pdf'
    # One-year expiry string — computed but currently unused: the
    # Expires header line below is commented out.
    expires_date = datetime.datetime.utcnow() + datetime.timedelta(365)
    expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
    # self.response.headers.add_header("Expires", expires_str)
    work = Work.get_by_id(int(id))
    self.response.out.write(work.data)
def get(self, id):
    """Stream the PDF payload of Work *id*; admins only."""
    self.enforce_admin()
    self.response.headers['Content-Type'] = 'application/pdf'
    # One-year expiry string, kept for the (disabled) Expires header below.
    a_year_out = datetime.datetime.utcnow() + datetime.timedelta(365)
    expires_str = a_year_out.strftime("%d %b %Y %H:%M:%S GMT")
    # self.response.headers.add_header("Expires", expires_str)
    record = Work.get_by_id(int(id))
    self.response.out.write(record.data)
def get(self,id):
    """Render saved list *id* as a page of works with count summaries."""
    saved = SavedList.get_by_id(int(id))
    context = {
        "works": Work.get(saved.keys),
        "overalltotal": get_count("Work"),
        "totalhere": len(saved.keys),
        "listname": saved.name,
        "id": id,
    }
    self.generate("listofworks.html", context)
def seed_data():
    """Create example data for the test database."""
    example = Work(_class='music',
                   _id='this_one',
                   image='/static/images/jim_camera_small.jpg',
                   title='xxxxxxxxx',
                   text='Something about this thingy do dah',
                   iframe='dmcdekker')
    db.session.add(example)
    db.session.commit()
def get(self,id):
    """Emit a wget script for saved list *id*.

    With a ?download parameter the script is served as an attachment;
    otherwise as plain text. Admin only.
    """
    self.enforce_admin()
    saved = SavedList.get_by_id(int(id))
    context = {
        "works": Work.get(saved.keys),
        "url": self.request.uri,
        "generatedtime": datetime.now(),
        "listid": id,
    }
    if self.request.get("download", None) is not None:
        self.response.headers['Content-Type'] = 'application/download'
        self.response.headers['Content-disposition'] = \
            'attachment; filename=%s.sh' % saved.name
    else:
        self.response.headers['Content-Type'] = 'text/plain'
    self.generate("wgetscript.sh", context)
class WorkCreate(BaseRequestHandler):
    """Create (or refresh) a Work record from query-string parameters.

    GET params: url, type, name, author (numeric id), optional contentlink.
    JSON responses: status "dup" when the link already exists, "fail"
    when the author id is missing, "ok" on creation.
    NOTE: Python 2 syntax (`except KeyError,strerror`).
    """
    def get(self):
        data = self.request.GET
        # Duplicate detection: is there already a Work with this link?
        found = Work.gql("WHERE link = :1",data["url"])
        if found.count(1) > 0:
            work = found.fetch(1)[0]
            # Refresh mutable fields on the existing record before
            # reporting the duplicate.
            work.name = data['name']
            if "contentlink" in data:
                work.contentlink = data['contentlink']
            work.put()
            self.jsonout(status="dup",
                         msg="%s already existed for %s with id %d",
                         format=(data['name'],work.author.name,work.key().id()),
                         key=str(work.key()),
                         id = work.key().id()
                         )
            return
        try:
            # `a` pre-set to None so the error message can show it even
            # when conversion/lookup fails before assignment completes.
            a = None
            a = int(data['author'])
            author = A.get_author(a)
        except KeyError,strerror:
            self.jsonout(status="fail",
                         msg="Author not found(%s:%s). Use /author/create" ,
                         format=(a,strerror))
            return
        work = Work(
            link = data['url'],
            blobtype= data['type'],
            name = data['name'],
            author = author,
            site = author.site,
        )
        if "contentlink" in data:
            work.contentlink = data['contentlink']
        work.put()
        # Maintain denormalized counters: global, per-author, per-site.
        increment("Work")
        increment("Work-%d"%author.key().id())
        increment("Work-%s"%author.site)
        msg = "%s - added. %s - Id %d"%(work.name,work.link,work.key().id())
        self.jsonout(status = "ok",
                     msg = msg,
                     id = work.key().id(),
                     format = (),
                     key = str(work.key())
                     )
def get(self,id):
    """Fetch the document behind a Work's link and cache it in the datastore.

    Admin only. Payloads of 1,000,000 bytes or more are rejected
    (datastore entity size limit).
    """
    self.enforce_admin()
    work = Work.get_by_id(int(id))
    # Fire the fetch asynchronously, then block on the result.
    rpc = urlfetch.create_rpc()
    urlfetch.make_fetch_call(rpc, work.link)
    payload = rpc.get_result().content
    size = len(payload)
    if size < 1000000:
        work.data = payload
        work.put()
        msg = "%s - %d byte data saved in store with id %d" % (
            work.name, size, work.key().id())
    else:
        msg = ("%s - %d byte is too much for datastore. Not inserting pdf. Id %d"
               % (work.name, size, work.key().id()))
    self.jsonout(status="ok", msg=msg)
def items_that_need_coverage(self, identifiers=None, **kwargs):
    """Find all Works lacking coverage from this CoverageProvider.

    By default every Work without coverage is chosen; when *identifiers*
    is given, only Works connected with one of those identifiers.
    """
    query = Work.missing_coverage_from(
        self._db, operation=self.operation,
        count_as_missing_before=self.cutoff_time, **kwargs)
    if not identifiers:
        return query
    wanted_ids = [identifier.id for identifier in identifiers]
    return query.join(Work.license_pools).filter(
        LicensePool.identifier_id.in_(wanted_ids))
def create_work(data, current_user, task_id):
    """Record a worked day against a task for the current user.

    Guard clauses: unknown task -> 404, non-member -> 403, invalid
    payload or duplicate date -> 400; otherwise the Work is persisted
    and returned with 201.
    """
    task = Task.query.filter_by(task_id=task_id).first()
    if not task:
        return jsonify({'message': 'No task found!'}), 404
    if current_user not in task.project.members:
        return jsonify({'message': 'You don\'t have permission to edit this task!'}), 403
    if not create_work_validator.validate(data):
        return jsonify({'message': 'Work not created!',
                        'errors': create_work_validator.errors}), 400
    if Work.query.filter_by(date=data['date']).first():
        return jsonify({'message': 'There\'s already work on that date!'}), 400
    # Stamp ownership before constructing the record.
    data['user_id'] = current_user.user_id
    data['task_id'] = task.task_id
    work = Work(**data)
    db.session.add(work)
    db.session.commit()
    return jsonify({'message': 'Work created!',
                    'work': work_schema.dump(work).data}), 201
def index(method, get, post, headers):
    """Render the work list; on POST create/update/delete a work and redirect."""
    works = work_manager.all()
    status = '200 OK'
    body = View("templates\index.html").render(works=works)
    if method == 'POST':
        # Redirect back to the list after mutating.
        status = '303 See Other'
        body = b''
        headers.append(('Location', '/'))
        name = get_first_element(post, 'work_name', '').strip()
        start = get_first_element(post, 'starting_date', '')
        end = get_first_element(post, 'ending_date', '')
        state = get_first_element(post, 'work_status', '').strip()
        work_id = get_first_element(post, 'id')
        record = Work(name, start, end, state, id=work_id)
        if get_first_element(post, 'is_delete'):
            work_manager.delete(record)
        else:
            work_manager.save(record)
    return status, body
# NOTE(review): fragment of a larger CSV-report routine — `by_source`,
# `sources`, `audience`, `out`, `_db` and `collect` are defined outside
# this view, and the final `row` of the last loop is written elsewhere.
# One count column per data source, plus a trailing total column.
row = [by_source[source] for source in sources]
row += [sum(row)]
row = [audience, "" ,""] + row
out.writerow(row)
out.writerow([])
# One row per fiction status (plus a ", no genre" sub-row each).
for fiction, name in (True, "Fiction"), (False, "Nonfiction"), (None, "No Fiction Status"):
    base_query = _db.query(Work).filter(Work.fiction==fiction)
    by_source = count_for_each_data_source(base_query, sources)
    row = [by_source[source] for source in sources]
    row += [sum(row)]
    row = [name, "", ""] + row
    out.writerow(row)
    # Same counts restricted to works with no assigned genre.
    unclassified_query = Work.with_no_genres(base_query)
    by_source = count_for_each_data_source(unclassified_query, sources)
    row = [by_source[source] for source in sources]
    row += [sum(row)]
    row = [name + ", no genre", "", ""] + row
    out.writerow(row)
out.writerow([])
# Per-genre-parentage rows, sorted for stable output.
stats = collect(_db, sources)
for parentage, by_source in sorted(stats.items()):
    total = 0
    row = list(parentage)
    for source in sources:
        row.append(by_source[source])
        total += by_source[source]
    row += [total]
def bulk_update(self, works, retry_on_batch_failure=True):
    """Upload a batch of works to the search index at once.

    :param works: Works to index.
    :param retry_on_batch_failure: If every document failed, retry the
        whole batch once before giving up.
    :return: (successes, failures) — failures is a list of
        (work_or_None, error_message) pairs.
    """
    from model import Work
    time1 = time.time()
    docs = Work.to_search_documents(works)
    for doc in docs:
        doc["_index"] = self.works_index
        doc["_type"] = self.work_document_type
    time2 = time.time()
    success_count, errors = self.bulk(
        docs,
        raise_on_error=False,
        raise_on_exception=False,
    )
    # If the entire update failed, try it one more time before giving
    # up on the batch.
    if retry_on_batch_failure and len(errors) == len(docs):
        self.log.info("Elasticsearch bulk update timed out, trying again.")
        return self.bulk_update(works, retry_on_batch_failure=False)
    time3 = time.time()
    self.log.info("Created %i search documents in %.2f seconds" % (len(docs), time2 - time1))
    self.log.info("Uploaded %i search documents in %.2f seconds" % (len(docs), time3 - time2))
    doc_ids = [d['_id'] for d in docs]
    # We weren't able to create search documents for these works, maybe
    # because they don't have presentation editions yet.
    missing_works = [work for work in works if work.id not in doc_ids]
    error_ids = [
        error.get('data', {}).get("_id", None)
        or error.get('index', {}).get('_id', None)
        for error in errors
    ]
    successes = [
        work for work in works
        if work.id in doc_ids and work.id not in error_ids
    ]
    failures = []
    for missing in missing_works:
        # BUG FIX: the original appended `work` (a leaked comprehension
        # variable — NameError on Python 3) instead of `missing`,
        # reporting the wrong Work in failures.
        if not missing.presentation_ready:
            failures.append(
                (missing, "Work not indexed because not presentation-ready."))
        else:
            failures.append((missing, "Work not indexed"))
    for error in errors:
        error_id = error.get('data', {}).get('_id', None) or error.get(
            'index', {}).get('_id', None)
        # Map the error back to its Work when possible; None otherwise.
        work = None
        works_with_error = [w for w in works if w.id == error_id]
        if works_with_error:
            work = works_with_error[0]
        error_message = error.get('error', None)
        if not error_message:
            error_message = error.get('index', {}).get('error', None)
        failures.append((work, error_message))
    self.log.info(
        "Successfully indexed %i documents, failed to index %i."
        % (success_count, len(failures)))
    return successes, failures
def bulk_update(self, works, retry_on_batch_failure=True):
    """Upload a batch of works to the search index at once.

    Works that are not presentation-ready are removed from the index
    instead of added (and counted as successes).

    :param works: Works to index or remove.
    :param retry_on_batch_failure: If every document failed, retry the
        additions once before giving up.
    :return: (successes, failures) — failures is a list of
        (work_or_None, error_message) pairs.
    """
    time1 = time.time()
    needs_add = []
    successes = []
    for work in works:
        if work.presentation_ready:
            needs_add.append(work)
        else:
            # Works are removed one at a time, which shouldn't
            # pose a performance problem because works almost never
            # stop being presentation ready.
            self.remove_work(work)
            successes.append(work)
    # Add any works that need adding.
    docs = Work.to_search_documents(needs_add)
    for doc in docs:
        doc["_index"] = self.works_index
        doc["_type"] = self.work_document_type
    time2 = time.time()
    success_count, errors = self.bulk(
        docs,
        raise_on_error=False,
        raise_on_exception=False,
    )
    # If the entire update failed, try it one more time before
    # giving up on the batch.
    #
    # Removed works were already removed, so no need to try them again.
    if len(errors) == len(docs):
        if retry_on_batch_failure:
            self.log.info("Elasticsearch bulk update timed out, trying again.")
            return self.bulk_update(needs_add, retry_on_batch_failure=False)
        else:
            docs = []
    time3 = time.time()
    self.log.info("Created %i search documents in %.2f seconds" % (len(docs), time2 - time1))
    self.log.info("Uploaded %i search documents in %.2f seconds" % (len(docs), time3 - time2))
    doc_ids = [d['_id'] for d in docs]

    # We weren't able to create search documents for these works, maybe
    # because they don't have presentation editions yet.
    def get_error_id(error):
        return error.get('data', {}).get('_id', None) or error.get('index', {}).get('_id', None)
    error_ids = [get_error_id(error) for error in errors]
    missing_works = [
        work for work in works
        if work.id not in doc_ids and work.id not in error_ids
        and work not in successes
    ]
    successes.extend(
        [work for work in works
         if work.id in doc_ids and work.id not in error_ids]
    )
    failures = []
    for missing in missing_works:
        # BUG FIX: the original reported the stale loop variable `work`
        # (left over from the earlier for-loop) instead of `missing`.
        failures.append((missing, "Work not indexed"))
    for error in errors:
        error_id = get_error_id(error)
        # Map the error back to its Work when possible; None otherwise.
        work = None
        works_with_error = [w for w in works if w.id == error_id]
        if works_with_error:
            work = works_with_error[0]
        error_message = error.get('error', None)
        if not error_message:
            error_message = error.get('index', {}).get('error', None)
        failures.append((work, error_message))
    self.log.info("Successfully indexed %i documents, failed to index %i." % (success_count, len(failures)))
    return successes, failures
def __init__(self, session, iso639):
    """Initialize with a DB session, the ISO-639 '2b' map, and a fresh Work."""
    self.work = Work(uuid=uuid4(), editions=[])
    self.session = session
    # Only the 2b variant of the language table is kept.
    self.iso639_2b = iso639['2b']
def test_missoin_do(self):
    """An accepted mission triggers exactly one report POST to the superior,
    containing the two works whose purpose matches the mission id.
    """
    # Stubbed rest.post: echoes the posted JSON back as "accepted".
    def post_report(url, data=None, json=None, etag=None, **kwargs):
        res = requests.Response()
        res.status_code = 200
        res_dict = {
            "_status": {
                "msg": "ok",
                "success": True
            },
            "accepted": json
        }
        res._content = dumps(res_dict).encode()
        return res, None
    self.leader_obj.superior_ep = "test://cxxx0/commander/"
    soldier = SoldierInfo(id="sxxx0", name="sol-test", place="left",
                          weapons=[], orders=[])
    self.leader_obj.accept_subordinate(soldier)
    mission = Mission(author="sxxx0",
                      requirement=Requirement(values=["zero", "random"],
                                              trigger={"timer": 0.4}),
                      trigger={"timer": 0.7},
                      place="All",
                      purpose="some purpose hash")
    work_1 = Work(time=datetime.utcnow().isoformat(),
                  purpose=mission.get_id(),
                  values=[0, 0.584249])
    work_2 = Work(time=datetime.utcnow().isoformat(),
                  purpose=mission.get_id(),
                  values=[0, 0.238491])
    work_3 = Work(
        time=datetime.utcnow().isoformat(),
        purpose="0" + mission.get_id()[:-1],  # offset from the two above
        values=[0, 0.045066])
    self.leader_obj.accept_work("sxxx0", work_1)
    self.leader_obj.accept_work("sxxx0", work_2)
    self.leader_obj.accept_work("sxxx0", work_3)
    with patch("utils.rest.post", side_effect=post_report) as m:
        self.leader_obj.accept_mission(mission)
        # Wait past the 0.7s report timer so the POST fires once.
        time.sleep(1)
        self.assertEqual(m.call_count, 1)
        self.assertEqual(
            m.call_args[0][0],
            "test://cxxx0/commander/subordinates/lxxx0/report")
        # check the report
        actual = m.call_args[1]["json"]
        self.assertEqual(set(actual.keys()),
                         {"time", "place", "purpose", "values"})
        self.assertEqual(actual["purpose"], "some purpose hash")
        self.assertEqual(len(actual["values"]), 2)
        # check report.values: each entry is the work dict with "purpose"
        # stripped and the soldier's "place" added
        work_in_1 = work_1.to_dict()
        del work_in_1["purpose"]
        work_in_1["place"] = "left"
        work_in_2 = work_2.to_dict()
        del work_in_2["purpose"]
        work_in_2["place"] = "left"
        self.assertIn(work_in_1, actual["values"])
        self.assertIn(work_in_2, actual["values"])
    self.leader_obj.superior_ep = ""  # prevent shutdown from sending DELETE
def test_missoin_do(self):
    """An accepted mission triggers exactly one report POST to the superior,
    containing the two works whose purpose matches the mission id.
    """
    # Stubbed rest.post: echoes the posted JSON back as "accepted".
    def post_report(url, data=None, json=None, etag=None, **kwargs):
        res = requests.Response()
        res.status_code = 200
        res_dict = {
            "_status": {"msg": "ok", "success": True},
            "accepted": json
        }
        res._content = dumps(res_dict).encode()
        return res, None
    self.leader_obj.superior_ep = "test://cxxx0/commander/"
    soldier = SoldierInfo(id="sxxx0", name="sol-test", place="left",
                          weapons=[], orders=[])
    self.leader_obj.accept_subordinate(soldier)
    mission = Mission(author="sxxx0",
                      requirement=Requirement(
                          values=["zero", "random"],
                          trigger={"timer": 0.4}
                      ),
                      trigger={"timer": 0.7},
                      place="All",
                      purpose="some purpose hash")
    work_1 = Work(time=datetime.utcnow().isoformat(),
                  purpose=mission.get_id(),
                  values=[0, 0.584249])
    work_2 = Work(time=datetime.utcnow().isoformat(),
                  purpose=mission.get_id(),
                  values=[0, 0.238491])
    work_3 = Work(time=datetime.utcnow().isoformat(),
                  purpose="0" + mission.get_id()[:-1],  # offset from the two above
                  values=[0, 0.045066])
    self.leader_obj.accept_work("sxxx0", work_1)
    self.leader_obj.accept_work("sxxx0", work_2)
    self.leader_obj.accept_work("sxxx0", work_3)
    with patch("utils.rest.post", side_effect=post_report) as m:
        self.leader_obj.accept_mission(mission)
        # Wait past the 0.7s report timer so the POST fires once.
        time.sleep(1)
        self.assertEqual(m.call_count, 1)
        self.assertEqual(m.call_args[0][0],
                         "test://cxxx0/commander/subordinates/lxxx0/report")
        # check the report
        actual = m.call_args[1]["json"]
        self.assertEqual(set(actual.keys()),
                         {"time", "place", "purpose", "values"})
        self.assertEqual(actual["purpose"], "some purpose hash")
        self.assertEqual(len(actual["values"]), 2)
        # check report.values: each entry is the work dict with "purpose"
        # stripped and the soldier's "place" added
        work_in_1 = work_1.to_dict()
        del work_in_1["purpose"]
        work_in_1["place"] = "left"
        work_in_2 = work_2.to_dict()
        del work_in_2["purpose"]
        work_in_2["place"] = "left"
        self.assertIn(work_in_1, actual["values"])
        self.assertIn(work_in_2, actual["values"])
    self.leader_obj.superior_ep = ""  # prevent shutdown from sending DELETE
def submit_grades():
    """Apply all course grades submitted from the grade grid."""
    # Trace the incoming submission for debugging.
    print_debug('submit grades')
    print_debug(request.form)
    grid = request.form
    Work.edit_grades(grid)
    # Send the client back to the page the grid lives on.
    return url_for('mainroute', pagepath=request.page.path)