def save_headwords(taskId):
    """Loads headword entries into a Spelling task as raw pieces.

    Creates a new Load record, then one RawPiece per entry in the
    'headwords' payload. Returns the dumped raw pieces as JSON.

    Raises InvalidUsage (404) when the task does not exist, or a plain
    InvalidUsage when the task is not of type 'Spelling'.
    """
    task = m.Task.query.get(taskId)
    if not task:
        raise InvalidUsage(_('task {0} not found').format(taskId), 404)
    if task.taskType != 'Spelling':
        raise InvalidUsage(
            _('task {0} has unexpected task type').format(taskId))
    data = MyForm(Field(
        'headwords',
        is_mandatory=True,
    )).get_data()
    # NOTE(review): 699 looks like a hard-coded system user id - confirm
    load = m.Load(taskId=taskId, createdBy=699)
    SS.add(load)
    SS.flush()
    rawPieces = []
    for i, r in enumerate(data['headwords']):
        assemblyContext = 'L_%05d_%05d' % (load.loadId, i)
        allocationContext = 'L_%05d' % load.loadId
        # strip 'meta' (if present) so it is not passed to the RawPiece
        # constructor; pop() replaces the original try/del/except dance
        r.pop('meta', None)
        # fixed: removed leftover debug statement `print r` that dumped
        # raw payload rows to stdout on every request
        rawPiece = m.RawPiece(taskId=taskId, loadId=load.loadId,
            assemblyContext=assemblyContext,
            allocationContext=allocationContext, words=1, **r)
        rawPieces.append(rawPiece)
        SS.add(rawPiece)
    SS.flush()
    return jsonify(rawPieces=m.RawPiece.dump(rawPieces))
def create_label_group(labelSetId):
    """Creates a new label group within the given label set."""
    labelSet = m.LabelSet.query.get(labelSetId)
    if not labelSet:
        raise InvalidUsage(
            _('label set {0} not found').format(labelSetId), 404)
    form = MyForm(
        Field('name', is_mandatory=True, validators=[
            validators.is_string,
            (check_label_group_name_uniqueness, (labelSetId, None)),
        ]),
        Field('dropDownDisplay', default=False, validators=[
            validators.is_bool,
        ]),
        Field('isMandatory', default=False, validators=[
            validators.is_bool,
        ]),
    )
    labelGroup = m.LabelGroup(**form.get_data())
    SS.add(labelGroup)
    SS.flush()
    payload = {
        'message': _('created label group {0} successfully').format(
            labelGroup.name),
        'labelGroup': m.LabelGroup.dump(labelGroup),
    }
    return jsonify(payload)
def disable_tag_for_sub_task(subTaskId, tagId):
    """Shadows (disables) a tag for the given sub task; idempotent."""
    existing = m.ShadowedTag.query.get((subTaskId, tagId))
    if not existing:
        SS.add(m.ShadowedTag(subTaskId=subTaskId, tagId=tagId))
    return jsonify(message=_(
        'tag {0} has been disabled for sub task {1}').format(tagId, subTaskId))
def create_sub_task_rate_record(subTaskId):
    """Creates a payment rate record for the given sub task."""
    subTask = m.SubTask.query.get(subTaskId)
    if not subTask:
        raise InvalidUsage(_('sub task {0} not found').format(subTaskId))
    fields = MyForm(
        Field('rateId', is_mandatory=True, validators=[]),
        Field('multiplier', is_mandatory=True,
            normalizer=lambda data, key, value: float(value),
            validators=[
                (validators.is_number, (), dict(min_value=0)),
            ]),
        Field('bonus', default=None, validators=[
            (validators.is_number, (), dict(ge=0)),
        ]),
    ).get_data()
    me = session['current_user']
    rate = m.SubTaskRate(taskId=subTask.taskId, updatedBy=me.userId,
        **fields)
    SS.add(rate)
    SS.flush()
    return jsonify({
        'message': _('created sub task rate {0} successfully').format(
            rate.subTaskRateId),
        'subTaskRate': m.SubTaskRate.dump(rate),
    })
def create_language(self):
    """Handles an EDM 'create language' message.

    If a local Language with the same name exists, applies the decoded
    changes to it; otherwise inserts a new Language built from the
    payload. Commits in either branch.
    """
    desc = json.loads(self.Message)
    data = util.edm.decode_changes('Language', desc['changes'])
    current_app.logger.info(
        'a language is being created using {}'.format(data))
    try:
        lang = m.Language.query.filter(m.Language.name == data['name']).one()
        current_app.logger.info(
            'found language {}, applying changes {}'.format(lang.name, data))
        changes = {}
        for k, v in data.iteritems():
            try:
                # only apply keys that are real Language attributes and
                # actually differ; AttributeError keys are skipped
                if getattr(lang, k) != v:
                    setattr(lang, k, v)
                    changes[k] = v
            except AttributeError:
                continue
        current_app.logger.debug('actual changes {}'.format(changes))
        SS.flush()
        SS.commit()
    except sqlalchemy.orm.exc.NoResultFound:
        # no local match by name - roll back the failed lookup and
        # create a new Language from the payload
        SS.rollback()
        lang = m.Language(**data)
        SS.add(lang)
        SS.flush()
        SS.commit()
def create_alphabet():
    """Creates a new alphabet record and returns it as JSON."""
    fields = MyForm(
        Field('name', is_mandatory=True, validators=[
            validators.non_blank,
            check_name_uniqueness,
        ]),
        Field('description'),
        Field('dialectId', is_mandatory=True, validators=[
            validators.is_number,
            check_dialect_existence,
        ]),
        Field('url', default=lambda: None),
    ).get_data()
    alphabet = m.Alphabet(**fields)
    SS.add(alphabet)
    SS.flush()
    payload = {
        'message': _('created alphabet {0} successfully').format(
            alphabet.name),
        'alphabet': m.Alphabet.dump(alphabet),
    }
    return jsonify(payload)
def create_error_type():
    """Creates a new error type under an existing error class."""
    fields = MyForm(
        Field('name', is_mandatory=True, validators=[
            validators.non_blank,
            check_name_uniqueness,
        ]),
        Field('errorClassId', is_mandatory=True, validators=[
            check_error_class_existence,
        ]),
        Field('defaultSeverity', is_mandatory=True,
            normalizer=lambda data, key, value: float(value),
            validators=[
                (validators.is_number, (), dict(max_value=1, min_value=0)),
            ]),
    ).get_data()
    errorType = m.ErrorType(**fields)
    SS.add(errorType)
    SS.flush()
    payload = {
        'message': _('created error type {0} successfully').format(
            errorType.name),
        'errorType': m.ErrorType.dump(errorType),
    }
    return jsonify(payload)
def end_work_intervals(task=None):
    """Closes every CURRENT work interval (optionally restricted to one
    task) at the end of today (UTC) and opens a fresh CURRENT interval
    starting at the same instant."""
    query = m.WorkInterval.query.filter(
        m.WorkInterval.status == m.WorkInterval.STATUS_CURRENT)
    if task is not None:
        query = query.filter(m.WorkInterval.taskId == task.taskId)
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    # last microsecond of the current UTC day
    endTime = datetime.datetime(now.year, now.month, now.day,
        23, 59, 59, 999999, tzinfo=pytz.utc)
    for interval in query.all():
        interval.endTime = endTime
        interval.status = interval.STATUS_ADDING_FINAL_CHECKS
        SS.add(m.WorkInterval(
            taskId=interval.taskId,
            subTaskId=interval.subTaskId,
            status=interval.STATUS_CURRENT,
            startTime=endTime,
            endTime=None,
        ))
def disable_label_for_sub_task(subTaskId, labelId):
    """Shadows (disables) a label for the given sub task; idempotent."""
    existing = m.ShadowedLabel.query.get((subTaskId, labelId))
    if not existing:
        SS.add(m.ShadowedLabel(subTaskId=subTaskId, labelId=labelId))
    return jsonify(
        message=_('label {0} has been disabled for sub task {1}').format(
            labelId, subTaskId))
def save_stats(self):
    """Persists the statistics accumulated on this object.

    Writes, in order: per-user daily subtotals (replacing existing rows
    for this sub task), per-user and per-interval metrics with their
    error/abnormal-usage detail rows, and finally the aggregate columns
    on the sub task itself. Does not commit; caller owns the
    transaction.
    """
    # update dailysubtasktotals
    # delete-then-insert: wipe this sub task's existing subtotal rows
    SS.bind.execute(
        m.DailySubtotal.__table__.delete(
            m.DailySubtotal.subTaskId == self.subTask.subTaskId))
    for (subTaskId, userId, workDate), c in self.per_user_per_day.iteritems():
        entry = m.DailySubtotal(subTaskId=subTaskId, userId=userId,
            totalDate=workDate, amount=c.itemCount, words=c.unitCount)
        SS.add(entry)
    # update subtaskmetrics/abnormalusage/subtaskmetricerrors
    # TODO: optionally delete existing entries from above 3 tables
    # TODO: configure server_default for lastUpdated
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    for source in [self.per_user_per_interval, self.per_user]:
        for (subTaskId, userId, workIntervalId), c in source.iteritems():
            # interval-keyed counters carry no subTaskId and vice versa
            assert ((source == self.per_user_per_interval
                and subTaskId is None)
                or (source == self.per_user and workIntervalId is None))
            metric = m.SubTaskMetric(userId=userId,
                workIntervalId=workIntervalId, subTaskId=subTaskId,
                itemCount=c.itemCount, unitCount=c.unitCount,
                workRate=c.workRate, accuracy=c.accuracy,
                lastUpdated=now)
            SS.add(metric)
            # flush so metric.metricId is assigned before detail rows
            SS.flush()
            for errorTypeId, occurences in c.flaggedErrors.iteritems():
                entry = m.SubTaskMetricErrorEntry(metricId=metric.metricId,
                    errorTypeId=errorTypeId, occurences=occurences)
                SS.add(entry)
            for tagId, degree in c.abnormalTagUsage.iteritems():
                entry = m.AbnormalUsageEntry(metricId=metric.metricId,
                    tagId=tagId, labelId=None, degree=degree)
                SS.add(entry)
            for labelId, degree in c.abnormalLabelUsage.iteritems():
                entry = m.AbnormalUsageEntry(metricId=metric.metricId,
                    tagId=None, labelId=labelId, degree=degree)
                SS.add(entry)
    # update subtasks
    self.subTask.meanAmount = self.sub_task_stats.meanAmount
    self.subTask.maxAmount = self.sub_task_stats.maxAmount
    self.subTask.accuracy = self.sub_task_stats.accuracy
    self.subTask.maxWorkRate = self.sub_task_stats.maxWorkRate
    self.subTask.medianWorkRate = self.sub_task_stats.medianWorkRate
def create_person(self):
    """Handles an EDM 'create person' message.

    Resolves the optional country by iso3 (creating it from EDM when
    missing), then creates or updates the local user matched by email
    address, and commits. Re-raises on commit failure after rollback.
    """
    desc = json.loads(self.Message)
    globalId = desc['global_id']
    data = util.edm.decode_changes('Person', desc['changes'])
    iso3 = data.pop('_countryIso3', None)
    if iso3:
        try:
            country = m.Country.query.filter(m.Country.iso3 == iso3).one()
        # country not found - create country from edm
        except sqlalchemy.orm.exc.NoResultFound:
            result = util.edm.get_country(iso3)
            country_data = dict(
                name=result['name_eng'],
                iso2=result['iso2'],
                iso3=iso3,
                isoNum=result['iso_num'],
                internet=result['internet'],
                active=result['active'],
            )
            country = m.Country(**country_data)
            SS.add(country)
        # NOTE(review): in the create branch the new country is never
        # flushed, so countryId may still be unset here - confirm
        data['countryId'] = country.countryId
    try:
        user = m.User.query.filter_by(emailAddress=data['emailAddress']).one()
    # user not found via email address - create user
    except sqlalchemy.orm.exc.NoResultFound:
        user = m.User(**data)
        user.globalId = globalId
        SS.add(user)
        current_app.logger.info('user {0} was created using {1}'.format(
            user.userId, data))
    # user found via email address - apply updates
    else:
        for k, v in data.items():
            # email address is the match key; never overwrite it
            if k == 'emailAddress':
                continue
            setattr(user, k, v)
        user.globalId = globalId
        current_app.logger.info('user {0} was updated using {1}'.format(
            user.userId, data))
    current_app.logger.info("committing create_person changes")
    try:
        SS.commit()
    # Python 2 except syntax: binds the caught exception to e
    except (psycopg2.Error, sqlalchemy.exc.IntegrityError), e:
        current_app.logger.error(
            "error while committing create_person changes, rolling back: {0}".
            format(e))
        SS.rollback()
        raise
def save_record(self):
    """Persists this SNS message, refusing duplicates by MessageId."""
    if m.SnsMessageRecord.query.get(self.MessageId):
        raise RuntimeError(
            _('message {} has been saved already').format(self.MessageId))
    SS.add(m.SnsMessageRecord(messageId=self.MessageId,
        messageType=self.Type, body=self.body))
    SS.flush()
    current_app.logger.debug('record of message {} has been saved'.format(
        self.MessageId))
def main(taskId=None):
    """Script entry point for payroll processing.

    Progresses work intervals, updates payroll status for every open
    task (or just the given one), then submits all unpaid
    CalculatedPayment rows and records their receipts.

    taskId: restrict processing to one task; raises ValueError when
    that task does not exist. None processes all tasks that are not
    archived/closed/finished.
    """
    logging.basicConfig(level=logging.DEBUG)
    progress_work_intervals()
    if taskId is None:
        tasks = m.Task.query.filter(
            m.Task.status.notin_([
                m.Task.STATUS_ARCHIVED, m.Task.STATUS_CLOSED,
                m.Task.STATUS_FINISHED
            ])).all()
    else:
        task = m.Task.query.get(taskId)
        if not task:
            raise ValueError('task {0} not found'.format(taskId))
        tasks = [task]
    payroll_data = ao.get_payroll()
    payrollId = payroll_data['payrollId']
    payroll = m.BasicPayroll.query.get(payrollId)
    if not payroll:
        payroll = m.BasicPayroll(payrollId=payrollId)
        SS.add(payroll)
    for task in tasks:
        try:
            update_payroll_status(task, payrollId)
        # fixed: was a bare `except:` - narrowed to Exception so
        # KeyboardInterrupt/SystemExit still propagate; a failed task
        # is rolled back and the loop continues (best-effort per task)
        except Exception:
            log.info('task {} failed'.format(task.taskId))
            # traceback.format_exc() replaces the cStringIO buffer dance
            log.error(traceback.format_exc())
            SS.rollback()
        else:
            log.info('task {} succeeded'.format(task.taskId))
    SS.commit()
    # find all CalculatedPayment entries and send them as package
    payments = m.CalculatedPayment.query.filter(
        m.CalculatedPayment.receipt.is_(None)).filter(
            m.CalculatedPayment.payrollId == payrollId).filter(
                m.CalculatedPayment.unitCount > 0).all()
    receipts = ao.send_payments(payments)
    for cp in payments:
        cp.receipt = receipts.get(cp.calculatedPaymentId, None)
    SS.commit()
def action_assign_task_supervisor(userIds, taskId):
    """Makes each listed user a supervisor of the task; idempotent.

    Returns a dict with a confirmation message and a link to the task's
    supervisors configuration page.
    """
    for uid in userIds:
        if not m.TaskSupervisor.query.get((taskId, uid)):
            SS.add(m.TaskSupervisor(taskId=taskId, userId=uid))
    total = len(userIds)
    template = ('Assigned the user as a supervisor of task {0}'
        if total == 1 else
        'Assigned {1} users as supervisors of task {0}')
    message = _(template).format(taskId, len(userIds))
    url = url_for('views.task_config', taskId=taskId, _anchor='supervisors',
        _external=True)
    return {'message': message, 'link': url}
def create_new_alphabet_rule(alphabetId):
    """Adds a new rule to an existing alphabet."""
    alphabet = m.Alphabet.query.get(alphabetId)
    if not alphabet:
        raise InvalidUsage(_('alphabet {0} not found').format(alphabetId), 404)
    fields = MyForm(
        Field('name'),
        Field('type'),
        Field('description'),
    ).get_data()
    rule = m.Rule(**fields)
    rule.alphabetId = alphabetId
    SS.add(rule)
    SS.flush()
    return jsonify(rule=m.Rule.dump(rule))
def create_pool():
    """Creates a question pool (with its questions) from form data."""
    fields = MyForm(
        Field('name', is_mandatory=True, validators=[
            validators.non_blank,
            (check_pool_name_uniqueness, (None, )),
        ]),
        Field('meta', is_mandatory=True, default='{}',
            normalizer=normalize_pool_meta_data),
        Field('taskTypeId', is_mandatory=True, validators=[
            validators.is_number,
            check_task_type_existence,
        ]),
        Field('autoScoring', is_mandatory=True,
            normalizer=normalize_bool_literal,
            validators=[
                validators.is_bool,
            ]),
        Field('tagSetId', validators=[
            check_tag_set_existence,
        ]),
        Field('dataFile', is_mandatory=True),
        Field('questions', is_mandatory=True, default=[],
            normalizer=load_questions),
    ).get_data(is_json=False)
    # questions and dataFile are not Pool constructor arguments
    question_dicts = fields.pop('questions')
    fields.pop('dataFile')
    pool = m.Pool(**fields)
    SS.add(pool)
    for qd in question_dicts:
        pool.questions.append(m.Question(**qd))
    SS.flush()
    return jsonify({
        'pool': m.Pool.dump(pool, context={'level': 0}),
    })
def populate_rework_sub_task_from_extract(subTaskId):
    """Loads a tx extract file into a REWORK sub task and records a
    content event for the added items.

    When the 'validation' flag is set, only validates the file and
    returns without loading anything.
    """
    subTask = m.SubTask.query.get(subTaskId)
    if not subTask:
        raise InvalidUsage(_('sub task {0} not found').format(subTaskId), 404)
    if not subTask.workType == m.WorkType.REWORK:
        raise InvalidUsage(
            _('work type {0} not supported').format(m.WorkType.REWORK))
    fields = MyForm(
        Field('srcSubTaskId', ),
        Field('dataFile', is_mandatory=True, validators=[
            validators.is_file,
        ]),
        Field('validation', default='false',
            normalizer=normalize_bool_literal,
            validators=[
                validators.is_bool,
            ]),
    ).get_data(is_json=False)
    if fields['validation']:
        # validation-only request: stop before loading anything
        return jsonify(message=_('data file validated'))
    srcSubTask = m.SubTask.query.get(fields['srcSubTaskId'])
    me = session['current_user']
    loader = TxLoader(subTask.taskId)
    result = loader.load_tx_file(fields['dataFile'], srcSubTask, me, subTask)
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    # add rework content event recording how many items were added
    content_event = m.SubTaskContentEvent(subTaskId=subTaskId, isAdding=True,
        tProcessedAt=now, itemCount=result['itemCount'],
        operator=me.userId)
    SS.add(content_event)
    SS.flush()
    return jsonify(
        message=_('okay'),
        event=m.SubTaskContentEvent.dump(content_event),
    )
def submit_answer(sheetId):
    """Records an answer for one sheet entry.

    Rejects expired sheets (marking the expiry first) and callers who
    do not own the sheet.
    """
    sheet = m.Sheet.query.get(sheetId)
    if not sheet:
        raise InvalidUsage(_('sheet {0} not found').format(sheetId), 404)
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    if sheet.tExpiresBy < now:
        # persist the expiry timestamp before rejecting the request
        sheet.tExpiredAt = now
        SS.flush()
        SS.commit()
        raise InvalidUsage(_('sheet {0} has expired already').format(sheetId))
    me = session['current_user']
    if sheet.userId != me.userId:
        raise InvalidUsage(
            _('you are not the owner of sheet {0}').format(sheetId))
    fields = MyForm(
        Field('sheetEntryId', is_mandatory=True, validators=[
            (check_sheet_entry_existence, (sheetId, )),
        ]),
        Field('answer', is_mandatory=True, validators=[
            validators.is_string,
            check_answer,
        ]),
    ).get_data(with_view_args=False)
    answer = m.Answer(**fields)
    SS.add(answer)
    SS.flush()
    assert answer.answerId
    # TODO: define relationship on SheetEntry
    entry = m.SheetEntry.query.get(fields['sheetEntryId'])
    entry.answerId = answer.answerId
    return jsonify({
        'message': _('created answer {0} successfully').format(
            answer.answerId),
        'answer': m.Answer.dump(answer),
    })
def create_qa_batches(self, qaSubTask, userId, intervalId, samples,
        priority=5):
    """Packs sampled work entries into QA batches, one single-page batch
    per chunk of at most qaSubTask.maxPageSize entries.

    notUserId excludes the original worker from QA-ing their own work.
    No-op when samples is empty.
    """
    if not samples:
        return
    for chunk in self.paginate(samples, qaSubTask.maxPageSize):
        batch = m.Batch(taskId=qaSubTask.taskId,
            subTaskId=qaSubTask.subTaskId, notUserId=userId,
            workIntervalId=intervalId, priority=priority)
        page = m.Page(pageIndex=0)
        batch.pages.append(page)
        for index, workEntryId in enumerate(chunk):
            member = m.PageMemberEntry(memberIndex=index)
            member.workEntryId = workEntryId
            page.memberEntries.append(member)
        SS.add(batch)
    SS.flush()
def submit_marking(sheetId):
    """Scores a sheet: stores one marking per sheet entry and updates
    the sheet's moreAttempts/comment/score fields."""
    sheet = m.Sheet.query.get(sheetId)
    if not sheet:
        raise InvalidUsage(_('sheet {0} not found').format(sheetId), 404)
    # TODO: add policy check to enable/disable re-marking and/or marking
    # of expired sheets
    fields = MyForm(
        Field('moreAttempts', is_mandatory=True, validators=[
            validators.is_bool,
        ]),
        Field('comment', validators=[
            validators.is_string,
        ]),
        Field('markings', is_mandatory=True,
            normalizer=normalize_marking_data),
        Field('score', is_mandatory=True, default=0,
            normalizer=calculate_sheet_score),
    ).get_data()
    # TODO: define relationship marking on SheetEntry
    me = session['current_user']
    marking_data = fields.pop('markings')
    for entry, md in zip(sheet.entries, marking_data):
        marking = m.Marking(**md)
        marking.sheetEntryId = entry.sheetEntryId
        marking.scorerId = me.userId
        SS.add(marking)
        # flush so markingId is assigned before linking it to the entry
        SS.flush()
        entry.markingId = marking.markingId
    sheet.moreAttempts = fields['moreAttempts']
    sheet.comment = fields['comment']
    sheet.score = fields['score']
    return jsonify({
        'message': _('marked sheet {0} successfully').format(sheetId),
        'sheet': m.Sheet.dump(sheet),
    })
def _get_user(userId):
    """Returns the local User for userId, creating it from EDM on
    demand.

    Best-effort lookup: returns None when the user cannot be fetched
    from EDM or cannot be persisted locally; errors are swallowed by
    design.
    """
    user = m.User.query.get(userId)
    if user is None:
        # user not found locally - try to create it from edm
        try:
            user = edm.make_new_user(userId)
        # fixed: was a bare `except:` - narrowed to Exception so
        # KeyboardInterrupt/SystemExit are not swallowed
        except Exception:
            # error getting user from edm
            user = None
        else:
            try:
                SS.add(user)
                SS.commit()
            except Exception:
                # error adding user locally
                SS.rollback()
                user = None
    return user
def create_label(labelSetId):
    """Creates a new label inside the given label set."""
    labelSet = m.LabelSet.query.get(labelSetId)
    if not labelSet:
        raise InvalidUsage(
            _('label set {0} not found').format(labelSetId), 404)
    fields = MyForm(
        Field('name', is_mandatory=True, validators=[
            (check_label_name_uniqueness, (labelSetId, None)),
        ]),
        Field('description'),
        Field('shortcutKey', validators=[
            (validators.is_string, (), dict(length=1)),
            check_label_shortcut_key_non_space,
            (check_label_shortcut_key_uniqueness, (labelSetId, None)),
        ]),
        Field('extract', is_mandatory=True, validators=[
            validators.non_blank,
            (check_label_extract_uniqueness, (labelSetId, None)),
        ]),
        Field('labelGroupId', validators=[
            (check_label_group_existence, (labelSetId, )),
        ]),
    ).get_data()
    label = m.Label(**fields)
    SS.add(label)
    SS.flush()
    payload = {
        'message': _('created label {0} successfully').format(label.name),
        'label': m.Label.dump(label),
    }
    return jsonify(payload)
def create_error_class():
    """Creates a new error class."""
    fields = MyForm(
        Field('name', is_mandatory=True,
            validators=[validators.non_blank, check_name_uniqueness]),
    ).get_data()
    errorClass = m.ErrorClass(**fields)
    SS.add(errorClass)
    SS.flush()
    payload = {
        'message': _('created error class {0} successfully').format(
            errorClass.name),
        'errorClass': m.ErrorClass.dump(errorClass),
    }
    return jsonify(payload)
def action_assign_task_workers(userIds, taskId):
    """Assigns each listed user as a worker on the first WORK-type sub
    task of the given task; idempotent per user.

    Raises InvalidUsage when the task has no WORK sub task. Returns a
    dict with a confirmation message and a link to the workers page.
    """
    subTask = m.SubTask.query.filter_by(taskId=taskId
        ).filter(m.SubTask.workType == m.WorkType.WORK
        # fixed: was m.subTask.subTaskId (AttributeError on module m)
        ).order_by(m.SubTask.subTaskId).first()
    # fixed: was `if not SubTask:` (NameError - undefined name)
    if not subTask:
        raise InvalidUsage(_('no sub task found under task {0}'
            ).format(taskId))
    for userId in userIds:
        s = m.TaskWorker.query.get((userId, taskId, subTask.subTaskId))
        if not s:
            s = m.TaskWorker(userId=userId, taskId=taskId,
                subTaskId=subTask.subTaskId)
            SS.add(s)
    total = len(userIds)
    message = _('Assigned the user as a worker for task {0}'
        if total == 1 else
        'Assigned {1} users as workers for task {0}'
        ).format(taskId, total)
    url = url_for('views.task_workers', taskId=taskId, _external=True)
    return {'message': message, 'link': url}
def start_or_resume_test(testId):
    """Resumes the caller's active sheet for a test, or creates a new
    sheet populated with freshly generated questions."""
    me = session['current_user']
    test = m.Test.query.get(testId)
    if not test:
        raise InvalidUsage(_('test {0} not found').format(testId))
    if not test.isEnabled:
        raise InvalidUsage(_('test {0} is not enabled').format(testId))
    # TODO: need to find out ids of languages current user speaks
    languageIds = [1, 2, 3, 4]
    record = TestManager.report_eligibility(test, me, languageIds)
    if not record.get('url'):
        raise InvalidUsage(
            _('user {0} is not eligible for test {1}').format(
                me.userId, testId))
    sheets = m.Sheet.query.filter_by(testId=testId).filter_by(
        userId=me.userId).order_by(m.Sheet.nTimes.desc()).all()
    # NOTE: uncomment this block if nTimes needs to be fixed automatically
    # for i, sheet in enumerate(sheets):
    #     if sheet.nTimes != i:
    #         sheet.nTimes = i
    # NOTE(review): sheets are ordered by nTimes descending, so
    # sheets[-1] is the lowest nTimes - confirm that is intended
    if sheets and sheets[-1].status == m.Sheet.STATUS_ACTIVE:
        return jsonify(sheetId=sheets[-1].sheetId)
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    sheet = m.Sheet(userId=me.userId, testId=testId, nTimes=len(sheets),
        tStartedAt=now, tExpiresBy=(now + test.timeLimit))
    SS.add(sheet)
    SS.flush()
    for index, question in enumerate(TestManager.generate_questions(test)):
        sheet.entries.append(m.SheetEntry(sheetId=sheet.sheetId,
            index=index, questionId=question.questionId))
    return jsonify(sheetId=sheet.sheetId)
def dismiss_all_batches(subTaskId):
    """Deletes every batch of a sub task (with pages and member
    entries) and records tracking and content events."""
    subTask = m.SubTask.query.get(subTaskId)
    if not subTask:
        raise InvalidUsage(_('sub task {0} not found').format(subTaskId), 404)
    batches = m.Batch.query.filter_by(subTaskId=subTaskId).all()
    itemCount = 0
    for batch in batches:
        for page in batch.pages:
            itemCount += len(page.memberEntries)
            for member in page.memberEntries:
                SS.delete(member)
            SS.delete(page)
        SS.delete(batch)
    me = session['current_user']
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    # add tracking information for the bulk unbatch
    SS.add(m.TrackingEvent(eventType='unbatch_all', userId=me.userId,
        tTriggeredAt=now, hostIp=request.environ['REMOTE_ADDR'],
        details=dict(taskId=subTask.taskId, subTaskId=subTaskId,
            count=len(batches))))
    # add content event: items removed from this sub task
    SS.add(m.SubTaskContentEvent(subTaskId=subTaskId, isAdding=False,
        tProcessedAt=now, itemCount=itemCount, operator=me.userId))
    SS.flush()
    return jsonify({
        'message': _('deleted {0} batches from sub task {1}').format(
            len(batches), subTaskId),
    })
def create_new_batches(subTaskId):
    """Batches all new, not-yet-batched raw pieces of the sub task's
    task into this WORK sub task.

    Raises InvalidUsage when the sub task does not exist or is not of
    work type WORK.
    """
    subTask = m.SubTask.query.get(subTaskId)
    if not subTask:
        raise InvalidUsage(_('sub task {0} not found').format(subTaskId))
    if subTask.workType != m.WorkType.WORK:
        raise InvalidUsage(
            _('target sub task must be of type {0}').format(m.WorkType.WORK))
    # TODO: implement this
    # select raw pieces flagged new that are not already referenced by
    # any page member of this task. NB: `== True` and `!= None` are
    # SQLAlchemy column expressions (IS TRUE / IS NOT NULL), not plain
    # Python comparisons - do not "fix" them to `is`.
    rawPieces = m.RawPiece.query.filter_by(taskId=subTask.taskId).filter(
        m.RawPiece.isNew == True).filter(
            m.RawPiece.rawPieceId.notin_(
                SS.query(m.PageMember.rawPieceId).filter_by(
                    taskId=subTask.taskId).filter(
                        m.PageMember.rawPieceId != None).distinct())).order_by(
                            m.RawPiece.rawPieceId).all()
    batches = Batcher.batch(subTask, rawPieces)
    for batch in batches:
        SS.add(batch)
    return jsonify({
        'message': _('created {0} batches').format(len(batches)),
    })
def update_sub_task_worker_settings(subTaskId, userId):
    """Updates (or creates) a user's worker settings on a sub task.

    Accepts hasReadInstructions/isNew/paymentFactor/removed fields.
    Raises InvalidUsage (404) for a missing sub task or user, and (400)
    when the sub task's work type does not allow workers.
    """
    subTask = m.SubTask.query.get(subTaskId)
    if not subTask:
        raise InvalidUsage(_('sub task {0} not found').format(subTaskId), 404)
    user = m.User.query.get(userId)
    if not user:
        raise InvalidUsage(_('user {0} not found').format(userId), 404)
    if not subTask._workType.workable:
        raise InvalidUsage("no workers allowed for this sub task", 400)
    data = MyForm(
        Field('hasReadInstructions', validators=[
            validators.is_bool,
        ]),
        Field('isNew', validators=[
            validators.is_bool,
        ]),
        Field('paymentFactor',
            normalizer=lambda data, key, value: float(value),
            validators=[
                (validators.is_number, (), dict(min_value=0)),
            ]),
        Field('removed', validators=[
            validators.is_bool,
        ]),
    ).get_data()
    worker = m.TaskWorker.query.get((userId, subTask.taskId, subTaskId))
    if not worker:
        # fixed: original constructed the new TaskWorker with only
        # taskId, omitting userId and subTaskId even though the lookup
        # key above is (userId, taskId, subTaskId)
        worker = m.TaskWorker(userId=userId, taskId=subTask.taskId,
            subTaskId=subTaskId, **data)
        SS.add(worker)
    else:
        for key in data:
            setattr(worker, key, data[key])
    SS.flush()
    return jsonify({
        'worker': m.TaskWorker.dump(worker),
    })
def update_country(self):
    """Handles an EDM 'update country' message.

    Applies the decoded changes to the local country matched by iso3;
    when no local country exists, fetches the full record from EDM and
    inserts it. Commits in either branch.
    """
    desc = json.loads(self.Message)
    iso3 = desc['iso3']
    try:
        country = m.Country.query.filter(m.Country.iso3 == iso3).one()
        data = util.edm.decode_changes('Country', desc['changes'])
        current_app.logger.info('found country {}, applying changes {}'.format(
            country.name, data))
        changes = {}
        for k, v in data.items():
            try:
                # only apply keys that are real Country attributes and
                # actually differ; AttributeError keys are skipped
                if getattr(country, k) != v:
                    setattr(country, k, v)
                    changes[k] = v
            except AttributeError:
                continue
        current_app.logger.debug('actual changes {}'.format(changes))
        SS.flush()
        SS.commit()
    except sqlalchemy.orm.exc.NoResultFound:
        # no local country - roll back the failed lookup, then build
        # the record from the authoritative EDM payload
        SS.rollback()
        current_app.logger.info(
            'country {} not found, get country from edm'.format(iso3))
        result = util.edm.get_country(iso3)
        data = dict(
            name=result['name_eng'],
            iso2=result['iso2'],
            iso3=iso3,
            isoNum=result['iso_num'],
            internet=result['internet'],
            active=result['active'],
        )
        country = m.Country(**data)
        SS.add(country)
        SS.flush()
        SS.commit()
        current_app.logger.info('country {} is added locally'.format(
            country.name))
    return
def update_language(self):
    """Handles an EDM 'update language' message.

    Applies the decoded changes to the local language matched by iso3;
    when no local language exists, fetches the full record from EDM and
    inserts it. Commits in either branch.
    """
    desc = json.loads(self.Message)
    iso3 = desc['iso3']
    try:
        lang = m.Language.query.filter(m.Language.iso3 == iso3).one()
        data = util.edm.decode_changes('Language', desc['changes'])
        current_app.logger.info(
            'found language {}, applying changes {}'.format(lang.name, data))
        changes = {}
        for k, v in data.items():
            try:
                # only apply keys that are real Language attributes and
                # actually differ; AttributeError keys are skipped
                if getattr(lang, k) != v:
                    setattr(lang, k, v)
                    changes[k] = v
            except AttributeError:
                continue
        current_app.logger.debug('actual changes {}'.format(changes))
        SS.flush()
        SS.commit()
    except sqlalchemy.orm.exc.NoResultFound:
        # no local language - roll back the failed lookup, then build
        # the record from the authoritative EDM payload
        SS.rollback()
        current_app.logger.info(
            'language {} not found, get language from edm'.format(iso3))
        result = util.edm.get_language(iso3)
        data = dict(
            name=result['name_eng'],
            iso2=result['iso2'],
            iso3=iso3,
            active=result['active'],
        )
        lang = m.Language(**data)
        SS.add(lang)
        SS.flush()
        SS.commit()
        current_app.logger.info('language {} is added locally'.format(
            lang.name))
    return