def create_campaign(self, jsdata):
    """Create a new chained campaign from a JSON string and sync related DBs.

    Refuses input that carries a CouchDB `_rev` (this is a create, not an
    update). Returns a results dict with the new prepid on success.
    """
    data = threaded_loads(jsdata)
    db = database('chained_campaigns')
    if '_rev' in data:
        return {"results": " cannot create from a json with _rev"}
    try:
        # reuse the already-parsed payload instead of parsing jsdata twice
        ccamp = chained_campaign(json_input=data)
    except chained_campaign('').IllegalAttributeName as ex:
        return {"results": False, "message": str(ex)}
    self.logger.log('Creating new chained_campaign %s...' % (ccamp.get_attribute('prepid')))
    ccamp.set_attribute("_id", ccamp.get_attribute("prepid"))
    if not ccamp.get_attribute("_id"):  # or self.db.document_exists(ccamp.get_attribute("_id")):
        # the condition detects a missing prepid/_id; the previous message
        # wrongly claimed the campaign "already exists"
        self.logger.error('No prepid given for the chained campaign. Cannot create it.')
        return {"results": False,
                "message": 'Error: no prepid was provided for the chained campaign'}
    # update history
    ccamp.update_history({'action': 'created'})
    saved = db.save(ccamp.json())
    # keep the actions and campaigns DBs in sync with the new chained campaign
    self.update_actions(ccamp)
    self.update_campaigns(ccamp)
    if saved:
        return {"results": True, "prepid": ccamp.get_attribute("prepid")}
    else:
        return {"results": False, "message": "could not save to DB"}
def PUT(self):
    """Update the action with the provided JSON content."""
    payload = threaded_loads(cherrypy.request.body.read().strip())
    outcome = self.import_action(payload)
    return dumps(outcome)
def create_campaign(self, data):
    """Create a new campaign from a JSON string; returns {"results": bool}."""
    db = database('campaigns')
    try:
        camp_mcm = campaign(json_input=threaded_loads(data))
    except campaign.IllegalAttributeName:
        return {"results": False}
    prepid = camp_mcm.get_attribute('prepid')
    if not prepid:
        self.logger.error('Invalid prepid: Prepid returned None')
        return {"results": False}
    if '_' in prepid:
        self.logger.error('Invalid campaign name %s' % (prepid))
        return {"results": False}
    camp_mcm.set_attribute('_id', prepid)
    camp_mcm.update_history({'action': 'created'})
    # this is a create, not an update: refuse if the document already exists
    if db.document_exists(prepid):
        return {"results": False}
    if not db.save(camp_mcm.json()):
        self.logger.error('Could not save object to database')
        return {"results": False}
    # every campaign gets a dedicated chained campaign
    self.create_chained_campaign(camp_mcm.get_attribute('_id'), db)
    return {"results": True}
def import_request(self, data):
    """Build an action from JSON by hand and update it in the actions DB."""
    adb = database(self.db_name)
    try:
        mcm_a = action(json_input=threaded_loads(data))
    except request.IllegalAttributeName:
        # note: this early-exit path returns a serialized string, unlike the
        # dict returns below — preserved as-is
        return dumps({"results": False})
    self.logger.log('Building new action %s by hand...' % (mcm_a.get_attribute('_id')))
    priority_set = mcm_a.inspect_priority()
    prepid = mcm_a.get_attribute('prepid')
    if not adb.update(mcm_a.json()):
        return {"results": False, "prepid": prepid}
    if priority_set:
        return {"results": True, "prepid": prepid}
    return {"results": False, "prepid": prepid,
            "message": "Priorities not set properly"}
def PUT(self):
    """Annouce a given batch id, with the provided notes in json content"""
    payload = threaded_loads(cherrypy.request.body.read().strip())
    return dumps(self.announce(payload))
def create_campaign(self, data):
    """Create a new campaign document from a JSON string."""
    db = database('campaigns')
    try:
        camp_mcm = campaign(json_input=threaded_loads(data))
    except campaign.IllegalAttributeName:
        return {"results": False}
    prepid = camp_mcm.get_attribute('prepid')
    if not prepid:
        self.logger.error('Invalid prepid: Prepid returned None')
        return {"results": False}
    if '_' in prepid:
        self.logger.error('Invalid campaign name %s' % (prepid))
        return {"results": False}
    camp_mcm.set_attribute('_id', prepid)
    camp_mcm.update_history({'action': 'created'})
    # create, not update: bail out when the document is already there
    if db.document_exists(prepid):
        return {"results": False}
    if not db.save(camp_mcm.json()):
        self.logger.error('Could not save object to database')
        return {"results": False}
    # each campaign gets its own dedicated chained campaign
    self.create_chained_campaign(camp_mcm.get_attribute('_id'), db)
    return {"results": True}
def notify(self, body):
    """E-mail the contacts of the PWG named in the JSON body."""
    db = database('users')
    data = threaded_loads(body)
    recipients = [row["value"]
                  for row in db.raw_query('pwg-mail', {'key': data["pwg"]})]
    communicator().sendMail(recipients, data["subject"], data["content"],
                            user_pack().get_email())
    return {'results': True,
            'message': 'Sent message to {0}'.format(recipients)}
def update_campaign(self, data):
    """Update an existing campaign from a JSON string (must carry a _rev)."""
    # NOTE(review): `data` is still the raw JSON string here, so this is a
    # substring test; it works because a serialized document with a revision
    # contains the literal '_rev'
    if '_rev' not in data:
        return {"results": False,
                'message': 'There is no previous revision provided'}
    try:
        camp_mcm = campaign(json_input=threaded_loads(data))
    except campaign.IllegalAttributeName:
        return {"results": False}
    if not camp_mcm.get_attribute('prepid') and not camp_mcm.get_attribute('_id'):
        raise ValueError('Prepid returned was None')
    # cast schema evolution of sequences
    sequences = camp_mcm.get_attribute('sequences')
    for steps in sequences:
        for label in steps:
            steps[label] = sequence(steps[label]).json()
    camp_mcm.set_attribute('sequences', sequences)
    # create dedicated chained campaign
    self.create_chained_campaign(camp_mcm.get_attribute('_id'),
                                 camp_mcm.get_attribute('root'))
    camp_mcm.update_history({'action': 'update'})
    return self.save_campaign(camp_mcm)
def flow2(self, data):
    """Attempt to flow a chained request to its next step.

    The JSON payload must contain 'prepid'; it may also carry
    'input_dataset', 'block_black_list', 'block_white_list', a 'force'
    flag (skips the stats check) and a 'reserve' flag (reserve instead
    of flowing).
    """
    try:
        vdata = threaded_loads(data)
    except ValueError as ex:
        self.logger.error('Could not start flowing to next step. Reason: %s' % (ex))
        return {"results": str(ex)}
    db = database('chained_requests')
    try:
        creq = chained_request(json_input=db.get(vdata['prepid']))
    except Exception as ex:
        self.logger.error('Could not initialize chained_request object. Reason: %s' % (ex))
        return {"results": str(ex)}
    self.logger.log('Attempting to flow to next step for chained_request %s' %
                    (creq.get_attribute('_id')))
    # if the chained_request can flow, do it
    inputds = vdata.get('input_dataset', '')
    inblack = vdata.get('block_black_list', [])
    inwhite = vdata.get('block_white_list', [])
    # default to checking stats; previously check_stats was only assigned
    # inside the 'force' branch and raised UnboundLocalError otherwise
    check_stats = True
    if 'force' in vdata:
        check_stats = vdata['force'] != 'force'
    if 'reserve' in vdata and vdata["reserve"]:
        return creq.reserve()
    return creq.flow_trial(inputds, inblack, inwhite, check_stats)
def PUT(self):
    """Create the mccm with the provided json content"""
    try:
        mccm_d = mccm(threaded_loads(cherrypy.request.body.read().strip()))
    except Exception as e:
        # do NOT touch mccm_d here: it is unbound when parsing failed, and
        # the previous logger.error(mccm_d.json()) raised NameError
        self.logger.error("Something went wrong with loading the mccm data:\n {0}".format(e))
        return dumps({"results": False,
                      "message": "Something went wrong with loading the mccm data:\n {0}".format(e)})
    if not mccm_d.get_attribute('prepid'):
        self.logger.error('Non-existent prepid')
        return dumps({"results": False, "message": "The mccm ticket has no id!"})
    db = database('mccms')
    if mccm_d.get_attribute('prepid') == mccm_d.get_attribute('pwg'):
        # need to complete the pwg
        mccm_d.set_attribute('prepid', self.fill_id(mccm_d.get_attribute('pwg'), db))
    elif db.document_exists(mccm_d.get_attribute('prepid')):
        return dumps({"results": False,
                      "message": "Mccm document {0} already exists".format(
                          mccm_d.get_attribute('prepid'))})
    mccm_d.set_attribute('_id', mccm_d.get_attribute('prepid'))
    mccm_d.set_attribute('meeting', mccm.get_meeting_date().strftime("%Y-%m-%d"))
    mccm_d.update_history({'action': 'created'})
    self.logger.log('Saving mccm {0}'.format(mccm_d.get_attribute('prepid')))
    return dumps({"results": db.save(mccm_d.json()),
                  "prepid": mccm_d.get_attribute('prepid')})
def import_request(self, data):
    """Create (or seed from an existing prepid) a chained request from JSON."""
    db = database(self.db_name)
    json_input = threaded_loads(data)
    if 'pwg' not in json_input or 'member_of_campaign' not in json_input:
        self.logger.error('Now pwg or member of campaign attribute for new chained request')
        return {"results": False}
    if 'prepid' in json_input:
        req = chained_request(json_input)
        cr_id = req.get_attribute('prepid')
    else:
        # allocate the next chained-request id for this pwg/campaign pair
        cr_id = ChainedRequestPrepId().next_id(json_input['pwg'],
                                               json_input['member_of_campaign'])
        if not cr_id:
            return {"results": False}
        req = chained_request(db.get(cr_id))
    # carry over everything except identity/bookkeeping fields
    for key in json_input:
        if key not in ['prepid', '_id', '_rev', 'history']:
            req.set_attribute(key, json_input[key])
    if not req.get_attribute('prepid'):
        self.logger.error('prepid returned was None')
        raise ValueError('Prepid returned was None')
    self.logger.log('Created new chained_request %s' % cr_id)
    # update history with the submission details
    req.update_history({'action': 'created'})
    return self.save_request(db, req)
def commitOne(self, doc):
    """POST a single document to CouchDB; _id may be set inside the document."""
    req = self.construct_request("%s" % self.__dbname, method='POST', data=doc)
    response = self.opener.open(req)
    return threaded_loads(response.read())
def update(self, body):
    """Update an existing setting, enforcing revision match and editability."""
    data = threaded_loads(body)
    db = database('settings')
    if '_rev' not in data:
        self.logger.error('Could not locate the CouchDB revision number in object: %s' % data)
        return {"results": False,
                'message': 'could not locate revision number in the object'}
    if not db.document_exists(data['_id']):
        return {"results": False,
                'message': 'mccm %s does not exist' % (data['_id'])}
    if db.get(data['_id'])['_rev'] != data['_rev']:
        return {"results": False, 'message': 'revision clash'}
    new_version = setting(json_input=data)
    if not new_version.get_attribute('prepid') and not new_version.get_attribute('_id'):
        self.logger.error('Prepid returned was None')
        raise ValueError('Prepid returned was None')
    # operate a check on whether each non-editable field stayed untouched
    previous_version = setting(db.get(new_version.get_attribute('prepid')))
    for key, right in previous_version.get_editable().items():
        if right:
            continue  # editable fields need no inspection
        if previous_version.get_attribute(key) != new_version.get_attribute(key):
            self.logger.error('Illegal change of parameter, %s: %s vs %s : %s' % (
                key, previous_version.get_attribute(key),
                new_version.get_attribute(key), right))
            return {"results": False,
                    'message': 'Illegal change of parameter %s' % key}
    self.logger.log('Updating setting %s...' % (new_version.get_attribute('prepid')))
    return {"results": settings().set(new_version.get_attribute('prepid'),
                                      new_version.json())}
def create_flow(self, jsdata):
    """Create a new flow from JSON and propagate it to the relevant campaigns.

    Refuses input with a `_rev` (create, not update). Returns a results dict.
    """
    cdb = database('campaigns')
    db = database(self.db_name)
    data = threaded_loads(jsdata)
    if '_rev' in data:
        return {"results": 'Cannot create a flow with _rev'}
    try:
        f = flow(json_input=data)
    except flow.IllegalAttributeName:
        return {"results": False}
    except ValueError as ex:
        self.logger.error('Could not initialize flow object. Reason: %s' % ex)
        return {"results": False}
    if not f.get_attribute('prepid'):
        self.logger.error('prepid is not defined.')
        return {"results": False, 'message': 'Error: PrepId was not defined.'}
    f.set_attribute('_id', f.get_attribute('prepid'))
    # de-duplicate the allowed campaigns if duplicates were passed by mistake
    # (the length was previously compared to the list itself, not its length,
    # so the condition was always true)
    allowed = f.get_attribute('allowed_campaigns')
    if len(set(allowed)) != len(allowed):
        f.set_attribute('allowed_campaigns', list(set(allowed)))
    self.logger.log('Creating new flow %s ...' % (f.get_attribute('_id')))
    nc = f.get_attribute('next_campaign')
    result = self.are_campaigns_correct(nc, f.get_attribute('allowed_campaigns'), cdb)
    if result is not True:
        return result
    # adjust the request parameters based on what was provided as next campaign
    self.set_default_request_parameters(nc, cdb, f)
    # update history
    f.update_history({'action': 'created'})
    # save the flow to db
    if not db.save(f.json()):
        self.logger.error('Could not save newly created flow %s to database.' %
                          (f.get_attribute('_id')))
        return {"results": False}
    # return right away instead of trying and failing on missing next or allowed
    if not nc or not len(f.get_attribute('allowed_campaigns')):
        return {"results": True}
    # update all relevant campaigns with the "Next" parameter
    return self.update_derived_objects(flow().json(), f.json())
def FtiSearch(self, viewname, options=None, get_raw=False):
    """Query a CouchDB full-text index with optional query parameters.

    Returns the raw response body when get_raw is true, otherwise the
    decoded JSON.
    """
    # guard against options=None before probing for a key; the bare
    # membership test raised TypeError with the default argument
    if options and "key" in options:
        options["key"] = '"' + str(options["key"]) + '"'
    db_request = self.construct_request(
        "%s/%s&%s" % (self.__dbname, viewname, self.to_json_query(options)))
    data = self.opener.open(db_request)
    return data.read() if get_raw else threaded_loads(data.read())
def PUT(self):
    """Clear selected invalidations without announcing"""
    input_data = threaded_loads(request.body.read().strip())
    if input_data:
        return self.clear(input_data)
    return dumps({"results": False, "message": "No elements selected"})
def UpdateSequence(self, options=None):
    """Return the database update-sequence number."""
    opts = {} if options is None else options
    opts["_info"] = True
    req = self.construct_request("%s?%s" % (self.__dbname, self.to_json_query(opts)))
    response = self.opener.open(req)
    return threaded_loads(response.read())["update_seq"]
def create_campaign(self, jsdata):
    """Create a new chained campaign from JSON and sync the related DBs.

    Refuses payloads carrying a CouchDB `_rev`.
    """
    data = threaded_loads(jsdata)
    db = database('chained_campaigns')
    if '_rev' in data:
        return {"results": " cannot create from a json with _rev"}
    try:
        # reuse the already-parsed payload instead of parsing jsdata twice
        ccamp = chained_campaign(json_input=data)
    except chained_campaign('').IllegalAttributeName as ex:
        return {"results": False, "message": str(ex)}
    self.logger.log('Creating new chained_campaign %s...' % (ccamp.get_attribute('prepid')))
    ccamp.set_attribute("_id", ccamp.get_attribute("prepid"))
    if not ccamp.get_attribute("_id"):  # or self.db.document_exists(ccamp.get_attribute("_id")):
        # the condition detects a missing prepid/_id; the previous message
        # wrongly claimed the campaign "already exists"
        self.logger.error('No prepid given for the chained campaign. Cannot create it.')
        return {"results": False,
                "message": 'Error: no prepid was provided for the chained campaign'}
    # update history
    ccamp.update_history({'action': 'created'})
    saved = db.save(ccamp.json())
    # keep the actions and campaigns DBs in sync with the new chained campaign
    self.update_actions(ccamp)
    self.update_campaigns(ccamp)
    if saved:
        return {"results": True, "prepid": ccamp.get_attribute("prepid")}
    else:
        return {"results": False, "message": "could not save to DB"}
def PUT(self):
    """Announce selected invalidations to Data OPS"""
    input_data = threaded_loads(request.body.read().strip())
    # log message text preserved verbatim (including the original typo)
    self.logger.error("invaldations input: %s" % (input_data))
    if input_data:
        return self.announce(input_data)
    return dumps({"results": False, "message": "No elements selected"})
def PUT(self):
    """Update the content of a batch given the json content"""
    bdb = database('batches')
    payload = threaded_loads(cherrypy.request.body.read().strip())
    bdb.update(batch(payload).json())
def PUT(self):
    """Update the content of a batch given the json content"""
    batch_db = database('batches')
    body = cherrypy.request.body.read().strip()
    batch_db.update(batch(threaded_loads(body)).json())
def update_new(self, data):
    """Update an existing news item from a JSON string."""
    try:
        news_data = threaded_loads(data)
    except Exception as ex:
        return {"results": False, 'message': str(ex)}
    db = database('news')
    if not db.document_exists(news_data['_id']):
        # index the parsed document: indexing the raw string (data['_id'])
        # raised "string indices must be integers"
        return {"results": False,
                'message': 'new %s does not exist in News DB' % news_data['_id']}
    return {"results": db.update(news_data)}
def loadView(self, viewname, options=None, get_raw=False):
    """Query a CouchDB view, optionally with query parameters.

    Returns the raw body when get_raw is true, otherwise decoded JSON.
    """
    if options is None:
        db_request = self.construct_request("%s/%s" % (self.__dbname, viewname))
    else:
        db_request = self.construct_request(
            "%s/%s?%s" % (self.__dbname, viewname, self.to_json_query(options)))
    data = self.opener.open(db_request)
    return data.read() if get_raw else threaded_loads(data.read())
def UpdateSequence(self, options=None):
    """get database update sequence information"""
    if options is None:
        options = {}
    options["_info"] = True
    query = self.to_json_query(options)
    response = self.opener.open(
        self.construct_request("%s?%s" % (self.__dbname, query)))
    return threaded_loads(response.read())["update_seq"]
def PUT(self, *args):
    """Remove tag from the list."""
    db = database('searchable')
    payload = threaded_loads(cherrypy.request.body.read().strip())
    tag = payload["tag"]
    doc = db.get("tags")
    # removing a tag that is not present is a silent no-op
    if tag in doc["list"]:
        doc["list"].remove(tag)
    return dumps({"results": db.save(doc)})
def FtiSearch(self, viewname, options=None, get_raw=False):
    """Query a CouchDB full-text index with optional query parameters.

    Returns the raw response body when get_raw is true, otherwise the
    decoded JSON.
    """
    # guard against options=None before probing for a key; the bare
    # membership test raised TypeError with the default argument
    if options and "key" in options:
        options["key"] = '"' + str(options["key"]) + '"'
    db_request = self.construct_request(
        "%s/%s&%s" % (self.__dbname, viewname, self.to_json_query(options)))
    data = self.opener.open(db_request)
    return data.read() if get_raw else threaded_loads(data.read())
def PUT(self):
    """Send a message to data operations in the announcement thread of a batch."""
    data = threaded_loads(cherrypy.request.body.read().strip())
    if 'prepid' not in data or 'notes' not in data:
        raise ValueError('no prepid nor notes in batch announcement api')
    bid = data['prepid']
    if not self.bdb.document_exists(bid):
        return dumps({"results": False,
                      "message": "%s is not a valid batch name" % bid})
    return dumps(self.notify_batch(bid, data['notes']))
def document(self, id, rev=None):
    """Fetch a single document from CouchDB, optionally at a given revision."""
    # parameter name `id` kept for caller compatibility (shadows the builtin)
    doc_id = id
    if rev is None:
        req = self.construct_request("%s/%s" % (self.__dbname, doc_id))
    else:
        req = self.construct_request("%s/%s?rev=%s" % (self.__dbname, doc_id, rev))
    response = self.opener.open(req)
    return threaded_loads(response.read())
def PUT(self):
    """Update several actions at once from the provided JSON content."""
    self.logger.log('Updating multiple actions')
    payload = threaded_loads(cherrypy.request.body.read().strip())
    results = [self.import_action(single_action) for single_action in payload]
    return dumps({"results": results})
def PUT(self):
    """Save the content of a batch given the json content"""
    bdb = database('batches')
    data = threaded_loads(cherrypy.request.body.read().strip())
    # drop a stale revision if present; this is a create, not an update.
    # (pop without a default raised KeyError on documents with no _rev)
    data.pop('_rev', None)
    mcm_b = batch(data)
    bdb.save(mcm_b.json())
def PUT(self):
    """Update a multiple number of actions at the same time from the provided json content"""
    self.logger.log('Updating multiple actions')
    actions = threaded_loads(cherrypy.request.body.read().strip())
    outcome = []
    for one_action in actions:
        outcome.append(self.import_action(one_action))
    return dumps({"results": outcome})
def PUT(self):
    """Save the content of a batch given the json content"""
    bdb = database('batches')
    data = threaded_loads(cherrypy.request.body.read().strip())
    # drop a stale revision if present; this is a create, not an update.
    # (pop without a default raised KeyError on documents with no _rev)
    data.pop('_rev', None)
    mcm_b = batch(data)
    bdb.save(mcm_b.json())
def PUT(self):
    """Save the information about a given user"""
    db = database(self.db_name)
    data = threaded_loads(cherrypy.request.body.read().strip())
    new_user = user(data)
    # presence of _rev distinguishes an update from a first-time save
    if '_rev' in data:
        new_user.update_history({'action': 'updated'})
        return dumps({"results": db.update(new_user.json())})
    new_user.update_history({'action': 'created'})
    return dumps({"results": db.save(new_user.json())})
def notify(self, body):
    """Send an e-mail to the contacts of the PWG given in the JSON body."""
    db = database('users')
    data = threaded_loads(body)
    list_of_mails = []
    for row in db.raw_query('pwg-mail', {'key': data["pwg"]}):
        list_of_mails.append(row["value"])
    com = communicator()
    com.sendMail(list_of_mails, data["subject"], data["content"],
                 user_pack().get_email())
    return {'results': True,
            'message': 'Sent message to {0}'.format(list_of_mails)}
def document(self, id, rev=None):
    """get single document from couchDB"""
    # `id` kept as a parameter name for caller compatibility
    doc_id = id
    path = ("%s/%s" % (self.__dbname, doc_id) if rev is None
            else "%s/%s?rev=%s" % (self.__dbname, doc_id, rev))
    response = self.opener.open(self.construct_request(path))
    return threaded_loads(response.read())
def POST(self):
    """Chained multi-search across DBs with hand-rolled pagination.

    Every search in 'searches' except the last feeds its return_field
    values into the next one; the last search is paginated with
    'limit'/'page' (page == -1 means return everything).
    """
    try:
        search_dicts = threaded_loads(cherrypy.request.body.read().strip())
    except TypeError:
        return dumps({"results": False, "message": "Couldn't read body of request"})
    limit = int(search_dicts.get('limit', 20))
    page = int(search_dicts.get('page', 0))
    if page == -1:
        limit = 1000000000
        skip = 0
    else:
        skip = limit * page
    previous = []
    for search in search_dicts['searches'][:-1]:
        prev_len = len(previous) or 1
        new_previous = []
        flatten = self.type_dict[search['db_name']][search['return_field']] == list
        for i in range(0, prev_len, 100):
            self.__add_previous_to_search(search, previous[i:i + 100], i)
            res = [x[search['return_field']]
                   for x in self.search(search['search'], search['db_name'])]
            new_previous.extend([item for x in res for item in x] if flatten else res)
        previous = list(set(new_previous))
    search = search_dicts['searches'][-1]
    # NOTE(review): unlike the loop above there is no `or 1` fallback here,
    # so with a single search `previous` is empty and the loop below never
    # runs — confirm this is intended
    prev_len = len(previous)
    res = []
    current_len = 0
    subskip = 0
    start_adding = False
    # pagination by hand (so the whole thing won't break because of super-long queries)
    # MIGHT CAUSE DUPLICATIONS OF DOCUMENTS IN RESULTS!
    for i in range(0, prev_len, 100):
        self.__add_previous_to_search(search, previous[i:i + 100], i)
        partial_result = self.search(search['search'], search['db_name'])
        current_len += len(partial_result)
        if start_adding:
            subskip += len(partial_result)
            res.extend(partial_result)
        if current_len >= skip and not start_adding:
            subskip = current_len - skip
            start_adding = True
            res.extend(partial_result)
        if current_len >= skip + limit:
            break
    return dumps({"results":
                  res[-subskip:len(res) - subskip + limit] if page != -1 else res})
def loadView(self, viewname, options=None, get_raw=False):
    """query couchDB view with optional query parameters"""
    if options is None:
        path = "%s/%s" % (self.__dbname, viewname)
    else:
        path = "%s/%s?%s" % (self.__dbname, viewname, self.to_json_query(options))
    response = self.opener.open(self.construct_request(path))
    body = response.read()
    return body if get_raw else threaded_loads(body)
def PUT(self):
    """This allows to send a message to data operation in the same thread of the announcement of a given batch"""
    data = threaded_loads(cherrypy.request.body.read().strip())
    if 'prepid' not in data or 'notes' not in data:
        raise ValueError('no prepid nor notes in batch announcement api')
    bid = data['prepid']
    if not self.bdb.document_exists(bid):
        return dumps({"results": False,
                      "message": "%s is not a valid batch name" % bid})
    return dumps(self.notify_batch(bid, data['notes']))
def commit(self, doc=None):
    """Commit the queued documents to the DB via _bulk_docs.

    If a single doc is given it is appended to the queue first. Returns
    the decoded server response, or None when the queue is empty.
    """
    if doc is not None:
        self.queue(doc)
    if len(self.__queue) == 0:
        return
    to_send = dict()
    to_send['docs'] = list(self.__queue)
    # send the accumulated bulk payload; previously the single `doc`
    # argument was posted instead and `to_send` was never used
    db_request = self.construct_request("%s/_bulk_docs/" % (self.__dbname),
                                        method='POST', data=to_send)
    retval = self.opener.open(db_request)
    self.reset_queue()
    # read the response body before decoding (retval is a file-like object,
    # matching commitOne)
    return threaded_loads(retval.read())
def PUT(self):
    """Create the mccm with the provided json content"""
    try:
        mccm_d = mccm(threaded_loads(cherrypy.request.body.read().strip()))
    except Exception as e:
        # do NOT touch mccm_d here: it is unbound when parsing failed, and
        # the previous logger.error(mccm_d.json()) raised NameError
        self.logger.error("Something went wrong with loading the mccm data:\n {0}".format(e))
        return dumps({"results": False,
                      "message": "Something went wrong with loading the mccm data:\n {0}".format(e)})
    if not mccm_d.get_attribute('prepid'):
        self.logger.error('Non-existent prepid')
        return dumps({"results": False, "message": "The mccm ticket has no id!"})
    db = database('mccms')
    if mccm_d.get_attribute('prepid') == mccm_d.get_attribute('pwg'):
        # need to complete the pwg
        mccm_d.set_attribute('prepid', self.fill_id(mccm_d.get_attribute('pwg'), db))
    elif db.document_exists(mccm_d.get_attribute('prepid')):
        return dumps({"results": False,
                      "message": "Mccm document {0} already exists".format(
                          mccm_d.get_attribute('prepid'))})
    mccm_d.set_attribute('_id', mccm_d.get_attribute('prepid'))
    mccm_d.set_attribute('meeting', mccm.get_meeting_date().strftime("%Y-%m-%d"))
    mccm_d.update_history({'action': 'created'})
    self.logger.log('Saving mccm {0}'.format(mccm_d.get_attribute('prepid')))
    return dumps({"results": db.save(mccm_d.json()),
                  "prepid": mccm_d.get_attribute('prepid')})
def create_flow(self, jsdata):
    """Create a new flow from JSON and propagate it to the relevant campaigns.

    Refuses input with a `_rev` (create, not update). Returns a results dict.
    """
    cdb = database('campaigns')
    db = database(self.db_name)
    data = threaded_loads(jsdata)
    if '_rev' in data:
        return {"results": 'Cannot create a flow with _rev'}
    try:
        f = flow(json_input=data)
    except flow.IllegalAttributeName:
        return {"results": False}
    except ValueError as ex:
        self.logger.error('Could not initialize flow object. Reason: %s' % ex)
        return {"results": False}
    if not f.get_attribute('prepid'):
        self.logger.error('prepid is not defined.')
        return {"results": False, 'message': 'Error: PrepId was not defined.'}
    f.set_attribute('_id', f.get_attribute('prepid'))
    # de-duplicate the allowed campaigns if duplicates were passed by mistake
    # (the length was previously compared to the list itself, not its length,
    # so the condition was always true)
    allowed = f.get_attribute('allowed_campaigns')
    if len(set(allowed)) != len(allowed):
        f.set_attribute('allowed_campaigns', list(set(allowed)))
    self.logger.log('Creating new flow %s ...' % (f.get_attribute('_id')))
    nc = f.get_attribute('next_campaign')
    result = self.are_campaigns_correct(nc, f.get_attribute('allowed_campaigns'), cdb)
    if result is not True:
        return result
    # adjust the request parameters based on what was provided as next campaign
    self.set_default_request_parameters(nc, cdb, f)
    # update history
    f.update_history({'action': 'created'})
    # save the flow to db
    if not db.save(f.json()):
        self.logger.error('Could not save newly created flow %s to database.' %
                          (f.get_attribute('_id')))
        return {"results": False}
    # return right away instead of trying and failing on missing next or allowed
    if not nc or not len(f.get_attribute('allowed_campaigns')):
        return {"results": True}
    # update all relevant campaigns with the "Next" parameter
    return self.update_derived_objects(flow().json(), f.json())
def commit(self, doc=None):
    """Commit the queued documents to the DB via _bulk_docs.

    If a single doc is given it is appended to the queue first. Returns
    the decoded server response, or None when the queue is empty.
    """
    if doc is not None:
        self.queue(doc)
    if len(self.__queue) == 0:
        return
    to_send = dict()
    to_send['docs'] = list(self.__queue)
    # send the accumulated bulk payload; previously the single `doc`
    # argument was posted instead and `to_send` was never used
    db_request = self.construct_request("%s/_bulk_docs/" % (self.__dbname),
                                        method='POST', data=to_send)
    retval = self.opener.open(db_request)
    self.reset_queue()
    # read the response body before decoding (retval is a file-like object,
    # matching commitOne)
    return threaded_loads(retval.read())
def update_request(self, data):
    """Update an existing chained request from a JSON string."""
    try:
        req = chained_request(json_input=threaded_loads(data))
    except chained_request.IllegalAttributeName:
        return {"results": False}
    if not req.get_attribute('prepid') and not req.get_attribute('_id'):
        self.logger.error('prepid returned was None')
        raise ValueError('Prepid returned was None')
    self.logger.log('Updating chained_request %s' % (req.get_attribute('_id')))
    # (removed a leftover "wtf ..." debug log of the approval attribute)
    # record the update in the document history
    req.update_history({'action': 'update'})
    return self.save_request(req)
def update(self, body):
    """Create a new setting; refuses revisions and duplicate identifiers."""
    data = threaded_loads(body)
    db = database('settings')
    # messages below previously read "could save ..." — missing the negation
    if '_rev' in data:
        return {"results": False,
                'message': 'cannot save an object with a revision'}
    if '_id' in data and db.document_exists(data['_id']):
        return {"results": False,
                'message': 'setting %s already exists.' % (data['_id'])}
    if 'prepid' in data and db.document_exists(data['prepid']):
        return {"results": False,
                'message': 'setting %s already exists.' % (data['prepid'])}
    if 'prepid' not in data and '_id' not in data:
        return {"results": False,
                'message': 'cannot save an object with no name'}
    new_setting = setting(data)
    return {"results": settings().add(new_setting.get_attribute('prepid'),
                                      new_setting.json())}
def update_flow(self, jsdata):
    """Update an existing flow (requires _rev) and refresh derived objects."""
    cdb = database('campaigns')
    db = database(self.db_name)
    data = threaded_loads(jsdata)
    if '_rev' not in data:
        return {"results": "Cannot update without _rev"}
    try:
        f = flow(json_input=data)
    except flow.IllegalAttributeName as ex:
        return {"results": str(ex)}
    if not f.get_attribute('prepid') and not f.get_attribute('_id'):
        self.logger.error('prepid returned was None')
        raise ValueError('Prepid returned was None')
    # keep the pre-update document to find out what the change is
    old = db.get(f.get_attribute('_id'))
    # de-duplicate the allowed campaigns if duplicates were passed by mistake
    # (the length was previously compared to the list itself, not its length,
    # so the condition was always true)
    allowed = f.get_attribute('allowed_campaigns')
    if len(set(allowed)) != len(allowed):
        f.set_attribute('allowed_campaigns', list(set(allowed)))
    nc = f.get_attribute('next_campaign')
    result = self.are_campaigns_correct(nc, f.get_attribute('allowed_campaigns'), cdb)
    if result is not True:
        return result
    # adjust the request parameters based on what was provided as next campaign
    self.set_default_request_parameters(nc, cdb, f)
    # update history
    f.update_history({'action': 'update'})
    # save to db
    if not db.update(f.json()):
        self.logger.error('Could not update flow {0}.'.format(f.get_attribute('_id')))
        return {'results': False}
    return self.update_derived_objects(old, f.json())
def import_request(self, data):
    """Build an action from JSON by hand and update it in the actions DB."""
    adb = database(self.db_name)
    try:
        mcm_a = action(json_input=threaded_loads(data))
    except request.IllegalAttributeName:
        # note: this early-exit returns a serialized string, unlike the
        # dict returns below — preserved as-is
        return dumps({"results": False})
    self.logger.log('Building new action %s by hand...' % (mcm_a.get_attribute('_id')))
    priority_set = mcm_a.inspect_priority()
    prepid = mcm_a.get_attribute('prepid')
    saved = adb.update(mcm_a.json())
    if not saved:
        return {"results": False, "prepid": prepid}
    if priority_set:
        return {"results": True, "prepid": prepid}
    return {"results": False, "prepid": prepid,
            "message": "Priorities not set properly"}
def create_new(self, data):
    """Create a news item: stamp author and date, mark unannounced, and save."""
    db = database('news')
    try:
        new_news = threaded_loads(data)
    except Exception:
        return {"results": False}
    new_news['author'] = user_pack().get_username()
    # timestamp with minute precision, e.g. 2014-02-03-15-04
    new_news['date'] = time.strftime('%Y-%m-%d-%H-%M')
    new_news['announced'] = False
    db.save(new_news)
    return {"results": True}
def update_campaign(self, jsdata):
    """Update an existing chained campaign (requires a _rev)."""
    db = database('chained_campaigns')
    data = threaded_loads(jsdata)
    if '_rev' not in data:
        return {"results": False}
    try:
        ccamp = chained_campaign(json_input=data)
    except chained_campaign('').IllegalAttributeName:
        return {"results": False}
    if not ccamp.get_attribute("_id"):
        self.logger.error('prepid returned was None')
        return {"results": False}
    self.logger.log('Updating chained_campaign %s ...' % (ccamp.get_attribute('_id')))
    # record the update in the document history
    ccamp.update_history({'action': 'updated'})
    return {"results": db.update(ccamp.json())}
def update_campaign(self, jsdata):
    """Update an existing chained campaign; the payload must carry a _rev."""
    chained_db = database('chained_campaigns')
    payload = threaded_loads(jsdata)
    if '_rev' not in payload:
        return {"results": False}
    try:
        ccamp = chained_campaign(json_input=payload)
    except chained_campaign('').IllegalAttributeName:
        return {"results": False}
    if not ccamp.get_attribute("_id"):
        self.logger.error('prepid returned was None')
        return {"results": False}
    self.logger.log('Updating chained_campaign %s ...' % (ccamp.get_attribute('_id')))
    # record the update in the document history
    ccamp.update_history({'action': 'updated'})
    return {"results": chained_db.update(ccamp.json())}
def update_flow(self, jsdata):
    """Update an existing flow (requires _rev) and refresh derived objects."""
    cdb = database('campaigns')
    db = database(self.db_name)
    data = threaded_loads(jsdata)
    if '_rev' not in data:
        return {"results": "Cannot update without _rev"}
    try:
        f = flow(json_input=data)
    except flow.IllegalAttributeName as ex:
        return {"results": str(ex)}
    if not f.get_attribute('prepid') and not f.get_attribute('_id'):
        self.logger.error('prepid returned was None')
        raise ValueError('Prepid returned was None')
    # keep the pre-update document to find out what the change is
    old = db.get(f.get_attribute('_id'))
    # de-duplicate the allowed campaigns if duplicates were passed by mistake
    # (the length was previously compared to the list itself, not its length,
    # so the condition was always true)
    allowed = f.get_attribute('allowed_campaigns')
    if len(set(allowed)) != len(allowed):
        f.set_attribute('allowed_campaigns', list(set(allowed)))
    nc = f.get_attribute('next_campaign')
    result = self.are_campaigns_correct(nc, f.get_attribute('allowed_campaigns'), cdb)
    if result is not True:
        return result
    # adjust the request parameters based on what was provided as next campaign
    self.set_default_request_parameters(nc, cdb, f)
    # update history
    f.update_history({'action': 'update'})
    # save to db
    if not db.update(f.json()):
        self.logger.error('Could not update flow {0}.'.format(f.get_attribute('_id')))
        return {'results': False}
    return self.update_derived_objects(old, f.json())
def POST(self):
    """Chained multi-search across DBs with hand-rolled pagination.

    Every search in 'searches' except the last feeds its return_field
    values into the next one; the last search is paginated with
    'limit'/'page' (page == -1 means return everything).
    """
    try:
        search_dicts = threaded_loads(cherrypy.request.body.read().strip())
    except TypeError:
        return dumps({"results": False, "message": "Couldn't read body of request"})
    limit = int(search_dicts.get('limit', 20))
    page = int(search_dicts.get('page', 0))
    if page == -1:
        limit = 1000000000
        skip = 0
    else:
        skip = limit * page
    previous = []
    for search in search_dicts['searches'][:-1]:
        prev_len = len(previous) or 1
        new_previous = []
        flatten = self.type_dict[search['db_name']][search['return_field']] == list
        for i in range(0, prev_len, 100):
            self.__add_previous_to_search(search, previous[i:i + 100], i)
            res = [x[search['return_field']]
                   for x in self.search(search['search'], search['db_name'])]
            new_previous.extend([item for x in res for item in x] if flatten else res)
        previous = list(set(new_previous))
    search = search_dicts['searches'][-1]
    # NOTE(review): unlike the loop above there is no `or 1` fallback here,
    # so with a single search `previous` is empty and the loop below never
    # runs — confirm this is intended
    prev_len = len(previous)
    res = []
    current_len = 0
    subskip = 0
    start_adding = False
    # pagination by hand (so the whole thing won't break because of super-long queries)
    # MIGHT CAUSE DUPLICATIONS OF DOCUMENTS IN RESULTS!
    for i in range(0, prev_len, 100):
        self.__add_previous_to_search(search, previous[i:i + 100], i)
        partial_result = self.search(search['search'], search['db_name'])
        current_len += len(partial_result)
        if start_adding:
            subskip += len(partial_result)
            res.extend(partial_result)
        if current_len >= skip and not start_adding:
            subskip = current_len - skip
            start_adding = True
            res.extend(partial_result)
        if current_len >= skip + limit:
            break
    return dumps({"results":
                  res[-subskip:len(res) - subskip + limit] if page != -1 else res})