def post(self):
    """Create a single Campaign from the JSON request body.

    Expects keys: name, month, startDate, endDate, idCurrency.
    Returns the freshly persisted campaign as JSON with 201, or an
    error payload with 400 on missing body or any failure.
    """
    requestDict = request.get_json()
    if not requestDict:
        return {'error': 'No input data provided'}, status.HTTP_400_BAD_REQUEST
    try:
        campaign = Campaign(
            name=requestDict['name'],
            month=requestDict['month'],
            startDate=requestDict['startDate'],
            endDate=requestDict['endDate'],
            idCurrency=requestDict['idCurrency'],
            active=1,
        )
        campaign.add(campaign)
        db.session.commit()
        # Re-read the row so the response reflects any DB-side defaults.
        result = Campaign.query.get(campaign.id).toJson()
        return result, status.HTTP_201_CREATED
    except Exception as e:
        # The original had separate SQLAlchemyError and Exception handlers
        # with byte-identical bodies; merged into one (SQLAlchemyError is a
        # subclass of Exception, so behavior is unchanged).
        db.session.rollback()
        return {'error': str(e)}, status.HTTP_400_BAD_REQUEST
def mutate(root, info, company_id, product_id, campaign_data):
    """Create a campaign attached to an existing company and product."""
    parent_company = CompanyModel.find_by_id(company_id)
    if not parent_company:
        raise Exception("Company not found!")
    parent_product = ProductModel.find_by_id(product_id)
    if not parent_product:
        raise Exception("Product not found!")
    new_campaign = CampaignModel(
        **campaign_data, product=parent_product, company=parent_company
    )
    new_campaign.save()
    return NewCampaign(campaign=new_campaign)
def get(self):
    """Fetch a single campaign by the ``id`` query-string parameter."""
    campaign_id = request.args.get("id")
    try:
        found = hdb.get(Campaign(), campaign_id)
    except Exception as error:
        gunicorn_logger.error(error)
        return "Could not retrieve campaign", 400
    return {"campaign": found}
def extract_campaign_data():
    """Build a Campaign from the request's query-string parameters.

    Persists a new Candidate_List first, then returns a Campaign
    referencing it. The Campaign itself is NOT added to the session here.
    """
    session = Session()
    name = request.args.get('campaign_name')
    campaign_state = request.args.get('status')
    raw_candidates = request.args.get('candidate_list')
    new_list = Candidate_List(raw_candidates)
    session.add(new_list)
    session.commit()
    return Campaign(name, campaign_state, new_list)
def post(self):
    """Create a campaign for the character named by the ``character`` query param."""
    character_id = request.args.get("character")
    try:
        character = hdb.get(Character(), character_id)
        game = Campaign()
        # Seed the campaign state from the character's base stats.
        payload = {
            "character": character,
            "current_hp": character["hp"],
            "current_xp": character["xp"],
            "currently_equiped": {},
            "current_pack": [],
        }
        gunicorn_logger.warning(payload)
        if not game.create(payload):
            return "Could not create campaign: request not valid", 400
        hdb.insert(game)
        return {"campaign": game.payload}
    except Exception as error:
        gunicorn_logger.error(error)
        return "Could not create campaign", 400
def new(self):
    """Parse campaign fields from the request and persist a new Campaign.

    Returns the new campaign's id as a string.
    """
    parser = reqparse.RequestParser()
    for required_field in ('name', 'gameMaster'):
        parser.add_argument(required_field, required=True)
    for list_field in ('players', 'characters', 'rules'):
        parser.add_argument(list_field, action='append')
    parsed = parser.parse_args(req=self.request)
    saved = Campaign.from_json(dumps(parsed)).save()
    return "{}".format(saved.id)
def create_campaign_from_json(json):
    """Build a Campaign (and its coupon codes) from a JSON dict.

    When an explicit 'coupons' list is supplied its length must equal
    'number_of_codes'; on mismatch, returns None. Otherwise the codes
    are generated.
    """
    campaign = Campaign(json['name'], json['max_uses_per_code'],
                        json['expiration_date'], json['desc'],
                        json['number_of_codes'])
    if 'coupons' in json and len(json['coupons']) > 0:
        # Debug output kept from the original, but the Python-2 `print x`
        # statements were SyntaxErrors under Python 3 (this file uses
        # f-strings elsewhere) — converted to print() calls.
        print(len(json['coupons']))
        print(json['number_of_codes'])
        if len(json['coupons']) != json['number_of_codes']:
            return None
        code_list = json['coupons']
        campaign.add_new_coupons(code_list)
    else:
        campaign.generate_new_coupons(json['number_of_codes'])
    return campaign
def post(self):
    """Bulk-create campaigns from a JSON array in the request body.

    Each element must carry the full set of campaign fields. The whole
    batch is committed once, after the loop, so it is all-or-nothing —
    a commit/return inside the loop would have stopped after the first
    element.
    """
    requestDict = request.get_json()
    if not requestDict:
        return {'error': 'No input data provided'}, status.HTTP_400_BAD_REQUEST
    try:
        for camp in requestDict:
            campaign = Campaign(
                name=camp['name'],
                month=camp['month'],
                startDate=camp['startDate'],
                endDate=camp['endDate'],
                minimumLoan=camp['minimumLoan'],
                maximumLoan=camp['maximumLoan'],
                minimumPeriod=camp['minimumPeriod'],
                maximumPeriod=camp['maximumPeriod'],
                interestRate=camp['interestRate'],
                idCurrency=camp['idCurrency'],
                active=1,
            )
            campaign.add(campaign)
        db.session.commit()
        response = {'ok': 'Campañas añadidas correctamente'}
        return response, status.HTTP_201_CREATED
    except SQLAlchemyError as e:
        db.session.rollback()
        return {'error': str(e)}, status.HTTP_400_BAD_REQUEST
    except Exception as e:
        db.session.rollback()
        response = {'error': 'An error ocurred. Contact cat-support asap. ' + str(e)}
        return response, status.HTTP_400_BAD_REQUEST
def mutate(root, info, campaign_id, customer_data):
    """Attach a new customer to a campaign and that campaign's company."""
    source_campaign = CampaignModel.find_by_id(campaign_id)
    if not source_campaign:
        raise Exception("Campaign not found!")
    owning_company = source_campaign.company.fetch()
    if not owning_company:
        raise Exception("Company not found!")
    new_customer = CustomerModel(
        **customer_data, source=source_campaign, company=owning_company
    )
    new_customer.save()
    return AddCustomer(customer=new_customer)
class AsmodeusObserve(asmodeus.AsmodeusMultiprocessing):
    """The ``observe`` subcommand: run an observation campaign over the
    dataset's population, optionally recording every frame as streaks."""

    name = 'observe'

    def create_argparser(self):
        """Extend the base argument parser with the --streaks flag."""
        super().create_argparser()
        self.argparser.add_argument(
            '-s', '--streaks', action='store_true',
            help="Save observations as streaks (all frames will be recorded)")

    def override_config(self):
        """Apply the CLI --streaks override on top of the loaded config.

        Streaks default to off; the CLI flag can only turn them on.
        """
        super().override_config()
        self.config.campaign.streaks = False
        if self.args.streaks:
            self.override_warning('streaks', self.config.campaign.streaks, self.args.streaks)
            self.config.campaign.streaks = True

    def prepare_dataset(self):
        # Clear previous outputs so this run starts from a clean dataset.
        self.dataset.protected_reset('sightings')
        self.dataset.remove('analyses')

    def configure(self):
        """Build the Campaign object from the dataset and campaign config."""
        self.campaign = Campaign(self.dataset, self.config.campaign)

    def run_specific(self):
        """Load the population, observe it (both multiprocessed), and save."""
        self.mark_time()
        self.campaign.load_population(processes=self.config.mp.processes, period=self.config.mp.report)
        self.campaign.observe(processes=self.config.mp.processes, period=self.config.mp.report)
        self.campaign.save()

    def finalize(self):
        """Log throughput statistics and where the sightings were written."""
        log.info(
            "{num} observations were processed in {time} seconds ({rate} sightings per second)"
            .format(
                num=c.num(self.campaign.population.count),
                time=c.num("{:.6f}".format(self.run_time())),
                rate=c.num("{:.3f}".format(self.campaign.population.count / self.run_time())),
            ))
        log.info("Observations were saved as {target} to {dir}".format(
            target=c.over(
                'streaks' if self.config.campaign.streaks else 'points'),
            dir=c.path(self.dataset.path('sightings')),
        ))
        super().finalize()
def configure(self):
    """Load the analysed campaign; attach bias discriminators when requested.

    Raises a ConfigurationError if the bias file is missing a required
    section (surfaces as an AttributeError on self.bias.*).
    """
    self.campaign = Campaign.load(self.dataset, analyses=self.config)
    if not self.args.bias:
        log.debug("No bias file set")
        return
    try:
        log.info("Setting bias function discriminators")
        discriminators = {
            'apparent_magnitude': MagnitudeDiscriminator.from_config(self.bias.magnitude),
            'altitude': AltitudeDiscriminator.from_config(self.bias.altitude),
            'angular_speed': AngularSpeedDiscriminator.from_config(self.bias.angular_speed),
        }
        log.info(f"Loaded {c.num(len(discriminators))} discriminators:")
        for one in discriminators.values():
            one.log_info()
        self.campaign.set_discriminators(discriminators)
    except AttributeError as e:
        raise exceptions.ConfigurationError(e) from e
def submitCampaign(Session, jobsFile):
    """Submit every job described in *jobsFile* to the Panda server.

    Creates (or reuses) the Campaign row named in the file, then for each
    job definition builds a Job row, a unique server name, submits it via
    Client.submitJobs, and records the submission outcome. Exits the
    process on a campaign-level parse/DB failure.
    """
    try:
        campdef = submissionTools.PandaJobsJSONParser.parse(jobsFile)
        campaign = Session.query(Campaign).filter(
            Campaign.name.like(campdef['campaign'])).first()
        if campaign is None:
            # Colons delimit fields in server job names — strip them
            # from the campaign name.
            campName = re.sub(':', '', campdef['campaign'])
            campaign = Campaign(name=campName,
                                lastUpdate=datetime.datetime.utcnow())
            Session.add(campaign)
            Session.commit()
    except Exception:
        logging.error(traceback.format_exc())
        Session.rollback()
        sys.exit(1)

    for j in campdef['jobs']:
        nodes = j['nodes']
        walltime = j['walltime']
        queuename = j['queuename']
        # Narrowed from bare `except:` — only a missing key or a
        # non-string value should fall back to None.
        try:
            outputFile = j['outputFile'].strip()
        except (KeyError, AttributeError):
            outputFile = None
        command = j['command']
        try:
            iterable = j['iterable'].strip()
        except (KeyError, AttributeError):
            iterable = None

        # Warn when another job already writes to the same output file.
        jobsThisOF = Session.query(Job).filter(
            Job.outputFile.like(outputFile)).count()
        if jobsThisOF > 0:
            print(
                coloured(
                    'Warning:' + str(jobsThisOF) +
                    ' job(s) already exist with output file: \n' +
                    outputFile + '\n', 'red'))

        dbJob = Job(script=command,
                    nodes=nodes,
                    wallTime=walltime,
                    status="To Submit",
                    subStatus="To Submit",
                    campaignID=campaign.id,
                    outputFile=outputFile)
        dbJob.serverName = 'c:' + campaign.name + ':'
        if iterable:
            dbJob.serverName += 'i:' + iterable + ':'
        if outputFile:
            # Panda Server doesn't like slashes in its job names
            dbJob.serverName += 'oF:' + re.sub('/', ';', outputFile) + ':'
        # check_output returns bytes on Python 3; decode and drop the
        # trailing newline before appending to the (string) job name.
        dbJob.serverName += subprocess.check_output('uuidgen').decode().strip()
        dbJob.iterable = iterable

        jobSpec = submissionTools.createJobSpec(walltime=walltime,
                                                command=command,
                                                outputFile=outputFile,
                                                nodes=nodes,
                                                jobName=dbJob.serverName)
        s, o = Client.submitJobs([jobSpec])
        # iterable may legitimately be None; don't crash when labelling output.
        label = iterable.strip() if iterable else ''
        try:
            print(o)
            dbJob.pandaID = o[0][0]
            dbJob.status = 'submitted'
            dbJob.subStatus = 'submitted'
            print(coloured(label + ", " + str(o[0][0]) + "\n", 'green'))
        except Exception:
            logging.error(traceback.format_exc())
            print(coloured(label + " job failed to submit\n", 'red'))
            dbJob.status = 'failed'
            dbJob.subStatus = 'failed'
        Session.add(dbJob)
        Session.commit()
    return None
def configure(self):
    """Instantiate this run's observation campaign from the dataset and config."""
    campaign_settings = self.config.campaign
    self.campaign = Campaign(self.dataset, campaign_settings)
def syncCampaign(Session):
    """Reconcile the local database with the Panda server's job list.

    Fetches all jobs from the server, inserts any that are missing locally
    (reconstructing their Campaign rows from the encoded job name), then
    re-queries each affected campaign to repopulate job parameters and
    warns about duplicate output files. Exits the process if the server
    query itself fails; per-job failures are logged and rolled back.
    """
    try:
        output = Client.getAllJobs()
        if output[0] != 0:
            raise Exception("Server error")
        else:
            output = json.loads(output[1])['jobs']
    except Exception as e:
        logging.error(traceback.format_exc())
        Session.rollback()
        sys.exit(1)
    jobsToRepopulate = []
    for j in output:
        try:
            # Check for pre-existing job with this pandaid.
            # We have to evaluate these queries lazily to avoid throwing
            # an unnecessary exception.
            if (j['pandaid'] and j['jobname']):
                isExistingPandaID = Session.query(Job).filter(
                    Job.pandaID.like(j['pandaid']))
                isExistingJobName = Session.query(Job).filter(
                    Job.serverName.like(j['jobname']))
                if (isExistingPandaID.first() is None
                        and isExistingJobName.first() is None):
                    if (len(j['jobname']) > 37):
                        # See if the jobname fits the format.
                        campaignName, i, oF = unpackServerName(j['jobname'])
                        if (campaignName):
                            campaign = Session.query(Campaign).filter(
                                Campaign.name.like(campaignName)).first()
                            if (campaign is None):
                                campaign = Campaign(
                                    name=campaignName,
                                    lastUpdate=datetime.datetime.utcnow())
                                Session.add(campaign)
                                Session.commit()
                            # We can't recover the job script from the monitor
                            # output - we do that with another query below.
                            job = Job(script="unknown",
                                      campaignID=campaign.id,
                                      pandaID=j['pandaid'],
                                      serverName=j['jobname'],
                                      status=j['jobstatus'],
                                      subStatus=j['jobsubstatus'])
                            if i:
                                job.iterable = i
                            # In some instances panda server can report a null
                            # substatus. Converting these to empty strings to
                            # fulfil database rules.
                            if not j['jobsubstatus']:
                                job.subStatus = ""
                            Session.add(job)
                            Session.commit()
                            # Record that this campaign/job id pair was missing,
                            # but only after it's been committed.
                            jobsToRepopulate.append((campaign.id, job.pandaID))
        except Exception as e:
            logging.error(traceback.format_exc())
            Session.rollback()
    # We need to query each job individually to get its job parameters.
    campsToRepopulate = set([seq[0] for seq in jobsToRepopulate])
    for c in campsToRepopulate:
        try:
            camp = Session.query(Campaign).get(c)
            jobs = [seq[1] for seq in jobsToRepopulate if seq[0] == c]
            # Recreate the jobs that were missing.
            camp.updateJobs(Session, recreate=True, jobs_to_query=jobs)
            # Now update them all to make sure everything is legit.
            camp.updateJobs(Session)
            # Now check to see if we have duplicate output files.
            for OF in Session.query(Job).with_entities(
                    Job.outputFile).group_by(Job.outputFile).all():
                jobsThisOF = Session.query(Job).filter(
                    Job.outputFile.like(OF[0])).count()
                if (jobsThisOF > 1):
                    print(
                        coloured(
                            'Warning:' + str(jobsThisOF) +
                            ' job(s) have shared output file: \n' + OF[0] +
                            '\n', 'red'))
        except Exception as e:
            logging.error(traceback.format_exc())
            Session.rollback()
    return None
def create_campaign(self, chat_id, name):
    """Persist a new campaign for the given chat under /campaigns in Firebase."""
    new_campaign = Campaign(chat_id, name)
    payload = new_campaign.to_json()
    self.firebase_db.post('/campaigns', data=payload,
                          params={'auth': FIREBASE_API_SECRET})
def resolve_campaign(root, info, _id):
    """GraphQL resolver: look up a single campaign by its id."""
    found = CampaignModel.find_by_id(_id)
    return found
def submitCampaign(Session, campSpecFile, listFile):
    """Submit jobs built from a campaign spec file, optionally one per
    line of *listFile* (each line substituted for ``<iter>`` in the job
    template's command and output file).

    Creates/reuses the Campaign row, then persists and submits one Job
    per iterable. Exits the process on a campaign-level failure.
    """
    try:
        campdef = submissionTools.PandaJobsJSONParser.parse(campSpecFile)
        campaign = Session.query(Campaign).filter(
            Campaign.name.like(campdef['campaign'])).first()
        if campaign is None:
            campaign = Campaign(name=campdef['campaign'],
                                lastUpdate=datetime.datetime.utcnow())
            Session.add(campaign)
            Session.commit()
    except Exception:
        logging.error(traceback.format_exc())
        Session.rollback()
        sys.exit(1)

    template = campdef['jobtemplate']
    nodes = template['nodes']
    walltime = template['walltime']
    queuename = template['queuename']
    # .get replaces the original bare try/except around a missing key.
    outputFile = template.get('outputFile')
    command = template['command']

    if listFile:
        with open(listFile, 'r') as f:
            iterList = [re.sub(r"\n", "", line) for line in f]
    else:
        # A single empty iterable gives exactly one unsubstituted job.
        iterList = ['']

    for iterable in iterList:
        if listFile:
            jobCommand = re.sub('<iter>', iterable, command)
            # Guard: re.sub on a None outputFile would raise TypeError.
            jobOutput = (re.sub('<iter>', iterable, outputFile)
                         if outputFile else None)
        else:
            jobCommand = command
            jobOutput = outputFile
        dbJob = Job(script=jobCommand,
                    nodes=nodes,
                    wallTime=walltime,
                    status="To Submit",
                    campaignID=campaign.id,
                    outputFile=jobOutput)
        # check_output returns bytes on Python 3; decode and drop the
        # trailing newline before appending to the (string) job name.
        dbJob.servername = (campaign.name +
                            subprocess.check_output('uuidgen').decode().strip())
        if listFile:
            dbJob.iterable = iterable
        Session.add(dbJob)
        Session.commit()
        jobSpec = submissionTools.createJobSpec(walltime=walltime,
                                                command=jobCommand,
                                                outputFile=jobOutput,
                                                nodes=nodes,
                                                jobName=dbJob.servername)
        s, o = Client.submitJobs([jobSpec])
        try:
            dbJob.pandaID = o[0][0]
            dbJob.status = 'submitted'
            dbJob.subStatus = 'submitted'
            print(
                coloured(iterable.strip() + ", " + str(o[0][0]) + "\n",
                         'green'))
        except Exception:
            logging.error(traceback.format_exc())
            print(coloured(iterable.strip() + " job failed to submit\n", 'red'))
            dbJob.status = 'failed'
            dbJob.subStatus = 'failed'
        Session.commit()
    return None
def resolve_campaign_list(root, info, company_id):
    """GraphQL resolver: list the campaigns belonging to a company."""
    matches = CampaignModel.find_many_by(company_id)
    return matches