def index(self, length, span, type, site, **rest ):
    """Render scheduler-status data per site for the last `length` `span`
    units ('days' => 24h buckets, otherwise 1h buckets).

    Only the 'plot' view is implemented; any other `type` returns a stub
    page (see the commented-out list branch below).
    """
    _span=3600
    if span == 'days':
        _span=24*3600
    # Width of the query window, in seconds.
    query_time = int(length)*_span
    end_time = time.time() - time.altzone
    start_time = end_time - query_time  # computed but currently unused
    status_site_dict = {}
    import Sites
    if site == 'all' :
        # One query per known site; rows are (status_count, timestamp).
        # The dict is keyed by timestamp (b) and maps the short site name
        # (third '_'-separated field of the map key) to the count (a).
        # NOTE(review): a later site with the same timestamp overwrites an
        # earlier site's entry — confirm this aggregation is intended.
        for st1,st2 in Sites.SiteMap().items():
            query = API.getKeyNum( 'status_scheduler', st2, query_time )
            for a,b in query:
                status_site_dict[b] = {str(st1).split('_')[2]:a}
    else:
        # Translate the public site name into its datastore identifier.
        site_to_query = Sites.SiteMap()[site]
        query = API.getKeyNum('status_scheduler', site_to_query, query_time )
        for a,b in query:
            status_site_dict[b] = {site:a}
    if type == 'plot':
        html = self.TypePlot(span, length, site, status_site_dict)
    else:
        # List view not implemented yet.
        html = """<html><body><h2>NOT YET IMPLEMENTED """
        # html += "Last %s %s</h2>\n " % ( length, span )
        # html += self.TypeList(sites)
    return html
def Host_getVMList_Call(api, args):
    """Return only the VM UUIDs for the requested VM list.

    The first argument to getVMList is False to suppress verbose
    per-VM results.
    """
    # required for editNetwork flow
    API.updateTimestamp()
    requested = args.get('vmList', [])
    return API.Global().getVMList(False, requested)
def checkWorkerHeartbeat(events):
    """Move schedules off workers whose heartbeat has timed out, destroy
    those workers, and re-arm this check on the event scheduler."""
    for stale in API.getWorkers():
        if (datetime.now() - stale.heartbeat) <= WORKER_HEARTBEAT_TIMEOUT:
            continue
        # Heartbeat expired: hand every schedule to another worker.
        for sched in API.getSchedules(stale):
            sched.worker = API.getNextWorker()
        API.destroyWorker(stale)
    events.enter(WORKER_HEARTBEAT_TIMEOUT.total_seconds(), 1,
                 checkWorkerHeartbeat, (events,))
def test(self): API.updatecsv() a=sqldb.getDBbalance(MSISDN) print a makecall.Call(dur) b=sqldb.getDBbalance(MSISDN) print b c=format(a-b, '.2f') m1=mbilling.billingcalc(dur,rate) m2=format(m1, '.2f') self.assertEqual(float(c),float(m2))
def runSchedules(events):
    """Run the next pending schedule if it is due, then re-arm the timer
    for the following one."""
    global schedules
    if schedules:
        pending = schedules[-1]
        delay = (pending.timeToRun - datetime.now()).total_seconds()
        if delay <= 0:
            # Due (or overdue): execute the job and drop the schedule.
            subprocess.call(pending.job.command, shell=True)
            API.removeSchedule(pending)
            schedules.pop()
        events.enter(delay, 1, runSchedules, (events,))
def index(self, length, span, type, site, **rest ):
    """Render jobs for a site over the last `length` `span` units
    ('days' => 24h buckets, otherwise 1h) as a plot or a user list."""
    _span=3600
    if span == 'days':
        _span=24*3600
    # Width of the query window, in seconds.
    query_time = int(length)*_span
    end_time = time.time() - time.altzone
    start_time = end_time - query_time  # computed but currently unused
    import Sites
    # 'all' is passed straight to the API; otherwise translate the public
    # site name into its datastore identifier.
    to_query = site
    if site != 'all' :
        to_query = Sites.SiteMap()[site]
    sites = API.getSites( query_time, to_query )
    total = len(sites)
    if (total == 0):
        html = "<html><body>No job for %s Site, during last %s %s </body></html>" % ( site,length,span)
        return html
    if type == 'plot':
        html = self.TypePlot(span, length, site, sites)
    else:
        html = """<html><body><h2>List of Users for """
        html += "Last %s %s</h2>\n " % ( length, span )
        html += self.TypeList(sites)
    return html
def index(self, length , span, type, **rest):
    """Show dataset access counts for the last `length` `span` units,
    either as a graph ('plot') or as a list."""
    seconds_per_unit = 24*3600 if span == 'days' else 3600
    query_time = int(length) * seconds_per_unit
    end_time = time.time() - time.altzone
    start_time = end_time - query_time
    dataset = API.getKeyNum_task('dataset', from_time=query_time)
    if not len(dataset):
        empty = """<html><body><h2> No dataset accessed </h2>\n """
        empty += """</body></html>"""
        return empty
    # Rows come back as (count, dataset_name).
    data = {dat: num for num, dat in dataset}
    if type == 'plot':
        return self.DatasetGraph(span, length, data)
    return self.DatasetList(data, query_time)
def sync_predictions(routes, session):
    """Fetch arrival predictions for `routes` and persist them as
    PredictionRecord rows.

    Mutates `routes` in place: the two Red line branches are collapsed
    into the single 'Red' route the predictions API expects. Returns
    the list of records merged into `session` (also committed here).
    """
    Logger.log.info("Syncing predictions...")
    Logger.log.info("Input routes: %s" % routes)
    if 'Red-Ashmont' in routes or 'Red-Braintree' in routes:
        # BUG FIX: remove each branch only when present. The original
        # called remove() on both unconditionally, raising ValueError
        # whenever only one of the two branches was in the list.
        if 'Red-Ashmont' in routes:
            routes.remove('Red-Ashmont')
        if 'Red-Braintree' in routes:
            routes.remove('Red-Braintree')
        routes.append('Red')
    Logger.log.info("Using routes: %s" % routes)
    to_save = []
    route_string = ",".join(routes)
    data = API.get("predictionsbyroutes", {'routes': route_string})
    mode = data['mode']
    for route in mode:
        route_sub = route['route']
        for route_sub_sub in route_sub:
            route_name = route_sub_sub['route_id']
            Logger.log.info("Processing route %s" % route_name)
            for direction in route_sub_sub['direction']:
                for trip in direction['trip']:
                    api_trip_id = trip['trip_id']
                    trip_ref = session.query(db.Trip).filter(db.Trip.api_id == api_trip_id).first()
                    if trip_ref is None:
                        Logger.log.info('No trip record for this prediction. trip_api_id: %s' % api_trip_id)
                        continue
                    for stop in trip['stop']:
                        # Feed stop names carry a " - <direction>" suffix.
                        stop_name = stop['stop_name'].split(' -')[0]
                        if 'JFK/UMASS' in stop_name:
                            stop_name = stop_name.split(' ')[0]
                        try:
                            station_id = session.query(db.Station).filter(db.Station.route_id == trip_ref.route_id)\
                                .filter(db.Station.name_human_readable.like('%' + stop_name + '%')).first().id
                        except AttributeError:
                            # No LIKE match (first() was None): fall back
                            # to fuzzy name matching.
                            station_id = station_with_most_similar_name(session, trip_ref.route_id, stop_name)
                        try:
                            seconds = stop['pre_away']
                        except KeyError:
                            # BUG FIX: this log line was unreachable in
                            # the original (it sat after `continue`).
                            Logger.log.info('trip %s has terminated' % api_trip_id)
                            continue
                        new_prediction_record = db.PredictionRecord(trip_id=trip_ref.id,
                                                                    stamp=datetime.datetime.utcnow(),
                                                                    station_id=station_id,
                                                                    seconds_away_from_stop=seconds)
                        to_save.append(new_prediction_record)
    for object in to_save:
        session.merge(object)
    session.commit()
    return to_save
def doLogin(self):
    """Attempt IceTV login.

    On success: show the congratulations text, mark the plugin as
    configured, enable IceTV and queue the first fetch job. On failure:
    surface the error message (including any server response body) in
    the UI and in the fetcher log.
    """
    try:
        # Clear any stale credentials before a fresh login.
        if ice.haveCredentials():
            ice.Logout().delete()
    except:
        # Failure to logout is not a show-stopper
        pass
    try:
        self.loginCmd()
        self.success = True
        self["instructions"].setText(_("Congratulations, you have successfully configured your %s %s "
                                       "for use with the IceTV Smart Recording service. "
                                       "Your IceTV guide will now download in the background.") % (getMachineBrand(), getMachineName()))
        self["message"].setText(_("Enjoy how IceTV can enhance your TV viewing experience by "
                                  "downloading the IceTV app to your smartphone or tablet. "
                                  "The IceTV app is available free from the iTunes App Store, "
                                  "the Google Play Store and the Windows Phone Store.\n\n"
                                  "Download it today!"))
        self["qrcode"].show()
        config.plugins.icetv.configured.value = True
        enableIceTV()
        fetcher.createFetchJob()
    except (IOError, RuntimeError) as ex:
        msg = "Login failure: " + str(ex)
        # Append the server's response body when the exception carries one.
        if hasattr(ex, "response") and hasattr(ex.response, "text"):
            msg += "\n%s" % str(ex.response.text).strip()
        fetcher.addLog(msg)
        self["instructions"].setText(_("There was an error while trying to login."))
        self["message"].hide()
        self["error"].show()
        self["error"].setText(msg)
def test_getDatasetDownload14(self):
    """Every item in the returned download list must be a
    DatasetDownload instance."""
    result = API.getDatasetDownload([1, 2, 3, 4])
    for item in result.items:
        self.assertIsInstance(item, classes.DatasetDownload)
def put(self, request):
    """
    Create a new VNF template. The 'vnf_id' assigned by the
    datastore is contained in the response.

    Two request shapes are accepted:
    - no 'image-upload-status' key: the request body IS the template;
      the upload status defaults to VNF.REMOTE.
    - with 'image-upload-status': the template sits under 'template'
      and the given status is stored alongside it.
    """
    if request.META['CONTENT_TYPE'] != 'application/json':
        return HttpResponse(status=415)
    if 'image-upload-status' not in request.data.keys():
        # Plain form: validate and store the body as the template.
        try:
            if 'functional-capability' not in request.data.keys():
                return HttpResponse("Missing functional-capability field", status=400)
            capability = request.data['functional-capability']
            ValidateTemplate().validate(request.data)
            template = json.dumps(request.data)
            image_upload_status = VNF.REMOTE
        except:
            return HttpResponse(status=400)
    elif all(request.data['image-upload-status'] not in state for state in VNF.IMAGE_UPLOAD_STATUS):
        # NOTE(review): this checks substring membership of the supplied
        # value in every known state; confirm whether a direct
        # `value not in VNF.IMAGE_UPLOAD_STATUS` was intended.
        return HttpResponse("Wrong value of image-upload-status field", status=400)
    elif 'template' not in request.data.keys():
        return HttpResponse("Missing template field", status=400)
    else:
        # Wrapped form: template plus an explicit upload status.
        try:
            if 'functional-capability' not in request.data['template'].keys():
                return HttpResponse("Missing functional-capability field", status=400)
            capability = request.data['template']['functional-capability']
            ValidateTemplate().validate(request.data['template'])
            template = json.dumps(request.data['template'])
            image_upload_status = request.data['image-upload-status']
        except:
            return HttpResponse(status=400)
    vnf_id = API.addVNFTemplateV2(template, capability, image_upload_status)
    return HttpResponse(vnf_id, status=200)
def delete(self, request, yang_id):
    """Delete a YANG model by id; 200 on success, 404 if unknown."""
    deleted = API.deleteYANG_model(yang_id)
    return HttpResponse(status=200 if deleted else 404)
def delete(self, request, vnf_id):
    """Delete an existing VNF template; 200 on success, 404 if unknown."""
    deleted = API.deleteVNFTemplate(vnf_id)
    return HttpResponse(status=200 if deleted else 404)
def search(query):
    """Run a volume search against the API and wrap each hit in Series."""
    params = {
        'resources': 'volume',
        'field_list': ','.join(Series.fields),
    }
    response = API.search(query, params)
    hits = []
    for raw in response['results']:
        hits.append(Series(raw))
    return hits
def DatasetList(self, data, query_time):
    """Render an HTML table of datasets with user/task/job counts and
    job efficiency, linking each cell to the corresponding drill-down.

    `data` maps dataset name -> access count (only the keys are used);
    `query_time` is the query window in seconds, embedded in the
    drill-down link keys.
    """
    # <td align="left"><a href=\"%s?dataset+%s\">%s</a></td>\
    # self.baseDDUrl,'job::%s'%dataset,jobs,\
    html = "<html><body><h2>List of Dataset</h2>\n "
    html += '<table cellspacing="10" cellpadding=5>\n'
    # BUG FIX: header typo 'Numeber' -> 'Number'.
    st = ['Dataset name', 'Number of users', 'Number of tasks', 'Total Number of jobs', 'Efficiency']
    html += '<tr>'
    for s in st:
        html += '<th align="left"> %s</th>\n' % s
    html += '</tr>'
    for dataset in data.keys():
        if dataset:
            html += '<tr>'
            users = API.countUsers(dataset, query_time)
            tasks = API.countTasks(dataset, query_time)
            jobs = API.countJobs(dataset, query_time)
            exitcodes = API.getJobExit(dataset, query_time)
            if not len(exitcodes):
                TotEff = 'Not yet available'
                eff = 'eff::%s::%s' % ('None', dataset)
            else:
                # Efficiency = fraction of jobs whose wrapper exit code is 0.
                tot = len(exitcodes)
                countSucc = 0
                for appl, wrapp in exitcodes:
                    if wrapp == 0:
                        countSucc += 1
                TotEff = countSucc * 1. / tot
                eff = 'eff::%s::%s' % (query_time, dataset)
            # BUG FIX: the original read  user = '******'%(query_time,dataset)
            # — a format string with no conversion specifiers, which raises
            # TypeError. Restore the key format symmetric with task/eff.
            user = 'user::%s::%s' % (query_time, dataset)
            task = 'task::%s::%s' % (query_time, dataset)
            if dataset == 'None':
                dataset = 'User Private MC Production'
            html += '<td align="left">%s</td><td align="left"><a href=\"%s?user=%s\">%s</a></td>\
             <td align="left"><a href=\"%s?task=%s\">%s</a></td>\
             <td align="left">%s</td>\
             <td align="left"><a href=\"%s?eff=%s\">%s</a></td>\n'\
                % (str(dataset), self.baseDDUrl, user, users,
                   self.baseDDUrl, task, tasks,
                   jobs,
                   self.baseDDUrl, eff, TotEff)
            html += '</tr>'
    html += "</table>\n"
    html += """</body></html>"""
    return html
def put(self, request, yang_id):
    """Update a YANG model; 200 on success, 404 if unknown."""
    updated = API.updateYANG_model(yang_id, request.data)
    return HttpResponse(status=200 if updated else 404)
def post(self, request, yang_id):
    """Insert a new YANG model into the repository.

    The datastore layer checks that the model is syntactically correct
    before saving it. Always answers 200; validation failures surface
    as exceptions from API.addYANG_model.
    """
    # The return value was previously bound to an unused local (`res`);
    # call for the side effect only.
    API.addYANG_model(yang_id, request.data)
    return HttpResponse(status=200)
def get(self, request, yang_id):
    """Retrieve a YANG model by id; 404 when no such model exists."""
    model = API.getYANG_model(yang_id)
    if model is None:
        return HttpResponse(status=404)
    return Response(data=model)
def get(self, request):
    """Retrieve every YANG model in the repository; 404 when none."""
    models = API.getAllYANG_model()
    if models is None:
        return HttpResponse(status=404)
    return Response(data=models)
def get(self, request):
    """Return all VNFs with their templates; 404 when none stored."""
    templates = API.getVNFTemplate()
    if templates is None:
        return HttpResponse(status=404)
    return Response(data=templates)
def updateSchedules(events):
    """Refresh this worker's schedule list, run anything due, and
    re-arm the periodic refresh."""
    global worker, schedules
    schedules = API.getSchedules(worker)
    runSchedules(events)
    events.enter(SCHEDULES_UPDATE_INTERVAL.total_seconds(), 1,
                 updateSchedules, (events,))
def get(self, request, vnf_id):
    """Return the template of one VNF; 404 when the id is unknown."""
    template = API.getVNFTemplate(vnf_id)
    if template is None:
        return HttpResponse(status=404)
    return Response(data=template)
def sync_trips_and_records(routes, session):
    """Record the current position of every vehicle on `routes` as a
    TripRecord row, creating the Trip row the first time a trip id is
    seen on the current UTC day."""
    Logger.log.info('Syncing trips...')
    Logger.log.info('Input routes: %s' % routes)
    to_save = []
    for route in routes:
        vehicles = API.getV3('vehicles', 'route', route)['data']
        for vehicle in vehicles:
            vehicle_route = vehicle['relationships']['route']['data']  # currently unused
            vehicle_trip = vehicle['relationships']['trip']['data']
            vehicle_trip_id = vehicle_trip['id']
            vehicle_lat = vehicle['attributes']['latitude']
            vehicle_lon = vehicle['attributes']['longitude']
            # Trips are looked up per (api_id, current UTC date).
            trips_with_same_id = session.query(db.Trip).filter(db.Trip.api_id == vehicle_trip_id).filter(
                db.Trip.date == datetime.datetime.utcnow().date())
            if trips_with_same_id.count() == 1:
                # Known trip: append a position record.
                new_trip_record = db.TripRecord(trip_id=trips_with_same_id.first().id,
                                                location_lat=vehicle_lat,
                                                location_lng=vehicle_lon,
                                                stamp=datetime.datetime.utcnow())
                to_save.append(new_trip_record)
                # Update the trip's last seen time
                session.query(db.Trip).filter(db.Trip.id == trips_with_same_id.first().id) \
                    .update({'stamp_last_seen': datetime.datetime.utcnow()})
            elif trips_with_same_id.count() == 0:
                # First sighting today: create the Trip and commit so it
                # gets a primary key before the record references it.
                route_id = session.query(db.Route).filter(db.Route.name == route).first().id
                new_trip = db.Trip(api_id=vehicle_trip_id,
                                   route_id=route_id,
                                   direction_id=vehicle['attributes']['direction_id'],
                                   lead=vehicle['attributes']['label'],
                                   date=datetime.datetime.utcnow(),
                                   stamp_first_seen=datetime.datetime.utcnow(),
                                   stamp_last_seen=datetime.datetime.utcnow())
                session.add(new_trip)
                session.commit()
                new_trip_record = db.TripRecord(trip_id=new_trip.id,
                                                location_lat=vehicle_lat,
                                                location_lng=vehicle_lon,
                                                stamp=datetime.datetime.utcnow())
                to_save.append(new_trip_record)
    for object in to_save:
        session.merge(object)
    session.commit()
def get(self, request, yang_id):
    """Return the YIN form of a YANG model (generated per request by
    pyang); 404 when the id is unknown."""
    yin = API.getYINFromYangID(yang_id)
    if yin is None:
        return HttpResponse(status=404)
    return Response(data=yin)
def index( self, length, span, **rest ):
    """Render a cumulative graph of job counts per status for the last
    `length` `span` units ('days' => 24h buckets, otherwise 1h).

    Returns an error page when graphtool is not installed.
    """
    _span=3600
    if span == 'days':
        _span=24*3600
    # Width of the query window, in seconds.
    query_time = int(length)*_span
    end_time = time.time() - time.altzone
    start_time = end_time - query_time
    querydata = API.getTimeStatusJob( query_time )
    errHtml = "<html><body><h2>No Graph Tools installed!!!</h2>\n "
    errHtml += "</body></html>"
    try:
        from graphtool.graphs.common_graphs import CumulativeGraph
    except ImportError:
        return errHtml
    # Bucket start times, one per _span-sized interval in the window.
    interval = range(int(start_time),int(end_time),int(_span))
    temp_dictOfList = {}
    cnt=0
    list_status=[]
    # Bin each (status, timestamp) row into its interval. NOTE(review):
    # list_status accumulates every status once per interval pass.
    for i in interval :
        temp_list = []
        for a,b in querydata :
            list_status.append(a)
            t = time.mktime(b.timetuple()) - time.altzone
            if t > i and t < i+_span :
                temp_list.append(a)
        temp_dictOfList[cnt]=temp_list
        cnt +=1
    # For each distinct status, count occurrences per bucket, keyed by
    # the bucket's start time.
    binning={}
    num_stat = 0
    for ii in set(list_status):
        c=0
        dict_for_binning={}
        for i in temp_dictOfList.values():
            cc=0
            for stat in i:
                if stat == ii:
                    cc+=1
            dict_for_binning[interval[c]]=cc
            c += 1
        binning[ii] = dict_for_binning
        num_stat += 1
    pngfile = os.path.join(self.workingDir, "%s-JobCumulative.png" % (length) )
    pngfileUrl = "%s?filepath=%s" % (self.imageServer, pngfile)
    data = binning
    metadata = {'title':' Cumulative of jobs per Status ', 'starttime':start_time, 'endtime':end_time, 'span':_span, 'is_cumulative':False }
    cum = CumulativeGraph()
    coords = cum.run( data, pngfile, metadata )
    html = "<html><body><img src=\"../%s\"></body></html>" % pngfileUrl
    return html
def delete(self, request, nf_fgraph_id):
    """Delete an existing NF forwarding graph.

    200 on success, 404 when the id is unknown, 400 on any error.
    """
    try:
        deleted = API.deleteNF_FGraphs(nf_fgraph_id)
        return HttpResponse(status=200 if deleted else 404)
    except:
        return HttpResponse(status=400)
def put(self, request, nf_fgraph_id = None):
    """Create (no id given) or update (id given) a Network Functions
    Forwarding Graph; returns the graph uuid as JSON."""
    if request.META['CONTENT_TYPE'] != 'application/json':
        return HttpResponse(status=415)
    try:
        ValidateNF_FG().validate(request.data)
        serialized = json.dumps(request.data)
        if nf_fgraph_id is None:
            graph_id = API.addNF_FGraphs(serialized)
        else:
            graph_id = API.updateNF_FGraphs(nf_fgraph_id, serialized)
    except:
        return HttpResponse(status=400)
    return HttpResponse(json.dumps({"nffg-uuid": graph_id}), status=200)
def get(self, request, capability):
    """Return all VNF templates providing the given capability.

    404 when none exist, 400 on any error.
    """
    try:
        templates = API.getTemplatesFromCapability(capability)
        if templates is None:
            return HttpResponse(status=404)
        return Response(data=templates)
    except:
        return HttpResponse(status=400)
def get(self, request):
    """Return a digest of all stored NFFGs; 404 when none, 400 on error."""
    try:
        digest = API.getnffg_digest()
        if digest is None:
            return HttpResponse(status=404)
        return Response(data=digest)
    except:
        return HttpResponse(status=400)
def get(self, request):
    """Return every NF forwarding graph; 404 when none, 400 on error."""
    try:
        graphs = API.getNF_FGraphs()
        if graphs is None:
            return HttpResponse(status=404)
        return Response(data=graphs)
    except:
        return HttpResponse(status=400)
def setLogLevel(self, level):
    """Delegate setLogLevel to the global API object."""
    return API.Global().setLogLevel(level)
def fenceNode(self, addr, port, agent, username, password, action,
              secure=False, options='', policy=None):
    """Delegate fenceNode to the global API object."""
    return API.Global().fenceNode(addr, port, agent, username, password,
                                  action, secure, options, policy)
def vmSetIoTune(self, vmId, tunables):
    """Apply I/O tuning parameters to the VM identified by vmId."""
    return API.VM(vmId).setIoTune(tunables)
def vmMigrationCreate(self, params):
    """Create the destination side of a VM migration; the VM id is
    taken from params['vmId']."""
    return API.VM(params['vmId']).migrationCreate(params)
def domainFormat(self, sdUUID, autoDetach=False, options=None):
    """Format the storage domain; `options` is accepted but unused."""
    return API.StorageDomain(sdUUID).format(autoDetach)
def domainDetachForced(self, sdUUID, spUUID, options=None):
    """Forcibly detach the storage domain from the pool (no master
    domain / version checks); `options` is accepted but unused."""
    return API.StorageDomain(sdUUID).detach(spUUID, None, None, force=True)
def vmDesktopLogoff(self, vmId, force):
    """Log the desktop session off the VM identified by vmId."""
    return API.VM(vmId).desktopLogoff(force)
def domainAttach(self, sdUUID, spUUID, options=None):
    """Attach the storage domain to the given pool; `options` unused."""
    return API.StorageDomain(sdUUID).attach(spUUID)
def setHaMaintenanceMode(self, mode, enabled):
    """Delegate setHaMaintenanceMode to the global API object."""
    return API.Global().setHaMaintenanceMode(mode, enabled)
def vmMonitorCommand(self, vmId, cmd):
    """Send a monitor command to the VM identified by vmId."""
    return API.VM(vmId).monitorCommand(cmd)
def vmHibernate(self, vmId, hiberVolHandle):
    """Hibernate the VM identified by vmId to the given volume handle."""
    return API.VM(vmId).hibernate(hiberVolHandle)
def vmDesktopSendHcCommand(self, vmId, message):
    """Send a host-channel command message to the VM's desktop agent."""
    return API.VM(vmId).desktopSendHcCommand(message)
def setMOMPolicy(self, policy):
    """Delegate setMOMPolicy to the global API object."""
    return API.Global().setMOMPolicy(policy)
def domainDeactivate(self, sdUUID, spUUID, msdUUID, masterVersion,
                     options=None):
    """Deactivate the storage domain in the pool; `options` unused."""
    return API.StorageDomain(sdUUID).deactivate(spUUID, msdUUID, masterVersion)
def setMOMPolicyParameters(self, key_value_store):
    """Delegate setMOMPolicyParameters to the global API object."""
    return API.Global().setMOMPolicyParameters(key_value_store)
def vmDesktopLogin(self, vmId, domain, user, password):
    """Log the given user into the VM's desktop session."""
    return API.VM(vmId).desktopLogin(domain, user, password)
def domainActivate(self, sdUUID, spUUID, options=None):
    """Activate the storage domain in the pool; `options` unused."""
    return API.StorageDomain(sdUUID).activate(spUUID)
def diskSizeExtend(self, vmId, driveSpecs, newSize):
    """Extend the size of the specified drive on the VM."""
    return API.VM(vmId).diskSizeExtend(driveSpecs, newSize)
def domainCreate(self, storageType, sdUUID, domainName, typeSpecificArg,
                 domClass, domVersion=None, options=None):
    """Create a storage domain; `options` is accepted but unused.

    Note the argument order to create() differs from this signature:
    (storageType, typeSpecificArg, domainName, domClass, domVersion).
    """
    return API.StorageDomain(sdUUID).create(storageType, typeSpecificArg,
                                            domainName, domClass, domVersion)
def delNetwork(self, bridge, vlan=None, bond=None, nics=None, options=None):
    """Delegate delNetwork to the global API object."""
    return API.Global().delNetwork(bridge, vlan, bond, nics, options)
def domainDetach(self, sdUUID, spUUID, msdUUID, masterVersion,
                 options=None):
    """Detach the storage domain from the pool (non-forced); `options`
    is accepted but unused."""
    return API.StorageDomain(sdUUID).detach(spUUID, msdUUID, masterVersion,
                                            force=False)
def editNetwork(self, oldBridge, newBridge, vlan=None, bond=None,
                nics=None, options=None):
    """Delegate editNetwork to the global API object."""
    return API.Global().editNetwork(oldBridge, newBridge, vlan, bond,
                                    nics, options)
def domainExtend(self, sdUUID, spUUID, devlist, force=False, options=None):
    """Extend the storage domain with the given devices; `options` unused."""
    return API.StorageDomain(sdUUID).extend(spUUID, devlist, force)
def setupNetworks(self, networks, bondings, options):
    """Delegate setupNetworks to the global API object."""
    return API.Global().setupNetworks(networks, bondings, options)
def domainGetFileStats(self, sdUUID, pattern='*', caseSensitive=False,
                       options=None):
    """Return file statistics for the domain matching `pattern`;
    `options` is accepted but unused."""
    return API.StorageDomain(sdUUID).getFileStats(pattern, caseSensitive)
def ping(self):
    """Delegate ping to the global API object."""
    return API.Global().ping()
def vmDiskReplicateFinish(self, vmId, srcDisk, dstDisk):
    """Finish disk replication for the VM identified by vmId."""
    return API.VM(vmId).diskReplicateFinish(srcDisk, dstDisk)
def setSafeNetworkConfig(self):
    """Delegate setSafeNetworkConfig to the global API object."""
    return API.Global().setSafeNetworkConfig()
def vmDesktopLock(self, vmId):
    """Lock the desktop session of the VM identified by vmId."""
    return API.VM(vmId).desktopLock()
def diskGetAlignment(self, vmId, driveSpecs):
    """Return alignment information for the specified drive on the VM."""
    return API.VM(vmId).getDiskAlignment(driveSpecs)