def _setReturnCode(self, code):
    """Set the return code.

    :param code: HTTP status code, given as an integer, numeric string, or
                 name (e.g. 200, "200" or "ok").
    :type code: integer or string
    :returns: success [True|False]
    """
    success = False
    if code in (200, "200", "ok"):
        web.ok()
        success = True
    elif code in (201, "201", "created"):
        web.created()
        success = True
    elif code in (400, "400", "badrequest"):
        web.badrequest()
    elif code in (401, "401", "unauthorized"):
        web.unauthorized()
    elif code in (404, "404", "notfound"):
        web.notfound()
    elif code in (409, "409", "conflict"):
        web.conflict()
    elif code in (500, "500", "internalerror"):
        web.internalerror()

    if success:
        logging.debug("[LayMan][_setReturnCode] Code: '%s'" % code)
    else:
        logging.error("[LayMan][_setReturnCode] Code: '%s'" % code)

    return success
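web.py's lowercase status helpers (web.ok, web.created, web.badrequest, ...) are HTTPError classes; constructing one sets web.ctx.status as a side effect, which is why the helper above can call them without raising and still return a boolean. Below is a minimal standalone sketch of the 201-plus-Location pattern that recurs in the snippets that follow; it assumes a web.py version whose web.created() accepts no arguments, and the /items route, Items class, and Location value are hypothetical.

import web

urls = ("/items", "Items")

class Items(object):
    def POST(self):
        web.created()                       # constructing the HTTPError sets ctx.status to "201 Created"
        web.header("Location", "/items/1")  # hypothetical URL of the newly created resource
        return ""

if __name__ == "__main__":
    web.application(urls, globals()).run()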
def POST(self, pais=None):
    print("pais", pais)
    try:
        if pais is not None:
            raise Exception('No permitido', 404)
        input = web.input(code=None, nombre=None, iso=None)
        print("code, nombre, iso:", input['code'], input['nombre'], input['iso'])
        if not input['code'] or not input['nombre'] or not input['iso']:
            raise Exception("Faltan datos de entrada", 400)
        pais = int(input['code'])
        if pais in self.paises:
            raise Exception("Elemento existente", 409)
        self.paises[pais] = {input['nombre'], input['iso']}
        # print("self.paises:", self.paises)
        web.created()
        web.header('Location', '/paises/' + str(pais))
        return ''
    except Exception as e:
        msg, code = e.args if len(e.args) == 2 else (e.args, 404)
        raise web.HTTPError(self.codes[code], data="Error: " + str(msg) + "\n")
def _POST(self, key, forceful=False):
    if forceful:
        self.datastore.set_value_in_all(str(key), web.data())
    else:
        self.datastore.set_value(str(key), web.data())
    web.created()
    location = "".join([web.ctx.home, _url_formatter(str(key))])
    web.header("Location", location)
    return {"message": location}
def POST(self):
    lnDb = len(db)
    # Keys are stored as strings, so compare them numerically when picking the next id.
    newindex = str(1 if lnDb == 0 else int(sorted(db.keys(), key=int)[-1]) + 1)
    db[newindex] = json.loads(web.data())
    body = json.dumps({'id': int(newindex), 'points': db[newindex]})
    web.created(self, headers={
        'Location': 'http://0.0.0.0:8080/scans/' + str(newindex),
        'Content-Type': 'application/json'
    })
    return body
def POST(self):
    try:
        config = json.loads(web.data())
        validators.validate(config, validators.AddVM)

        tags = config.get('tags', [])

        image = config['image']
        m = re.match('^/images/(\d+)$', image)
        if not m:
            raise ValueError('Invalid image definition')
        image = int(m.groups()[0])
        _ = model.getImage(web.ctx.veerezoDB, image)  # check for image existence
        image = 'image-{0}'.format(image)

        ramMiB = config['ramMiB']
        networkConfiguration = config['networkConfiguration']

        networkCards = []
        validNetworkIDs = model.getNetworkIDs(web.ctx.veerezoDB, web.ctx.username)
        # TODO somehow support 'global' / 'shared' networks everyone may use
        validNetworkIDs.append(1)  # TODO XXX FIXME temporary workaround to allow VMs access to the outside world without 'global' / 'shared' networks
        for x in config['networkCards']:
            if x is None:
                networkCards.append(None)
            else:
                m = re.match(r'^/networks/(\d+)$', x)
                if not m:
                    raise ValueError('At least one networkCard has an invalid network definition.')
                id = int(m.groups()[0])
                if id not in validNetworkIDs:
                    raise ValueError('At least one networkCard is attached to an unknown network.')
                networkCards.append(id)
    except (ValueError, KeyError) as e:
        web.badrequest()
        return {'error': 'ValueError: {0}'.format(e.message)}

    # We have to add the DB entry here so that we don't get confused by the
    # async nature of the job queue.
    id = model.addVM(web.ctx.veerezoDB, image, ramMiB, networkCards, networkConfiguration, web.ctx.username, tags)

    jobIDs = []
    jobIDs.append(web.ctx.postBackendJob('createDiskImages', id))
    jobIDs.append(web.ctx.postBackendJob('prepareDiskImages', id, ['root', 'swap', 'data']))

    addJobIDsHeader(jobIDs)

    url = '/vms/{0}'.format(id)
    web.header('Content-Location', url)
    web.created()

    d = {}
    d['vm'] = url
    return d
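For reference, a request body of the shape this handler reads (a sketch: only the fields accessed above are grounded in the code; the AddVM schema itself is not shown, and the networkConfiguration value is a placeholder):

example_add_vm_body = {
    "image": "/images/3",                   # resolved to integer id 3, then "image-3"
    "ramMiB": 1024,
    "networkConfiguration": "dhcp",         # placeholder; the expected format is not visible here
    "networkCards": ["/networks/1", None],  # None leaves that card unattached
    "tags": ["example"],                    # optional
}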
def POST(self, platform, release, arch):
    """
    Publish a new Sandbox Agent.
    :param platform:
    :param release:
    :param arch:
    :return:
    """
    x = web.input(agent_file={})

    if 'agent_file' not in x:
        # Raise the exception now.
        raise web.badrequest("Missing parameter agent_file.")

    if 'agent_file' in x:
        # Sanitize platform, release and arch.
        if not re.match("^[a-zA-Z0-9\_]+$", platform):
            raise web.badrequest("Platform value is unsafe and will not be accepted.")
        if not re.match("^[a-zA-Z0-9\_]+$", release):
            raise web.badrequest("Release value is unsafe and will not be accepted.")
        if not re.match("^[a-zA-Z0-9\_]+$", arch):
            raise web.badrequest("Arch value is unsafe and will not be accepted.")

        # Create the path.
        dest_path = os.path.join(AGENT_CLIENT_DIR, platform, release, arch)
        if not os.path.isdir(dest_path):
            os.makedirs(dest_path)

        # Store the file there.
        filename = os.path.join(dest_path, "agent.zip")
        fout = None
        try:
            with open(filename, 'w') as fout:
                # Create the file where the upload should be stored and write
                # the uploaded content into it.
                fout.write(x.agent_file.file.read())

            if not zipfile.is_zipfile(filename):
                raise web.badrequest("Corrupted or invalid zip file.")
            zfile = zipfile.ZipFile(file=filename)
            if zfile.testzip() is not None:
                raise web.badrequest("Invalid or corrupted zip file")

            # Everything OK. Return success.
            return web.created()
        except:
            log.exception("Error during agent client upload")

            # If the file was too big or invalid, delete it.
            try:
                fout.close()
            except:
                pass

            # Remove the entire directory of the agent.
            if os.path.isfile(filename):
                shutil.rmtree(dest_path)

            # Raise the exception now.
            raise web.badrequest("Error during upload of agent.")
def _POST(self, *param, **params):
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    if not validates_rule(self, is_newrule=True):
        return web.badrequest(self.view.alert)

    kit = KaresansuiIpTables()
    kit.firewall_xml = kit.read_firewall_xml()

    rule_info = {"target": self.input.target,
                 "protocol": self.input.protocol,
                 "source": self.input.source,
                 "destination": self.input.destination,
                 "source-port": self.input.sport,
                 "destination-port": self.input.dport,
                 "in-interface": self.input.inif,
                 "out-interface": self.input.outif,
                 }

    if self.input.rule_id == "":
        rule_id = kit.add_rule(rule_info)
    else:
        rule_id = kit.insert_rule(int(self.input.rule_id), rule_info)

    kit.write_firewall_xml()

    self.view.host_id = host_id
    return web.created('%s/%d' % (web.ctx.path, rule_id,))
class Icon(Rest):

    @auth
    def _POST(self, *param, **params):
        if not validates_icon(self):
            self.logger.debug("Create Icon is failed, Invalid input value")
            return web.badrequest(add_prefix(self.view.alert, "400"))

        icon_filevalue = self.input.multi_icon.value
        icon_filename = "%s.%s" % (uniq_filename(), imghdr.what(None, icon_filevalue))

        if is_path(icon_filename) is True:
            return web.badrequest("Not to include the path.")

        icon_realpath = ICON_DIR_TPL % (karesansui.dirname, icon_filename)
        icon_webpath = ICON_DIR_TPL % (web.ctx.homepath, icon_filename)

        if os.path.exists(icon_realpath):
            web.conflict(icon_webpath, add_prefix("icon already exists", "409"))

        try:
            create_file(icon_realpath, icon_filevalue)
        except IOError, ioe:
            self.logger.error("Failed to write icon file. - filename=%s" % icon_filename)
            return web.internalerror(add_prefix("Failed to create icon file.", "500"))

        return web.created(icon_webpath, icon_filename)
def POST(self):
    '''Add entry to phonebook with firstname, surname, number, and optional address'''
    required_attrs = ['firstname', 'surname', 'number']
    optional_attrs = ['address']
    all_attrs = required_attrs + optional_attrs

    data = load_json(web.data())

    # Checks all required attributes are present.
    # Checks there are no unrecognized fields.
    # Validates input.
    # Raises 400 Bad Request if not valid.
    validate_fields(data, required_attrs, all_attrs)

    fn = data['firstname']
    sn = data['surname']
    nm = data['number']
    # Check to see whether address is present, or None if not.
    addr = 'address' in data.keys() and data['address'] or None

    row_id = db.insert('phonebook', seqname='id',
                       firstname=fn, surname=sn, number=nm, address=addr)

    # Return 201 Created
    return web.created(headers={'Location': '/%d' % row_id})
def PUT(self, id):
    if id == 0:
        web.badrequest(self, message="Indexes start at 1")
        return

    updating = id in db
    db[id] = json.loads(web.data())['points']
    body = json.dumps({'id': int(id), 'points': db[id]})

    if updating:
        web.ok(self, headers={'Content-Type': 'application/json'})
    else:
        web.created(self, headers={
            'Location': 'http://0.0.0.0:8080/scans/' + str(id),
            'Content-Type': 'application/json'
        })
    return body
def POST(self):
    x = web.input()
    if x.has_key("id"):
        if not s.exists(x['id']):
            item = s.get_item(x['id'])
            raise web.created()
        else:
            raise web.ok()
def POST(self):
    try:
        data = json.loads(web.data())
        validators.validate(data, validators.AddSSHKey)
    except ValueError as e:
        web.badrequest()
        return {'error': 'ValueError: {0}'.format(e)}

    key = data['key']

    id = model.addSSHKey(web.ctx.veerezoDB, key, web.ctx.username)

    url = '/sshkeys/{0}'.format(id)
    web.header('Content-Location', url)
    web.created()

    d = {}
    d['sshkey'] = url
    return d
def POST(self, db_name, _user=None):
    user_input = web.input(data_string=None)
    user_data = load_formatted_data(_user["data_format"], user_input.data_string)
    c = validate(user_data, self.valid_data_format)

    db_cnx = get_db_cnx(db_name)
    self.cur = db_cnx.cursor()

    c = self.check_data(c)
    self.cur.execute(self.query, c)
    db_cnx.commit()

    return web.created()
def POST(self):
    lab = 0
    IDnew = notes[len(notes) - 1]["ID"] + 1
    dataobject = json.loads(web.data())
    titlenew = dataobject['title']
    contentnew = dataobject['content']
    categorynew = dataobject['category']
    labelsnew = dataobject['labels']
    labelsSplit = labelsnew.split(',')

    for i in range(len(labelsSplit)):
        for j in range(len(labels)):
            if json.dumps(labels[j]) == json.dumps(labelsSplit[i]):
                lab = 1
        if lab == 0:
            labels.append(labelsSplit[i])
        lab = 0

    notes.append(dict(ID=IDnew, title=titlenew, category=categorynew,
                      content=contentnew, labels=labelsSplit))
    web.created()
    return notes[len(notes) - 1]
def POST(self):
    try:
        msg = cgi.escape(web.data())
    except Exception:
        msg = ''
    ip = web.ctx.ip
    now = datetime.now().strftime('%c')
    item = (ip, now, msg)
    connection_list.append(item)
    web.header('Content-Type', 'application/json')
    raise web.created(data=json.dumps(item))
def POST(self):
    try:
        data = json.loads(web.data())
        validators.validate(data, validators.AddNetwork)
    except ValueError as e:
        web.badrequest()
        return {'error': 'ValueError: {0}'.format(e.message)}

    devices = data.get('devices', [])
    tags = data.get('tags', [])

    id = model.addNetwork(web.ctx.veerezoDB, devices, web.ctx.username, tags)

    jobID = web.ctx.postBackendJob('reconfigureNetworks')
    addJobIDsHeader([jobID])

    url = '/networks/{0}'.format(id)
    web.header('Content-Location', url)
    web.created()

    d = {}
    d['network'] = url
    return d
def _POST(self, *param, **params):
    if not validates_tag(self):
        self.logger.debug("Failed to create tag. The value of input is invalid.")
        return web.badrequest(self.view.alert)

    tag = findby1name(self.orm, self.input.name)
    if tag:
        self.logger.debug("Failed to create tag. The same tag already exist - id='%s'" % (tag.id))
        return web.conflict(web.ctx.path)

    new_tag = new(self.input.name)
    save(self.orm, new_tag)

    return web.created(None)
def POST(self, db_name, id, _user=None):
    user_input = web.input(data_string=None)
    user_data = {'id': id}
    d = load_formatted_data(_user["data_format"], user_input.data_string)
    user_data.update(d)
    d = validate(user_data, self.valid_data_format)

    #try:
    db_cnx = get_db_cnx(db_name)
    self.cur = db_cnx.cursor()

    d = self.check_data(d)
    self.cur.execute(self.query, d)
    db_cnx.commit()
    #except:
    #    return web.internalerror()

    return web.created()
def POST(self):
    if not web.data():
        # No data. Return 500 - call error
        print("No data found in the request")
        web.header('Content-Type', 'application/json', unique=True)
        raise web.InternalError("{\"message\": \"call error\"}")
    else:
        data = json.loads(web.data())
        if not data:
            # No JSON. Return 500 - call error
            print("No JSON data found in the request")
            web.header('Content-Type', 'application/json', unique=True)
            raise web.InternalError("{\"message\": \"call error\"}")
        else:
            # Fetch destination from JSON
            destination = data["destination"]
            print("Destination: " + destination)
            if not destination:
                # Destination not found in request JSON. Return 500 - call error
                web.header('Content-Type', 'application/json', unique=True)
                raise web.InternalError("{\"message\": \"call error\"}")

            con = ESL.ESLconnection('127.0.0.1', '8021', 'ClueCon')
            if not con.connected():
                # Unable to connect to FreeSWITCH
                print("Unable to connect FreeSWITCH")
                web.header('Content-Type', 'application/json', unique=True)
                raise web.InternalError("{\"message\": \"call error\"}")

            temp = "{'origination_caller_id_number=3024561011'}user/" + destination + " &playback(http://s3.amazonaws.com/plivocloud/music.mp3)"
            cmd = str(temp)

            # Send the command to FreeSWITCH
            print("Initiating Call.. ")
            e = con.api("originate", cmd)
            if e:
                res = e.getBody()
                if (res.find("OK") != -1):
                    # Call Successful. Return 201 - ok
                    print("Call Successful")
                    web.header('Content-Type', 'application/json', unique=True)
                    raise web.created("{\"message\": \"ok\"}")
                else:
                    # Call Failed. Return 500 - call error
                    print("Call Failed")
                    web.header('Content-Type', 'application/json', unique=True)
                    raise web.InternalError("{\"message\": \"call error\"}")
def POST(self, uuid=None):
    if uuid:
        raise web.badrequest()

    try:
        data_json = web.data()
        data = json.loads(data_json)
    except Exception:
        raise web.badrequest()

    # @throws web.badrequest
    bird = self.handler.create(data)

    # Show reference to new entity, as per RFC 2616.
    headers = {'Location': '/birds/%s' % bird.get('id')}
    raise web.created(self.dump_json(bird), headers)
def GET(self, id, taskid):
    """Checks the status of a pending 'avatar-change' operation.

    The 'HTTP_ACCEPT' header is required to allow the controller to specify
    the acceptable media type for the response.

    There should be a logged-in user behind this request, and the specified
    ``id`` should match the one of the logged-in user.

    If all these prerequisites hold true, the controller checks the status
    of the task with ID ``taskid``. If the task is still active the
    controller returns '200 OK'; clients are then expected to come back
    later and check the status of the task again. On the other hand, if the
    task has exited normally, a '201 Created' status message is sent back to
    the client with the 'Location' header pointing to the uploaded avatar.

    Note that a '415 Unsupported Media Type' status message is returned if
    the format of the uploaded avatar cannot be handled by the server.
    """
    task = tasks.UsersAvatarChangeTask
    status, arg = workflows.check_avatar_change_status(web.ctx.logger, task, taskid)
    if status == workflows.TASK_RUNNING:
        raise web.ok()
    elif status == workflows.TASK_FAILED:
        if type(arg).__name__ == 'IOError':
            raise web.unsupportedmediatype()
        else:
            raise arg
    else:
        assert status == workflows.TASK_FINISHED
        ok, arg = arg
        if not ok:
            return jsonify(arg)
        else:
            web.header('Location', arg)
            raise web.created()
def _POST(self, *param, **params):
    if not validates_user(self):
        self.logger.debug("Failed to create account. the values of input are invalid.")
        return web.badrequest(self.view.alert)

    user = findby1email(self.orm, self.input.email)
    if user:
        self.logger.debug("Failed to create account. The same mail address '%s' already exist - user='******'" % (self.input.email, user.nickname))
        return web.conflict(web.ctx.path)

    (password, salt) = sha1encrypt(self.input.new_password)

    new_user = new(self.input.email,
                   password,
                   salt,
                   self.input.nickname,
                   self.input.languages,
                   )
    save(self.orm, new_user)

    return web.created(None)
def POST_AUTH(self, channel):
    try:
        post_data = json.loads(web.data().decode())
    except JSONDecodeError:
        raise web.badrequest()

    if {'name', 'theme', 'validity'} != set(post_data.keys()):
        raise web.badrequest()
    if 'name' in post_data and len(post_data['name']) < 3:
        raise web.badrequest()

    validity_from, validity_to = post_data['validity']
    if not (type(validity_from) == type(validity_to) == int) or validity_to < validity_from:
        raise web.badrequest()
    try:
        validity_from, validity_to = datetime.fromtimestamp(validity_from), datetime.fromtimestamp(validity_to)
    except (TypeError, ValueError):
        raise web.badrequest()

    try:
        c = EditorCapsule(name=post_data['name'],
                          theme=post_data['theme'],
                          validity_from=validity_from,
                          validity_to=validity_to,
                          channel=channel,
                          c_order=EditorCapsule.selectBy(channel=channel).count())
    except DuplicateEntryError:
        raise web.badrequest()

    EditorCapsule.rectify_c_order(channel.id)
    web.header('Location', '/channels/{}/api/capsules/{}'.format(channel.id, c.id))
    raise web.created()
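For reference, a body that would pass the validation above (a sketch; the name and theme values are placeholders, and the timestamps are arbitrary epoch seconds with from <= to):

example_capsule_body = {
    "name": "spring-campaign",             # at least 3 characters
    "theme": "default",                    # placeholder theme name
    "validity": [1700000000, 1700604800],  # [from, to] as integer timestamps
}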
def POST_AUTH(self, capsule_id, channel):
    try:
        post_data = json.loads(web.data().decode())
    except JSONDecodeError:
        raise web.badrequest()

    try:
        c = EditorCapsule.selectBy(id=int(capsule_id), channel=channel).getOne()
    except SQLObjectNotFound:
        raise web.notfound()

    if {'duration', 'template', 'content'} != set(post_data.keys()):
        raise web.badrequest()

    try:
        s = EditorSlide(s_order=c.slides.count(), capsule=c, **post_data)
    except SQLObjectIntegrityError:
        raise web.badrequest()

    EditorSlide.rectify_s_order(c.id)
    web.header('Location', '/channels/{}/api/capsules/{}/slides/{}'.format(channel.id, capsule_id, s.id))
    raise web.created()
def POST():
    try:
        package_object = json.loads(zlib.decompress(web.data()))
        if not pleasance.create_package(package_object['name']):
            return web.internalerror()
        if not pleasance.update_package_version(
                package_object['name'], package_object['version'],
                package_object['content-type'],
                base64.b64decode(package_object['contents'])):
            return web.internalerror()
        else:
            pleasance.update_package_metadata(
                package_object['name'], package_object['version'],
                json.loads(package_object['metadata']))
            web.header('Location', web.ctx.home + '/packages/' +
                       package_object['name'] + '/' + package_object['version'])
            return web.created()
    except pleasance.PackageIsPromotedError:
        return web.HTTPError(status='409 Conflict',
                             headers={'Content-Type': 'text/plain'},
                             data='Cannot overwrite a promoted package')
    except (zlib.error, ValueError):
        return web.UnsupportedMediaType()
    try:
        phraseid = model.add_phrase(
            phrase=query.password,
            code=base36decode(phraseCode),
            maxdays=int(query.maxdays),
            maxviews=int(query.maxviews)
        )
    except(model.ModelError), e:
        web.internalerror(str(e))
        return json.dumps(dict(error=str(e)))
    except(), e:
        web.internalerror(str(e))
        return json.dumps(dict(error=str(e)))

    web.created()
    return json.dumps(dict(
        phrase=query.password,
        code=phraseCode
    ))

# DELETE /password/foo HTTP/1.0
def DELETE(self, arg):
    # Change output to JSON
    web.header('Content-type', 'application/json')

    if not arg:
        web.internalerror()
        return json.dumps(dict(error='must have code'))

    try:
def PUT(self, modelId=None):
    """
    Create Model

    ::

        POST /_models

    Data: Use the metric as returned by the datasource metric list.

    For example, create a Cloudwatch model as follows:

    ::

        curl http://localhost:8081/_models -X POST -d '
        {
            "region": "us-east-1",
            "namespace": "AWS/EC2",
            "datasource": "cloudwatch",
            "metric": "CPUUtilization",
            "dimensions": {
                "InstanceId": "i-12345678"
            }
        }'

    Or to create a HTM-IT custom model, include the following data in the
    POST request (uid is the same for the metric and model):

    ::

        {
            "uid": "2a123bb1dd4d46e7a806d62efc29cbb9",
            "datasource": "custom",
            "min": 0.0,
            "max": 5000.0
        }

    The "min" and "max" options are optional for both Cloudwatch and HTM-IT
    custom metrics.
    """
    if modelId:
        # ModelHandler is overloaded to handle both single-model requests, and
        # multiple-model requests. As a result, if a user makes a POST, or PUT
        # request, it's possible that the request can be routed to this handler
        # if the url pattern matches. This specific POST handler is not meant
        # to operate on a known model, therefore, raise an exception, and return
        # a `405 Method Not Allowed` response.
        raise NotAllowedResponse({"result": "Not supported"})

    data = web.data()

    if data:
        try:
            if isinstance(data, basestring):
                request = utils.jsonDecode(data)
            else:
                request = data
        except ValueError as e:
            response = "InvalidArgumentsError(): " + repr(e)
            raise InvalidRequestResponse({"result": response})

        if not isinstance(request, list):
            request = [request]

        response = []
        for nativeMetric in request:
            try:
                # Attempt to validate the request data against a schema
                # TODO: Move this logic into datasource-specific adapters
                if ("type" in nativeMetric.keys() and
                        nativeMetric["type"] == "autostack"):
                    validate(nativeMetric, _AUTOSTACK_CREATION_SCHEMA)
                elif nativeMetric["datasource"] == "custom":
                    validate(nativeMetric, _CUSTOM_MODEL_CREATION_SCHEMA)
                elif nativeMetric["datasource"] == "autostack":
                    validate(nativeMetric, _AUTOSTACK_MODEL_IMPORT_SCHEMA)
                else:
                    validate(nativeMetric, _CLOUDWATCH_MODEL_CREATION_SCHEMA)

                # Perform additional cloudwatch-specific validation that can't be
                # captured properly in schema.
                if "metricSpec" in nativeMetric:
                    # New-style arg
                    metricSpec = nativeMetric["metricSpec"]
                else:
                    # Legacy arg
                    metricSpec = nativeMetric

                if (not isinstance(metricSpec["dimensions"], dict)
                        or not metricSpec["dimensions"]
                        or not all(key and value
                                   for (key, value)
                                   in metricSpec["dimensions"].iteritems())):
                    raise ValidationError("At least one dimension is required")

            except ValidationError as e:
                # Catch ValidationError if validation fails
                # InvalidRequestResponse produces an HTTP 400 error code
                response = "InvalidArgumentsError(): " + repr(e)
                raise InvalidRequestResponse({"result": response})
    else:
        # Metric data is missing
        log.error("Data is missing in request, raising BadRequest exception")
        raise web.badrequest("Metric data is missing")

    try:
        self.addStandardHeaders()
        metricRowList = self.createModels(data)
        metricDictList = [formatMetricRowProxy(metricRow)
                          for metricRow in metricRowList]
        response = utils.jsonEncode(metricDictList)
        raise web.created(response)
    except web.HTTPError as ex:
        if bool(re.match("([45][0-9][0-9])\s?", web.ctx.status)):
            # Log 400-599 status codes as errors, ignoring 200-399
            log.error(str(ex) or repr(ex))
        raise
    except Exception as ex:
        log.exception("PUT Failed")
        raise web.internalerror(str(ex) or repr(ex))
def GET(self, db_name, _user=None):
    """ create the database """
    web.header('Content-type', "text/html; charset=utf-8")
    initialize_database_tables(db_name)
    return web.created()
def POST(self, path):
    path = unquote(path)
    tokens = [x for x in path.split("/") if x]
    if len(tokens) == 1:
        x = web.input()
        if x.has_key("version"):
            item = s.get_item(tokens[0])
            if x['version'] not in item.versions:
                if x.has_key("old_version"):
                    item.clone_version(x['old_version'], x['version'])
                    raise web.created()
                else:
                    item.create_new_version(x['version'], date=datetime.now().isoformat())
                    raise web.created()
            else:
                raise web.ok()
    elif len(tokens) == 2:
        if s.exists(tokens[0]):
            item = s.get_item(tokens[0])
            if tokens[1] not in item.versions:
                item.create_new_version(tokens[1], date=datetime.now().isoformat())
                raise web.created()
            else:
                x = web.input(part={})
                if x.has_key("part"):
                    item.set_version_cursor(tokens[1])
                    path = x['part'].filename
                    if x.has_key("path") and x['path']:
                        path = x['path']
                    new = True
                    if path in item.files:
                        new = False
                    item.put_stream(path, x['part'].file)
                    x['part'].file.close()
                    if new:
                        raise web.created()
                    else:
                        raise web.ok()
                else:
                    raise web.ok()
        else:
            raise web.notfound()
    elif len(tokens) > 2:
        if s.exists(tokens[0]):
            item = s.get_item(tokens[0])
            if tokens[1] not in item.versions:
                item.create_new_version(tokens[1], date=datetime.now().isoformat())
            x = web.input(part={})
            item.set_version_cursor(tokens[1])
            #mimetype = "application/octet-stream"
            #if params['part'].headers.has_key('content-type'):
            #    mimetype = params['part'].headers['content-type']
            path = x['part'].filename
            if x.has_key("path") and x['path']:
                path = x['path']
            new = True
            if path in item.files:
                new = False
            item.put_stream(path, x['part'].file)
            x['part'].file.close()
            if new:
                raise web.created()
            else:
                raise web.ok()
        else:
            raise web.notfound()
def _POST(self, *param, **params):
    if not validates_host_add(self):
        return web.badrequest(self.view.alert)

    uniq_key_check = findby1uniquekey(self.orm, self.input.m_uuid)
    if uniq_key_check is not None and config['application.uniqkey'] != self.input.m_uuid:
        return web.conflict(web.ctx.path)

    hostname_check = findby1hostname(self.orm, self.input.m_hostname)
    if hostname_check is not None:
        return web.conflict(web.ctx.path)

    # notebook
    note_title = None
    if is_param(self.input, "note_title"):
        note_title = self.input.note_title

    note_value = None
    if is_param(self.input, "note_value"):
        note_value = self.input.note_value

    _notebook = n_new(note_title, note_value)

    # tags
    _tags = None
    if is_param(self.input, "tags"):
        _tags = []
        tag_array = comma_split(self.input.tags)
        tag_array = uniq_sort(tag_array)
        for x in tag_array:
            if t_count(self.orm, x) == 0:
                _tags.append(t_new(x))
            else:
                _tags.append(t_name(self.orm, x))

    uniq_key = self.input.m_uuid
    name = self.input.m_name
    hostname = self.input.m_hostname

    model = findby1uniquekey(self.orm, uniq_key, is_deleted=True)
    if model is None:
        host = m_new(created_user=self.me,
                     modified_user=self.me,
                     uniq_key=uni_force(uniq_key),
                     name=name,
                     hostname=hostname,
                     attribute=MACHINE_ATTRIBUTE['HOST'],
                     hypervisor=MACHINE_HYPERVISOR['REAL'],
                     notebook=_notebook,
                     tags=_tags,
                     icon=None,
                     is_deleted=False)

        m_save(self.orm, host)
        return web.created(None)
    else:
        model.name = name
        model.hostname = hostname
        model.uniq_key = uniq_key
        model.notebook.title = note_title
        model.notebook.value = note_value
        model.tags = _tags
        model.is_deleted = False

        m_update(self.orm, model)
        return web.created(None)
class Init(Rest):

    def _GET(self, *param, **params):
        self.view.database_bind = karesansui.config['database.bind']
        self.view.default_locale = karesansui.config['application.default.locale']
        self.view.locales = DEFAULT_LANGS.keys()

        if karesansui_database_exists() is True:
            return web.tempredirect("/", absolute=False)

        if self.is_mode_input():
            return True
        else:
            return True

        return True

    def _POST(self, *param, **params):
        if not validates_user(self):
            return web.badrequest(self.view.alert)

        engine = get_engine()
        metadata = get_metadata()
        session = get_session()

        try:
            metadata.drop_all()
            metadata.tables['machine2jobgroup'].create()
            metadata.create_all()
        except Exception, e:
            traceback.format_exc()
            raise Exception('Initializing/Updating a database error - %s' % ''.join(e.args))

        (password, salt) = sha1encrypt(self.input.password)

        user = User(u"%s" % self.input.email,
                    unicode(password),
                    unicode(salt),
                    u"%s" % self.input.nickname,
                    u"%s" % self.input.languages,
                    )
        session.add(user)
        session.commit()

        # Tag Table set.
        tag = Tag(u"default")
        session.add(tag)
        session.commit()

        # Machine Table set.
        #user = session.query(User).filter(User.email == self.input.email).first()
        uuid = string_from_uuid(generate_uuid())
        fqdn = socket.gethostname()
        notebook = Notebook(u"", u"")
        machine = Machine(user,
                          user,
                          u"%s" % uuid,
                          u"%s" % fqdn,
                          MACHINE_ATTRIBUTE['HOST'],
                          MACHINE_HYPERVISOR['REAL'],
                          notebook,
                          [tag],
                          u"%s" % fqdn,
                          u'icon-guest1.png',
                          False,
                          None,
                          )
        session.add(machine)
        session.commit()

        session.close()
        return web.created(None)
def PUT(self, modelId=None):
    """
    Create Model

    ::

        POST /_models

    Data: Use the metric as returned by the datasource metric list.

    For example, create a custom model, include the following data in the
    POST request (uid is the same for the metric and model):

    ::

        {
            "uid": "2a123bb1dd4d46e7a806d62efc29cbb9",
            "datasource": "custom",
            "min": 0.0,
            "max": 5000.0
        }

    The "min" and "max" options are optional.
    """
    if modelId:
        # ModelHandler is overloaded to handle both single-model requests, and
        # multiple-model requests. As a result, if a user makes a POST, or PUT
        # request, it's possible that the request can be routed to this handler
        # if the url pattern matches. This specific POST handler is not meant
        # to operate on a known model, therefore, raise an exception, and return
        # a `405 Method Not Allowed` response.
        raise NotAllowedResponse({"result": "Not supported"})

    data = web.data()

    if data:
        try:
            if isinstance(data, basestring):
                request = utils.jsonDecode(data)
            else:
                request = data
        except ValueError as e:
            response = "InvalidArgumentsError(): " + repr(e)
            raise InvalidRequestResponse({"result": response})

        if not isinstance(request, list):
            request = [request]

        response = []
        for nativeMetric in request:
            try:
                validate(nativeMetric, _CUSTOM_MODEL_CREATION_SCHEMA)
            except ValidationError as e:
                # Catch ValidationError if validation fails
                # InvalidRequestResponse produces an HTTP 400 error code
                response = "InvalidArgumentsError(): " + repr(e)
                raise InvalidRequestResponse({"result": response})
    else:
        # Metric data is missing
        log.error("Data is missing in request, raising BadRequest exception")
        raise web.badrequest("Metric data is missing")

    try:
        self.addStandardHeaders()
        metricRowList = self.createModels(data)
        metricDictList = [formatMetricRowProxy(metricRow)
                          for metricRow in metricRowList]
        response = utils.jsonEncode(metricDictList)
        raise web.created(response)
    except web.HTTPError as ex:
        if bool(re.match("([45][0-9][0-9])\s?", web.ctx.status)):
            # Log 400-599 status codes as errors, ignoring 200-399
            log.error(str(ex) or repr(ex))
        raise
    except Exception as ex:
        log.exception("PUT Failed")
        raise web.internalerror(str(ex) or repr(ex))
def _PUT(self, *param, **params):
    """<comment-ja>
    Japanese Comment
    </comment-ja>
    <comment-en>
    TODO: English Comment
    </comment-en>
    """
    (host_id, guest_id) = self.chk_guestby1(param)
    if guest_id is None:
        return web.notfound()

    if not validates_graphics(self):
        return web.badrequest(self.view.alert)

    model = findbyguest1(self.orm, guest_id)

    # virt
    kvc = KaresansuiVirtConnection()
    try:
        domname = kvc.uuid_to_domname(model.uniq_key)
        if not domname:
            return web.conflict(web.ctx.path)
        virt = kvc.search_kvg_guests(domname)[0]
        info = virt.get_graphics_info()["setting"]
        used_ports = kvc.list_used_graphics_port()
        origin_port = info["port"]
    finally:
        kvc.close()

    options = {}
    options["name"] = domname
    if self.input.change_passwd == "random":
        options["random-passwd"] = None
    elif self.input.change_passwd == "empty":
        options["passwd"] = ""
    options["port"] = self.input.port
    options["listen"] = self.input.listen
    options["keymap"] = self.input.keymap
    options["type"] = self.input.graphics_type

    if int(self.input.port) != origin_port and int(self.input.port) in used_ports:
        return web.badrequest("Graphics port number has been already used by other service. - port=%s" % (self.input.port,))

    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], VIRT_COMMAND_SET_GRAPHICS),
        options)

    cmdname = "Set Graphics"
    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname, 0, _cmd))

    _machine2jobgroup = m2j_new(machine=model,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )

    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )

    return web.created(None)
def POST(self, autostackId, data=None):  # pylint: disable=C0103,R0201
    """
    Create one or more Autostack Metric(s)

    ::

        POST /_autostacks/{autostackId}/metrics

        [
            {
                "namespace": "AWS/EC2",
                "metric": "CPUUtilization"
            },
            ...
        ]

    Request body is a list of items, each of which are a subset of the
    standard cloudwatch native metric, specifying only:

    :param namespace: AWS Namespace
    :type namespace: str
    :param metric: AWS Metric name
    :type metric: str

    `datasource`, `region`, and `dimensions` normally required when creating
    models are not necessary.
    """
    try:
        self.addStandardHeaders()
        with web.ctx.connFactory() as conn:
            autostackRow = repository.getAutostack(conn, autostackId)
        data = data or utils.jsonDecode(web.data())

        for nativeMetric in data:
            try:
                if nativeMetric["namespace"] == "Autostacks":
                    slaveDatasource = "autostack"
                else:
                    slaveDatasource = "cloudwatch"  # only support cloudwatch for now

                modelParams = {}
                # Copy the optional bounds only when both are supplied.
                if "min" in nativeMetric and "max" in nativeMetric:
                    modelParams["min"] = nativeMetric["min"]
                    modelParams["max"] = nativeMetric["max"]

                modelSpec = {
                    "datasource": "autostack",
                    "metricSpec": {
                        "autostackId": autostackRow.uid,
                        "slaveDatasource": slaveDatasource,
                        "slaveMetric": nativeMetric
                    },
                    "modelParams": modelParams
                }

                metricId = (createAutostackDatasourceAdapter()
                            .monitorMetric(modelSpec))
                with web.ctx.connFactory() as conn:
                    metricRow = repository.getMetric(conn, metricId)
                metricDict = convertMetricRowToMetricDict(metricRow)

            except KeyError:
                raise web.badrequest("Missing details in request")

            except ValueError:
                response = {"result": "failure"}
                raise web.badrequest(utils.jsonEncode(response))

        response = {"result": "success", "metric": metricDict}
        raise web.created(utils.jsonEncode(response))

    except ObjectNotFoundError:
        raise web.notfound("Autostack not found: Autostack ID: %s" % autostackId)
    except (web.HTTPError) as ex:
        if bool(re.match(r"([45][0-9][0-9])\s?", web.ctx.status)):
            # Log 400-599 status codes as errors, ignoring 200-399
            log.error(str(ex) or repr(ex))
        raise
    except Exception as ex:
        log.exception("POST Failed")
        raise web.internalerror(str(ex) or repr(ex))
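A short aside on the bounds check above: the tempting shorthand "min" and "max" in nativeMetric parses as "min" and ("max" in nativeMetric), so only the "max" key would actually be tested; both keys have to be checked explicitly. A standalone illustration of the pitfall:

d = {"max": 5000.0}                   # "min" is absent
print("min" and "max" in d)           # True  -- "min" is a truthy string, only "max" is checked
print("min" in d and "max" in d)      # False -- the intended check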
def POST(self):
    """
    Create new Annotation

    Request::

        POST /_annotations

        {
            "device": "1231AC32FE",
            "timestamp": "2013-08-27 16:45:00",
            "user": "******",
            "server": " AWS/EC2/i-12345678",
            "message": "The CPU Utilization was high ...",
            "data": { JSON Object }
        }

    :param device: Device ID if the annotation was created by the mobile app
                   or Service UID if the annotation was created by a service
    :param timestamp: The date and time to be annotated
    :param user: User name who created the annotation if the annotation was
                 created by the mobile app or service name if the annotation
                 was created by a service
    :param server: Instance ID associated with the annotation
    :param message: Annotation message (Optional if data is provided)
    :param data: Service specific data associated with this annotation
                 (Optional if message is provided)

    Response::

        HTTP Status 201 Created

        {
            "uid": "2a123bb1dd4d46e7a806d62efc29cbb9",
            "device": "1231AC32FE",
            "created": "2013-08-27 16:46:51",
            "timestamp": "2013-08-27 16:45:00",
            "user": "******",
            "server": " AWS/EC2/i-12345678",
            "message": "The CPU Utilization was high ...",
            "data": { JSON Object }
        }
    """
    self.addStandardHeaders()
    webdata = web.data()
    if webdata:
        try:
            if isinstance(webdata, basestring):
                webdata = utils.jsonDecode(webdata)
        except ValueError as e:
            raise web.badrequest("Invalid JSON in request: " + repr(e))

        if "device" in webdata:
            device = webdata["device"]
        else:
            raise web.badrequest("Missing 'device' in request")

        if "timestamp" in webdata:
            timestamp = webdata["timestamp"]
        else:
            raise web.badrequest("Missing 'timestamp' in request")

        if "user" in webdata:
            user = webdata["user"]
        else:
            raise web.badrequest("Missing 'user' in request")

        if "server" in webdata:
            server = webdata["server"]
        else:
            raise web.badrequest("Missing 'server' in request")

        if "message" in webdata:
            message = webdata["message"]
        else:
            message = None

        if "data" in webdata:
            data = webdata["data"]
        else:
            data = None

        if data is None and message is None:
            raise web.badrequest("Annotation must contain either 'message' or 'data'")

        # Lower timestamp resolution to seconds because the database rounds up
        # microseconds to the nearest second.
        created = datetime.datetime.utcnow().replace(microsecond=0)
        uid = utils.createGuid()

        try:
            with web.ctx.connFactory() as conn:
                repository.addAnnotation(conn=conn, timestamp=timestamp,
                                         device=device, user=user,
                                         server=server, message=message,
                                         data=data, created=created, uid=uid)

            # Prepare response with generated "uid" and "created" fields filled
            response = utils.jsonEncode({
                "uid": uid,
                "created": created,
                "device": device,
                "timestamp": timestamp,
                "user": user,
                "server": server,
                "message": message,
                "data": data,
            })
            raise web.created(response)
        except app_exceptions.ObjectNotFoundError as ex:
            raise web.badrequest(str(ex) or repr(ex))
def POST(self):
    i = web.input()
    n = db.insert('response', complaint=i.complaint, advice=i.advice, advisor=i.advisor)
    web.created(headers=dict(Location="/advice/" + str(n)))
def _POST(self, *param, **params):
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    if not validates_watch(self):
        self.logger.debug("Set watch failed. Did not validate.")
        return web.badrequest(self.view.alert)

    plugin = self.input.watch_target
    plugin_instance = None
    type = None
    type_instance = None
    plugin_ds = None
    libvirt_host = None

    if plugin == COLLECTD_PLUGIN_CPU:
        # cpu method
        plugin_instance = string.atoi(self.input.logical_cpu_number) - 1
        type_instance = self.input.cpu_status
        type = COLLECTD_CPU_TYPE
        plugin_ds = COLLECTD_CPU_DS
    elif plugin == COLLECTD_PLUGIN_MEMORY:
        # memory method
        type_instance = self.input.memory_status
        type = COLLECTD_MEMORY_TYPE
        plugin_ds = COLLECTD_MEMORY_DS
    elif plugin == COLLECTD_PLUGIN_DF:
        # df method
        type = COLLECTD_DF_TYPE
        type_instance = self.input.df_target_fs
        type_instance = re.sub(r'^/dev/', '', type_instance)
        type_instance = re.sub(r'/', '_', type_instance)
        plugin_ds = self.input.df_disk_status
    elif plugin == COLLECTD_PLUGIN_INTERFACE:
        # interface method
        type = self.input.network_status
        type_instance = self.input.network_target_interface
        plugin_ds = self.input.network_direction
    elif plugin == COLLECTD_PLUGIN_LIBVIRT:
        # libvirt method
        libvirt_host = self.input.libvirt_target_machine
        if self.input.libvirt_target == "cpu":
            if self.input.libvirt_vcpu_target == "total":
                type = COLLECTD_LIBVIRT_TYPE['CPU_TOTAL']
            else:
                type = COLLECTD_LIBVIRT_TYPE['VCPU']
                type_instance = self.input.libvirt_vcpu_target
            plugin_ds = COLLECTD_CPU_DS
        elif self.input.libvirt_target == "disk":
            type = COLLECTD_LIBVIRT_TYPE['DISK_OCTETS']
            type_instance = self.input.libvirt_disk_target
            plugin_ds = self.input.libvirt_disk_value_type
        elif self.input.libvirt_target == "network":
            type = "if_" + self.input.libvirt_network_status
            type_instance = self.input.libvirt_target_interface
            plugin_ds = self.input.libvirt_network_direction
    elif plugin == COLLECTD_PLUGIN_LOAD:
        # load method
        type = COLLECTD_LOAD_TYPE
        plugin_ds = self.input.load_term
    else:
        self.logger.debug("Set watch failed. Unknown plugin type.")
        return web.badrequest()

    plugin_selector = create_plugin_selector(plugin_instance, type, type_instance,
                                             plugin_ds, libvirt_host)

    ## text
    continuation_count = self.input.continuation_count
    prohibition_period = self.input.prohibition_period
    threshold_val1 = self.input.threshold_val1
    threshold_val2 = self.input.threshold_val2
    threshold_type = self.input.threshold_type

    if is_param(self.input, 'warning_script'):
        warning_script = self.input.warning_script
    else:
        warning_script = ""
    if is_param(self.input, 'warning_mail_body'):
        warning_mail_body = self.input.warning_mail_body
    else:
        warning_mail_body = ""

    if is_param(self.input, 'failure_script'):
        failure_script = self.input.failure_script
    else:
        failure_script = ""
    if is_param(self.input, 'failure_mail_body'):
        failure_mail_body = self.input.failure_mail_body
    else:
        failure_mail_body = ""

    if is_param(self.input, 'okay_script'):
        okay_script = self.input.okay_script
    else:
        okay_script = ""
    if is_param(self.input, 'okay_mail_body'):
        okay_mail_body = self.input.okay_mail_body
    else:
        okay_mail_body = ""

    if is_param(self.input, 'notify_mail_to'):
        notify_mail_to = self.input.notify_mail_to
    else:
        notify_mail_to = ""
    if is_param(self.input, 'notify_mail_from'):
        notify_mail_from = self.input.notify_mail_from
    else:
        notify_mail_from = ""

    ## bool
    bool_input_key = ["use_percentage",
                      "enable_warning_mail",
                      "enable_failure_mail",
                      "enable_okay_mail",
                      "enable_warning_script",
                      "enable_failure_script",
                      "enable_okay_script",
                      ]
    bool_values = {}
    for key in bool_input_key:
        if self.input.has_key(key):
            bool_values.update({key: True})
        else:
            bool_values.update({key: False})

    if threshold_type == "max":
        warning_value = create_threshold_value(min_value=None, max_value=threshold_val1)
        failure_value = create_threshold_value(min_value=None, max_value=threshold_val2)
    elif threshold_type == "min":
        warning_value = create_threshold_value(min_value=threshold_val2, max_value=None)
        failure_value = create_threshold_value(min_value=threshold_val1, max_value=None)
    else:
        self.logger.debug("Set watch failed. Unknown threshold type.")
        return web.badrequest()

    machine = m_findby1(self.orm, host_id)
    if w_is_uniq_duplication(self.orm, machine, plugin, plugin_selector) is True:
        self.logger.debug("Set watch failed. Duplicate watch DB.")
        return web.badrequest("Set watch failed. Duplication watch")

    _watch = w_new(created_user=self.me,
                   modified_user=self.me,
                   name=self.input.watch_name,
                   plugin=plugin,
                   plugin_selector=plugin_selector,
                   karesansui_version=get_karesansui_version(),
                   collectd_version=get_collectd_version(),
                   machine=machine,
                   continuation_count=continuation_count,
                   prohibition_period=prohibition_period,
                   warning_value=warning_value,
                   is_warning_percentage=bool_values.get("use_percentage"),
                   is_warning_script=bool_values.get("enable_warning_script"),
                   warning_script=warning_script,
                   is_warning_mail=bool_values.get("enable_warning_mail"),
                   warning_mail_body=warning_mail_body,
                   failure_value=failure_value,
                   is_failure_percentage=bool_values.get("use_percentage"),
                   is_failure_script=bool_values.get("enable_failure_script"),
                   failure_script=failure_script,
                   is_failure_mail=bool_values.get("enable_failure_mail"),
                   failure_mail_body=failure_mail_body,
                   is_okay_script=bool_values.get("enable_okay_script"),
                   okay_script=okay_script,
                   is_okay_mail=bool_values.get("enable_okay_mail"),
                   okay_mail_body=okay_mail_body,
                   notify_mail_to=notify_mail_to,
                   notify_mail_from=notify_mail_from,
                   is_deleted=False,
                   )
    w_save(self.orm, _watch)

    modules = ["collectdplugin"]
    host = m_findbyhost1(self.orm, host_id)
    extra_args = {'include': '^threshold_'}
    #extra_args = {}
    dop = read_conf(modules, webobj=self, machine=host, extra_args=extra_args)
    if dop is False:
        self.logger.debug("Set watch failed. Failed read conf.")
        return web.internalerror('Internal Server Error. (Read Conf)')

    params = {}
    if threshold_type == "max":
        params['WarningMax'] = str(threshold_val1)
        params['FailureMax'] = str(threshold_val2)
    elif threshold_type == "min":
        params['WarningMin'] = str(threshold_val2)
        params['FailureMin'] = str(threshold_val1)
    params['Percentage'] = str(bool_values.get("use_percentage")).lower()
    params['Persist'] = "true"

    set_threshold(plugin, plugin_selector, params, dop=dop, webobj=self, host=host)

    extra_args = {}
    command = "/etc/init.d/collectd condrestart"
    extra_args = {"post-command": command}

    retval = write_conf(dop, webobj=self, machine=host, extra_args=extra_args)
    if retval is False:
        self.logger.debug("Set watch failed. Failed write conf.")
        return web.internalerror('Internal Server Error. (Write Conf)')

    return web.created(None)
def POST(self):  # pylint: disable=C0103
    r"""
    Create an Autostack

    ::

        POST /_autostacks

        {
            "name": {name},
            "region": {region},
            "filters": {
                "tag:{Name}": ["{value}", "{value}", ...],
                "tag:{Description}": ["{value}", "{value}", ...],
                "tag:{etc}": ["{value}", "{value}", ...]
            }
        }

    Request body must be a dictionary that includes:

    :param name: Unique autostack name
    :type name: str
    :param region: AWS region
    :type region: str
    :param filters: AWS Tag value pattern
    :type filters: dict

    The creation request will be rejected if the filters match more than
    MAX_INSTANCES_PER_AUTOSTACK.

    From
    http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Filtering.html:

    ::

        You can also use wildcards with the filter values. An asterisk (*)
        matches zero or more characters, and a question mark (?) matches
        exactly one character. For example, you can use *database* as a
        filter value to get all EBS snapshots that include database in the
        description. If you were to specify database as the filter value,
        then only snapshots whose description equals database would be
        returned. Filter values are case sensitive. We support only exact
        string matching, or substring matching (with wildcards).

        Tip
        Your search can include the literal values of the wildcard
        characters; you just need to escape them with a backslash before the
        character. For example, a value of \*numenta\?\\ searches for the
        literal string *numenta?\.
    """
    try:
        self.addStandardHeaders()
        data = web.data()
        if not data:
            raise web.badrequest("Metric data is missing")
        nativeMetric = utils.jsonDecode(data)
        try:
            stackSpec = {
                "name": nativeMetric["name"],
                "aggSpec": {
                    "datasource": "cloudwatch",  # only support cloudwatch for now
                    "region": nativeMetric["region"],
                    "resourceType": "AWS::EC2::Instance",  # only support EC2 for now
                    "filters": nativeMetric["filters"]
                }
            }
            adapter = createAutostackDatasourceAdapter()

            with web.ctx.connFactory() as conn:
                checkQuotaForInstanceAndRaise(conn, None)

            autostack = adapter.createAutostack(stackSpec)
            result = dict(autostack.items())
        except DuplicateRecordError:
            # TODO [MER-3543]: Make sure this actually gets hit
            raise web.internalerror(
                "The name you are trying to use, '%s', is already in use in AWS "
                "region '%s'. Please enter a unique Autostack name."
                % (nativeMetric.get("name", "None"),
                   nativeMetric.get("region", "None")))
        raise web.created(utils.jsonEncode(result))
    except (web.HTTPError, QuotaError) as ex:
        if bool(re.match(r"([45][0-9][0-9])\s?", web.ctx.status)):
            # Log 400-599 status codes as errors, ignoring 200-399
            log.error(str(ex) or repr(ex))
        raise
    except Exception as ex:
        log.exception("POST Failed")
        raise web.internalerror(str(ex) or repr(ex))
def POST(self, name):
    """
    >>> document = u'<entry><title>Hello World!</title><author><name>Stefan Freudenberg</name></author><updated>2011-04-10T15:10:00+0200</updated><category term=\"webpy\"/><content>Lorem ipsum dolor sit</content></entry>'
    >>> headers = {'Slug': 'hello-world', 'Content-Type': 'application/atom+xml;type=entry'}
    >>> req = app_atom.request('/collection/entries', method='POST', headers=headers, data=document)
    >>> req.status
    '201 Created'
    >>> req.headers['Location']
    'http://0.0.0.0:8080/document/hello-world'
    >>> req.headers['Location'] == req.headers['Content-Location']
    True
    >>> req.headers['Content-Type']
    'application/atom+xml;type=entry;charset=\"utf-8\"'
    >>> len(req.data) == int(req.headers['Content-Length'])
    True
    >>> len(req.data) > len(document)
    True
    """
    if name not in db.collection_names():
        raise web.notfound()
    collection = db[name]
    if web.ctx.env.get('CONTENT_TYPE') != 'application/atom+xml;type=entry':
        raise web.notacceptable()
    entry = objectify.fromstring(web.data())
    if 'HTTP_SLUG' in web.ctx.env:
        slug = defaultfilters.slugify(web.ctx.env.get('HTTP_SLUG'))
    else:
        slug = defaultfilters.slugify(entry.title.text)
    try:
        entry.updated
    except AttributeError:
        entry.updated = A.updated(datetime.datetime.now())
    entries = db.entries
    entries.insert({
        'slug': slug,
        'title': entry.title.text,
        'updated': entry.updated.text,
        'author': entry.author.name.text,
        'content': etree.tostring(entry.content),
        'categories': [cat.get('term') for cat in entry.category]
    })
    location = web.ctx.home + '/document/' + slug
    tree = xmlify(db.entries.find_one({'slug': slug}), web.ctx.home + '/document/')
    body = etree.tostring(tree)
    web.header('Location', location)
    web.header('Content-Type', 'application/atom+xml;type=entry;charset="utf-8"')
    web.header('Content-Length', len(body))
    web.header('Content-Location', location)
    raise web.created(body)
def _POST(self, *param, **params):
    host_id = self.chk_hostby1(param)
    if host_id is None:
        return web.notfound()

    if not validates_watch(self):
        self.logger.debug("Set watch failed. Did not validate.")
        return web.badrequest(self.view.alert)

    plugin = self.input.watch_target
    plugin_instance = None
    type = None
    type_instance = None
    plugin_ds = None
    libvirt_host = None

    if plugin == COLLECTD_PLUGIN_CPU:
        # cpu method
        plugin_instance = string.atoi(self.input.logical_cpu_number) - 1
        type_instance = self.input.cpu_status
        type = COLLECTD_CPU_TYPE
        plugin_ds = COLLECTD_CPU_DS

    elif plugin == COLLECTD_PLUGIN_MEMORY:
        # memory method
        type_instance = self.input.memory_status
        type = COLLECTD_MEMORY_TYPE
        plugin_ds = COLLECTD_MEMORY_DS

    elif plugin == COLLECTD_PLUGIN_DF:
        # df method
        type = COLLECTD_DF_TYPE
        type_instance = self.input.df_target_fs
        type_instance = re.sub(r'^/dev/', '', type_instance)
        type_instance = re.sub(r'/', '_', type_instance)
        plugin_ds = self.input.df_disk_status

    elif plugin == COLLECTD_PLUGIN_INTERFACE:
        # interface method
        type = self.input.network_status
        type_instance = self.input.network_target_interface
        plugin_ds = self.input.network_direction

    elif plugin == COLLECTD_PLUGIN_LIBVIRT:
        # libvirt method
        libvirt_host = self.input.libvirt_target_machine
        if self.input.libvirt_target == "cpu":
            if self.input.libvirt_vcpu_target == "total":
                type = COLLECTD_LIBVIRT_TYPE['CPU_TOTAL']
            else:
                type = COLLECTD_LIBVIRT_TYPE['VCPU']
                type_instance = self.input.libvirt_vcpu_target
            plugin_ds = COLLECTD_CPU_DS
        elif self.input.libvirt_target == "disk":
            type = COLLECTD_LIBVIRT_TYPE['DISK_OCTETS']
            type_instance = self.input.libvirt_disk_target
            plugin_ds = self.input.libvirt_disk_value_type
        elif self.input.libvirt_target == "network":
            type = "if_" + self.input.libvirt_network_status
            type_instance = self.input.libvirt_target_interface
            plugin_ds = self.input.libvirt_network_direction

    elif plugin == COLLECTD_PLUGIN_LOAD:
        # load method
        type = COLLECTD_LOAD_TYPE
        plugin_ds = self.input.load_term

    else:
        self.logger.debug("Set watch failed. Unknown plugin type.")
        return web.badrequest()

    plugin_selector = create_plugin_selector(plugin_instance, type, type_instance,
                                             plugin_ds, libvirt_host)

    ## text
    continuation_count = self.input.continuation_count
    prohibition_period = self.input.prohibition_period
    threshold_val1 = self.input.threshold_val1
    threshold_val2 = self.input.threshold_val2
    threshold_type = self.input.threshold_type

    if is_param(self.input, 'warning_script'):
        warning_script = self.input.warning_script
    else:
        warning_script = ""
    if is_param(self.input, 'warning_mail_body'):
        warning_mail_body = self.input.warning_mail_body
    else:
        warning_mail_body = ""
    if is_param(self.input, 'failure_script'):
        failure_script = self.input.failure_script
    else:
        failure_script = ""
    if is_param(self.input, 'failure_mail_body'):
        failure_mail_body = self.input.failure_mail_body
    else:
        failure_mail_body = ""
    if is_param(self.input, 'okay_script'):
        okay_script = self.input.okay_script
    else:
        okay_script = ""
    if is_param(self.input, 'okay_mail_body'):
        okay_mail_body = self.input.okay_mail_body
    else:
        okay_mail_body = ""
    if is_param(self.input, 'notify_mail_to'):
        notify_mail_to = self.input.notify_mail_to
    else:
        notify_mail_to = ""
    if is_param(self.input, 'notify_mail_from'):
        notify_mail_from = self.input.notify_mail_from
    else:
        notify_mail_from = ""

    ## bool
    bool_input_key = ["use_percentage", "enable_warning_mail",
                      "enable_failure_mail", "enable_okay_mail",
                      "enable_warning_script", "enable_failure_script",
                      "enable_okay_script"]
    bool_values = {}
    for key in bool_input_key:
        if self.input.has_key(key):
            bool_values.update({key: True})
        else:
            bool_values.update({key: False})

    if threshold_type == "max":
        warning_value = create_threshold_value(min_value=None, max_value=threshold_val1)
        failure_value = create_threshold_value(min_value=None, max_value=threshold_val2)
    elif threshold_type == "min":
        warning_value = create_threshold_value(min_value=threshold_val2, max_value=None)
        failure_value = create_threshold_value(min_value=threshold_val1, max_value=None)
    else:
        self.logger.debug("Set watch failed. Unknown threshold type.")
        return web.badrequest()

    machine = m_findby1(self.orm, host_id)

    if w_is_uniq_duplication(self.orm, machine, plugin, plugin_selector) is True:
        self.logger.debug("Set watch failed. Duplicate watch DB.")
        return web.badrequest("Set watch failed. Duplication watch")

    _watch = w_new(created_user=self.me,
                   modified_user=self.me,
                   name=self.input.watch_name,
                   plugin=plugin,
                   plugin_selector=plugin_selector,
                   karesansui_version=get_karesansui_version(),
                   collectd_version=get_collectd_version(),
                   machine=machine,
                   continuation_count=continuation_count,
                   prohibition_period=prohibition_period,
                   warning_value=warning_value,
                   is_warning_percentage=bool_values.get("use_percentage"),
                   is_warning_script=bool_values.get("enable_warning_script"),
                   warning_script=warning_script,
                   is_warning_mail=bool_values.get("enable_warning_mail"),
                   warning_mail_body=warning_mail_body,
                   failure_value=failure_value,
                   is_failure_percentage=bool_values.get("use_percentage"),
                   is_failure_script=bool_values.get("enable_failure_script"),
                   failure_script=failure_script,
                   is_failure_mail=bool_values.get("enable_failure_mail"),
                   failure_mail_body=failure_mail_body,
                   is_okay_script=bool_values.get("enable_okay_script"),
                   okay_script=okay_script,
                   is_okay_mail=bool_values.get("enable_okay_mail"),
                   okay_mail_body=okay_mail_body,
                   notify_mail_to=notify_mail_to,
                   notify_mail_from=notify_mail_from,
                   is_deleted=False,
                   )
    w_save(self.orm, _watch)

    modules = ["collectdplugin"]
    host = m_findbyhost1(self.orm, host_id)

    extra_args = {'include': '^threshold_'}
    #extra_args = {}
    dop = read_conf(modules, webobj=self, machine=host, extra_args=extra_args)
    if dop is False:
        self.logger.debug("Set watch failed. Failed read conf.")
        return web.internalerror('Internal Server Error. (Read Conf)')

    params = {}
    if threshold_type == "max":
        params['WarningMax'] = str(threshold_val1)
        params['FailureMax'] = str(threshold_val2)
    elif threshold_type == "min":
        params['WarningMin'] = str(threshold_val2)
        params['FailureMin'] = str(threshold_val1)
    params['Percentage'] = str(bool_values.get("use_percentage")).lower()
    params['Persist'] = "true"
    set_threshold(plugin, plugin_selector, params, dop=dop, webobj=self, host=host)

    extra_args = {}
    command = "/etc/init.d/collectd condrestart"
    extra_args = {"post-command": command}

    retval = write_conf(dop, webobj=self, machine=host, extra_args=extra_args)
    if retval is False:
        self.logger.debug("Set watch failed. Failed write conf.")
        return web.internalerror('Internal Server Error. (Write Conf)')

    return web.created(None)

def PUT(self, modelId=None):
    """
    Create Model

    ::

        POST /_models

    Data: Use the metric as returned by the datasource metric list.

    For example, create a Cloudwatch model as follows:

    ::

        curl http://localhost:8081/_models -X POST -d '
        {
          "region": "us-east-1",
          "namespace": "AWS/EC2",
          "datasource": "cloudwatch",
          "metric": "CPUUtilization",
          "dimensions": {
            "InstanceId": "i-12345678"
          }
        }'

    Or to create a Grok custom model, include the following data in the
    POST request (uid is the same for the metric and model):

    ::

        {
          "uid": "2a123bb1dd4d46e7a806d62efc29cbb9",
          "datasource": "custom",
          "min": 0.0,
          "max": 5000.0
        }

    The "min" and "max" options are optional for both Cloudwatch and Grok
    custom metrics.
    """
    if modelId:
        # ModelHandler is overloaded to handle both single-model requests, and
        # multiple-model requests. As a result, if a user makes a POST, or PUT
        # request, it's possible that the request can be routed to this handler
        # if the url pattern matches. This specific POST handler is not meant
        # to operate on a known model, therefore, raise an exception, and return
        # a `405 Method Not Allowed` response.
        raise NotAllowedResponse({"result": "Not supported"})

    data = web.data()

    if data:
        try:
            if isinstance(data, basestring):
                request = utils.jsonDecode(data)
            else:
                request = data
        except ValueError as e:
            response = "InvalidArgumentsError(): " + repr(e)
            raise InvalidRequestResponse({"result": response})

        if not isinstance(request, list):
            request = [request]

        response = []
        for nativeMetric in request:
            try:
                # Attempt to validate the request data against a schema
                # TODO: Move this logic into datasource-specific adapters
                if ("type" in nativeMetric.keys() and
                        nativeMetric["type"] == "autostack"):
                    validate(nativeMetric, _AUTOSTACK_CREATION_SCHEMA)
                elif nativeMetric["datasource"] == "custom":
                    validate(nativeMetric, _CUSTOM_MODEL_CREATION_SCHEMA)
                elif nativeMetric["datasource"] == "autostack":
                    validate(nativeMetric, _AUTOSTACK_MODEL_IMPORT_SCHEMA)
                else:
                    validate(nativeMetric, _CLOUDWATCH_MODEL_CREATION_SCHEMA)

                    # Perform additional cloudwatch-specific validation that can't be
                    # captured properly in schema.
                    if "metricSpec" in nativeMetric:
                        # New-style arg
                        metricSpec = nativeMetric["metricSpec"]
                    else:
                        # Legacy arg
                        metricSpec = nativeMetric

                    if (not isinstance(metricSpec["dimensions"], dict)
                            or not metricSpec["dimensions"]
                            or not all(key and value
                                       for (key, value)
                                       in metricSpec["dimensions"].iteritems())):
                        raise ValidationError("At least one dimension is required")
            except ValidationError as e:
                # Catch ValidationError if validation fails
                # InvalidRequestResponse produces an HTTP 400 error code
                response = "InvalidArgumentsError(): " + repr(e)
                raise InvalidRequestResponse({"result": response})
    else:
        # Metric data is missing
        log.error("Data is missing in request, raising BadRequest exception")
        raise web.badrequest("Metric data is missing")

    try:
        self.addStandardHeaders()
        metricRowList = self.createModels(data)
        metricDictList = [formatMetricRowProxy(metricRow)
                          for metricRow in metricRowList]
        response = utils.jsonEncode(metricDictList)
        raise web.created(response)
    except web.HTTPError as ex:
        if bool(re.match("([45][0-9][0-9])\s?", web.ctx.status)):
            # Log 400-599 status codes as errors, ignoring 200-399
            log.error(str(ex) or repr(ex))
        raise
    except Exception as ex:
        log.exception("PUT Failed")
        raise web.internalerror(str(ex) or repr(ex))

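The docstring above documents the request body with curl; a Python client doing the same thing might look like the sketch below. It assumes the service is listening on http://localhost:8081 as in the docstring and that `requests` is installed; the uid and min/max values are placeholders taken from the docstring, not real metrics.

# Sketch of a client call matching the docstring above (assumptions: the API
# is reachable at http://localhost:8081 and accepts the documented JSON body).
import json
import requests

custom_model = {
    "uid": "2a123bb1dd4d46e7a806d62efc29cbb9",  # placeholder metric uid
    "datasource": "custom",
    "min": 0.0,     # optional
    "max": 5000.0,  # optional
}

resp = requests.post("http://localhost:8081/_models",
                     data=json.dumps(custom_model),
                     headers={"Content-Type": "application/json"})
print(resp.status_code)  # expected 201 on success
print(resp.json())       # expected to be a list describing the created model(s)
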
def PUT(self, deviceId):
    """
    Create, or update notification settings for device.

    ::

        PUT /_notifications/{deviceId}/settings

        {
          "email_addr": "*****@*****.**",
          "windowsize": 3600,
          "sensitivity": 0.99999
        }

    :param email_addr: Target email address associated with device
    :type email_addr: string

    :param windowsize: Notification window in seconds during which no other
        notifications for a given instance should be sent to a given device
    :type windowsize: int

    :param sensitivity: Anomaly score threshold that should trigger a
        notification
    :type sensitivity: float
    """
    data = web.data()
    if data:
        data = utils.jsonDecode(data) if isinstance(data, basestring) else data

        try:
            with web.ctx.connFactory() as conn:
                settingsRow = repository.getDeviceNotificationSettings(conn, deviceId)
            settingsDict = dict([(col.name, settingsRow[col.name])
                                 for col in schema.notification_settings.c])
        except ObjectNotFoundError:
            settingsDict = None

        if settingsDict:
            # Update existing
            changes = dict()

            if "windowsize" in data:
                changes["windowsize"] = data["windowsize"]

            if "sensitivity" in data:
                changes["sensitivity"] = data["sensitivity"]

            if "email_addr" in data:
                changes["email_addr"] = data["email_addr"]

            if changes:
                log.info("Notification settings updated for email=%s, "
                         "deviceid=%s, %r",
                         anonymizeEmail(settingsDict["email_addr"]),
                         deviceId,
                         changes.keys())
                with web.ctx.connFactory() as conn:
                    repository.updateDeviceNotificationSettings(conn, deviceId, changes)

            self.addStandardHeaders()
            for (header, value) in web.ctx.headers:
                if header == "Content-Type":
                    web.ctx.headers.remove((header, value))
            raise web.HTTPError(status="204 No Content")
        else:
            # Create new settings
            if "windowsize" in data:
                windowsize = data["windowsize"]
            else:
                windowsize = 60 * 60  # TODO: Configurable default

            if "sensitivity" in data:
                sensitivity = data["sensitivity"]
            else:
                sensitivity = 0.99999  # TODO: Configurable default

            if "email_addr" in data:
                email_addr = data["email_addr"]
            else:
                email_addr = None

            with web.ctx.connFactory() as conn:
                repository.addDeviceNotificationSettings(conn, deviceId, windowsize,
                                                         sensitivity, email_addr)

            log.info("Notification settings created for deviceid=%s", deviceId)
            self.addStandardHeaders()
            raise web.created("")
    else:
        # Metric data is missing
        log.error("Data is missing in request, raising BadRequest exception")
        raise web.badrequest("Metric data is missing")

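A matching client call for the settings endpoint, again only a sketch: the base URL, device id, and email address are placeholder assumptions. A first PUT should come back 201 Created and a later one that changes existing settings 204 No Content, mirroring the two branches above.

# Sketch of a client PUT against the notification settings handler above.
# Base URL, device id and email address are all placeholder assumptions.
import json
import requests

device_id = "9a90eaf2-6374-4230-aa96-0830c0a737fe"  # hypothetical device
settings = {
    "email_addr": "ops@example.com",
    "windowsize": 3600,
    "sensitivity": 0.99999,
}

resp = requests.put(
    "http://localhost:8081/_notifications/%s/settings" % device_id,
    data=json.dumps(settings),
)

# 201 Created on first write, 204 No Content when existing settings are updated
print(resp.status_code)
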
def POST(self):  # pylint: disable=C0103
    r"""
    Create an Autostack

    ::

        POST /_autostacks

        {
          "name": {name},
          "region": {region},
          "filters": {
            "tag:{Name}": ["{value}", "{value}", ...],
            "tag:{Description}": ["{value}", "{value}", ...],
            "tag:{etc}": ["{value}", "{value}", ...]
          }
        }

    Request body must be a dictionary that includes:

    :param name: Unique autostack name
    :type name: str
    :param region: AWS region
    :type region: str
    :param filters: AWS Tag value pattern
    :type filters: dict

    The creation request will be rejected if the filters match more than
    MAX_INSTANCES_PER_AUTOSTACK.

    From http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Filtering.html:

    ::

        You can also use wildcards with the filter values. An asterisk (*)
        matches zero or more characters, and a question mark (?) matches
        exactly one character. For example, you can use *database* as a
        filter value to get all EBS snapshots that include database in the
        description. If you were to specify database as the filter value,
        then only snapshots whose description equals database would be
        returned. Filter values are case sensitive. We support only exact
        string matching, or substring matching (with wildcards).

        Tip
            Your search can include the literal values of the wildcard
            characters; you just need to escape them with a backslash before
            the character. For example, a value of \*numenta\?\\ searches for
            the literal string *numenta?\.
    """
    try:
        self.addStandardHeaders()
        data = web.data()

        if not data:
            raise web.badrequest("Metric data is missing")

        nativeMetric = utils.jsonDecode(data)
        try:
            stackSpec = {
                "name": nativeMetric["name"],
                "aggSpec": {
                    "datasource": "cloudwatch",  # only support cloudwatch for now
                    "region": nativeMetric["region"],
                    "resourceType": "AWS::EC2::Instance",  # only support EC2 for now
                    "filters": nativeMetric["filters"]
                }
            }
            adapter = createAutostackDatasourceAdapter()

            with web.ctx.connFactory() as conn:
                checkQuotaForInstanceAndRaise(conn, None)

            autostack = adapter.createAutostack(stackSpec)
            result = dict(autostack.items())
        except DuplicateRecordError:
            # TODO [MER-3543]: Make sure this actually gets hit
            raise web.internalerror(
                "The name you are trying to use, '%s', is already in use in AWS "
                "region '%s'. Please enter a unique Autostack name." % (
                    nativeMetric.get("name", "None"),
                    nativeMetric.get("region", "None")))

        raise web.created(utils.jsonEncode(result))
    except (web.HTTPError, QuotaError) as ex:
        if bool(re.match(r"([45][0-9][0-9])\s?", web.ctx.status)):
            # Log 400-599 status codes as errors, ignoring 200-399
            log.error(str(ex) or repr(ex))
        raise
    except Exception as ex:
        log.exception("POST Failed")
        raise web.internalerror(str(ex) or repr(ex))

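The autostack endpoint takes a name, a region, and a tag-filter dictionary; the sketch below shows the shape of that request body. The name, region, and tag values are examples only, the URL is an assumption, and the quota and duplicate-name errors described above still apply server-side.

# Sketch of an autostack creation request matching the docstring above.
# Name, region and filter values are illustrative; the URL is an assumption.
import json
import requests

autostack = {
    "name": "web-tier",               # must be unique within the region
    "region": "us-west-2",
    "filters": {
        "tag:Name": ["*web*"],        # wildcard match on the Name tag
        "tag:Environment": ["production"],
    },
}

resp = requests.post("http://localhost:8081/_autostacks",
                     data=json.dumps(autostack))
print(resp.status_code)  # expected 201 Created on success
print(resp.json())       # expected to describe the new autostack
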
def POST(self, itemid):
    i = web.input()
    n = db.insert('response', complaint=itemid, advice=i.advice, advisor=i.advisor)
    web.created(headers=dict(Location="/advice/" + str(n)))
    return dict(results=["/advice/" + str(n)])

def _POST(self, *param, **params):
    if not validates_host_add(self):
        return web.badrequest(self.view.alert)

    if self.input.m_connect_type == "karesansui":
        uniq_key_check = findby1uniquekey(self.orm, self.input.m_uuid)
        if uniq_key_check is not None and config['application.uniqkey'] != self.input.m_uuid:
            return web.conflict(web.ctx.path)

        hostname_check = findby1hostname(self.orm, self.input.m_hostname)
        if hostname_check is not None:
            return web.conflict(web.ctx.path)

    # notebook
    note_title = None
    if is_param(self.input, "note_title"):
        note_title = self.input.note_title

    note_value = None
    if is_param(self.input, "note_value"):
        note_value = self.input.note_value

    _notebook = n_new(note_title, note_value)

    # tags
    _tags = None
    if is_param(self.input, "tags"):
        _tags = []
        tag_array = comma_split(self.input.tags)
        tag_array = uniq_sort(tag_array)
        for x in tag_array:
            if t_count(self.orm, x) == 0:
                _tags.append(t_new(x))
            else:
                _tags.append(t_name(self.orm, x))

    name = self.input.m_name

    if self.input.m_connect_type == "karesansui":
        uniq_key = self.input.m_uuid
        attribute = MACHINE_ATTRIBUTE['HOST']
        if is_param(self.input, "m_hostname"):
            hostname = self.input.m_hostname

    if self.input.m_connect_type == "libvirt":
        uniq_key = string_from_uuid(generate_uuid())
        attribute = MACHINE_ATTRIBUTE['URI']
        if is_param(self.input, "m_uri"):
            segs = uri_split(self.input.m_uri)
            if is_param(self.input, "m_auth_user") and self.input.m_auth_user:
                segs["user"] = self.input.m_auth_user
            if is_param(self.input, "m_auth_passwd") and self.input.m_auth_passwd:
                segs["passwd"] = self.input.m_auth_passwd
            hostname = uri_join(segs)

    model = findby1uniquekey(self.orm, uniq_key, is_deleted=True)
    if model is None:
        host = m_new(created_user=self.me,
                     modified_user=self.me,
                     uniq_key=uni_force(uniq_key),
                     name=name,
                     hostname=hostname,
                     attribute=attribute,
                     hypervisor=MACHINE_HYPERVISOR['REAL'],
                     notebook=_notebook,
                     tags=_tags,
                     icon=None,
                     is_deleted=False)

        m_save(self.orm, host)
        return web.created(None)
    else:
        model.name = name
        model.hostname = hostname
        model.uniq_key = uniq_key
        model.notebook.title = note_title
        model.notebook.value = note_value
        model.tags = _tags
        model.is_deleted = False

        m_update(self.orm, model)
        return web.created(None)

def _PUT(self, *param, **params):
    """<comment-ja>
    Japanese Comment
    </comment-ja>
    <comment-en>
    TODO: English Comment
    </comment-en>
    """
    (host_id, guest_id) = self.chk_guestby1(param)
    if guest_id is None:
        return web.notfound()

    if not validates_graphics(self):
        return web.badrequest(self.view.alert)

    model = findbyguest1(self.orm, guest_id)

    # virt
    kvc = KaresansuiVirtConnection()
    try:
        domname = kvc.uuid_to_domname(model.uniq_key)
        if not domname:
            return web.conflict(web.ctx.path)
        virt = kvc.search_kvg_guests(domname)[0]
        info = virt.get_graphics_info()["setting"]
        used_ports = kvc.list_used_graphics_port()
        origin_port = info["port"]
    finally:
        kvc.close()

    options = {}
    options["name"] = domname
    if self.input.change_passwd == "random":
        options["random-passwd"] = None
    elif self.input.change_passwd == "empty":
        options["passwd"] = ""
    options["port"] = self.input.port
    options["listen"] = self.input.listen
    options["keymap"] = self.input.keymap
    options["type"] = self.input.graphics_type

    if int(self.input.port) != origin_port and int(self.input.port) in used_ports:
        return web.badrequest("Graphics port number has been already used by other service. - port=%s" % (self.input.port,))

    _cmd = dict2command(
        "%s/%s" % (karesansui.config['application.bin.dir'], VIRT_COMMAND_SET_GRAPHICS),
        options)

    cmdname = "Set Graphics"
    _jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
    _jobgroup.jobs.append(Job('%s command' % cmdname, 0, _cmd))

    _machine2jobgroup = m2j_new(machine=model,
                                jobgroup_id=-1,
                                uniq_key=karesansui.sheconf['env.uniqkey'],
                                created_user=self.me,
                                modified_user=self.me,
                                )

    save_job_collaboration(self.orm,
                           self.pysilhouette.orm,
                           _machine2jobgroup,
                           _jobgroup,
                           )

    return web.created(None)