def _merge_cloud(json_data, clouddef_id=None):
    if json_data is None or 'name' not in json_data or 'cloudprovider_id' not in json_data:
        return json_status(False, 400, "Missing required parameters")
    with database.get_session() as session:
        new_cloud = CloudDef(
            name=json_data.get('name'),
            cloudprovider_id=json_data.get('cloudprovider_id'),
            virtual_device_layer_id=json_data.get('virtual_device_layer_id', None),
            test_layer_id=json_data.get('test_layer_id', None),
            client_startup_parameters=json_data.get('client_startup_parameters', None))
        # Treat "0" as unset so the layer foreign keys are stored as NULL.
        if str(new_cloud.virtual_device_layer_id) == "0":
            new_cloud.virtual_device_layer_id = None
        if str(new_cloud.test_layer_id) == "0":
            new_cloud.test_layer_id = None
        if clouddef_id is not None:
            new_cloud.clouddef_id = clouddef_id
        new_cloud = session.merge(new_cloud)
        session.commit()
        _merge_ec2(json_data, new_cloud)
        return json_status(True, url=api_restful.url_for(
            CloudApi, clouddef_id=new_cloud.clouddef_id))

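# Hedged usage sketch (not part of the API itself): roughly the JSON body a client
# could POST to reach _merge_cloud. The endpoint URL and host are assumptions for
# illustration only; the real route registration lives elsewhere in this project.
# As noted above, a layer id of "0"/0 is normalized to NULL server-side.
def _example_create_cloud_request():
    import requests  # assumed available in the client environment

    payload = {
        "name": "example-cloud",        # required
        "cloudprovider_id": 1,          # required
        "virtual_device_layer_id": 0,   # normalized to NULL by _merge_cloud
        "test_layer_id": None,
        "client_startup_parameters": "--verbose",
    }
    # Hypothetical endpoint URL, for illustration only.
    response = requests.post("http://localhost:5000/api/clouds", json=payload)
    return response.json()
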
def _merge_job(json_data, job_id=None):
    if json_data is None or 'name' not in json_data or 'clouddef_id' not in json_data:
        return json_status(False, 400, "Missing required parameters")
    with database.get_session() as session:
        new_job = Job(job_id=job_id,
                      clouddef_id=json_data.get('clouddef_id'),
                      run_layer_id=json_data.get('run_layer_id', None),
                      script_id=json_data.get('script_id', None),
                      source_datadef_id=json_data.get('source_datadef_id', None),
                      destination_datadef_id=json_data.get('destination_datadef_id', None),
                      name=json_data.get('name'),
                      task_timeout=json_data.get('task_timeout', 0))
        # Treat "0" as unset so the foreign keys are stored as NULL.
        if str(new_job.run_layer_id) == "0":
            new_job.run_layer_id = None
        if str(new_job.script_id) == "0":
            new_job.script_id = None
        # Merge will automatically add a new instance if the primary key does not already exist
        new_job = session.merge(new_job)
        session.commit()
        return json_status(True, url=api_restful.url_for(JobApi, job_id=new_job.job_id))

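# Minimal sketch of the session.merge() upsert behaviour relied on above: merge()
# inserts a new row when the primary key is not present yet and updates the existing
# row when it is. Standalone example (SQLAlchemy 1.4+ style) with a throwaway model
# and in-memory engine, not part of this module's schema.
def _example_merge_upsert():
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Demo(Base):
        __tablename__ = "demo"
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # Primary key 1 does not exist yet, so merge() behaves like an insert.
        session.merge(Demo(id=1, name="first"))
        session.commit()
        # Primary key 1 now exists, so merge() updates the row in place.
        session.merge(Demo(id=1, name="renamed"))
        session.commit()
        assert session.get(Demo, 1).name == "renamed"
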
def _merge_script(json_data, script_id=None):
    if json_data is None or 'name' not in json_data or 'content' not in json_data:
        return json_status(False, 400, "Missing required parameters")
    with database.get_session() as session:
        new_script = Script(script_id=script_id,
                            name=json_data['name'],
                            content=json_data['content'])
        new_script = session.merge(new_script)
        session.commit()
        return json_status(True, url=api_restful.url_for(
            ScriptApi, script_id=new_script.script_id))

def _merge_layer(json_data, layer_id=None):
    if json_data is None or 'name' not in json_data or 'content' not in json_data:
        return json_status(False, 400, "Missing required parameters")
    with database.get_session() as session:
        new_layer = Layer(layer_id=layer_id,
                          name=json_data.get('name'),
                          content=json_data.get('content'),
                          tag=json_data.get('tag', None),
                          parent_id=json_data.get('parent_id', None))
        # Treat "0" as "no parent" so the foreign key is stored as NULL.
        if str(new_layer.parent_id) == "0":
            new_layer.parent_id = None
        # Merge will automatically add a new instance if the primary key does not already exist
        new_layer = session.merge(new_layer)
        session.commit()
        return json_status(True, url=api_restful.url_for(LayerApi, layer_id=new_layer.layer_id))

def _merge_dataprovider(json_data, datadef_id=None):
    if json_data is None or 'name' not in json_data or 'dataprovider_id' not in json_data:
        return json_status(False, 400, "Missing required parameters")
    with database.get_session() as session:
        new_dataprovider = DataDef(
            name=json_data.get('name'),
            dataprovider_id=json_data.get('dataprovider_id'),
            source=json_data.get('source', None),
            source_path_filter=json_data.get('source_path_filter', None),
            source_filename_filter=json_data.get('source_filename_filter', None),
            destination=json_data.get('destination', None))
        if datadef_id is not None:
            new_dataprovider.datadef_id = datadef_id
        new_dataprovider = session.merge(new_dataprovider)
        session.commit()
        _merge_s3(json_data, new_dataprovider)
        return json_status(True, url=api_restful.url_for(
            DataProviderApi, datadef_id=new_dataprovider.datadef_id))

def post(self, clouddef_id, action):
    action = action.lower()
    json_data = request.get_json() or {}
    cm = CloudManager(clouddef_id)
    if action == "createsnapshot":
        # Default the snapshot name to the cloud definition's name.
        if 'name' in json_data:
            snapshot_name = json_data['name']
        else:
            inst = _get_cloud(clouddef_id)[0]
            snapshot_name = inst.name
        cm.deploy_to_snapshot(snapshot_name, run_async=True)
    elif action == "startinstance":
        if 'number_of_instances' not in json_data:
            return json_status(False, 400, "Missing required parameters")
        number_of_instances = json_data['number_of_instances']
        instance_type = json_data.get('instance_type', None)
        cm.launch_instances(number_of_instances, instance_type, run_async=True)
    elif action == "refreshinstances":
        cm.check_instance_database_cache()
    elif action == "startdeploymentinstance":
        # Start an instance and begin the deployment, but do not take a snapshot automatically.
        instance_type = json_data.get('instance_type', 't2.medium')
        inst = _get_cloud(clouddef_id)[0]
        snapshot_name = inst.name
        cm.deploy_to_snapshot(snapshot_name, shutdown_and_take_snapshot=False,
                              instance_type=instance_type, run_async=True)
    return json_success()

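# Hedged client-side sketch for the action handler above: it shows the kind of
# request the "startinstance" branch expects (a JSON body with "number_of_instances"
# and an optional "instance_type"). The URL pattern is an assumption for illustration
# only; the real route registration lives elsewhere in this project.
def _example_start_instances(clouddef_id):
    import requests  # assumed available in the client environment

    body = {"number_of_instances": 2, "instance_type": "t2.medium"}
    url = f"http://localhost:5000/api/clouds/{clouddef_id}/startinstance"
    # The handler kicks off launch_instances(..., run_async=True) and returns immediately.
    return requests.post(url, json=body).json()
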
def put(self, **kwargs):
    return json_status(False, 405, "Tasks are read-only through the API.")

def post(self):
    return json_status(False, 405, "Tasks are read-only through the API.")

def delete(self, **kwargs):
    return json_status(False, 405, "Instances are read-only through the API.")