def _prepare_put_request(self, archive_location, application_file_name, visibility, progress_callback): query_params = {'visibility': visibility} if application_file_name is not None: query_params['application_file_name'] = \ urlquote(application_file_name) # For a Windows path (e.g. "C:\aaa\bbb.zip") scheme is the # drive letter and therefore the 2nd condition is present if urlparse(archive_location).scheme and \ not os.path.exists(archive_location): # archive location is URL query_params['blueprint_archive_url'] = archive_location data = None else: # archive location is a system path data = bytes_stream_utils.request_data_file_stream( archive_location, progress_callback=progress_callback, client=self.api) return query_params, data
def upload(self, snapshot_path, snapshot_id, progress_callback=None):
    """
    Uploads snapshot archive to Cloudify's manager.

    Snapshot archive should be the same file that had been created
    and downloaded from Cloudify's manager as a result of create
    snapshot / download snapshot commands.

    :param snapshot_path: Path to snapshot archive.
    :param snapshot_id: Id of the uploaded snapshot.
    :param progress_callback: Progress bar callback method
    :return: Uploaded snapshot.
    """
    assert snapshot_path
    assert snapshot_id
    uri = '/snapshots/{0}/archive'.format(snapshot_id)

    query_params = {}
    data = None
    # A remote location has a URL scheme but no matching local file
    # (a Windows drive letter also parses as a scheme).
    if urlparse(snapshot_path).scheme and \
            not os.path.exists(snapshot_path):
        query_params['snapshot_archive_url'] = snapshot_path
    else:
        data = bytes_stream_utils.request_data_file_stream(
            snapshot_path,
            progress_callback=progress_callback,
            client=self.api)

    response = self.api.put(
        uri, params=query_params, data=data, expected_status_code=201)
    return Snapshot(response)
def _upload(self, archive_location, blueprint_id,
            application_file_name=None,
            visibility=VisibilityState.TENANT,
            progress_callback=None):
    """Upload a blueprint archive to the manager.

    :param archive_location: URL or local path of the blueprint archive.
    :param blueprint_id: Id to store the uploaded blueprint under.
    :param application_file_name: The archive's main blueprint yaml
        filename, or None to use the manager's default.
    :param visibility: The visibility of the blueprint, can be
        'private', 'tenant' or 'global'.
    :param progress_callback: Progress bar callback method.
    :return: The manager's response to the PUT request.
    """
    query_params = {'visibility': visibility}
    if application_file_name is not None:
        # Use urlquote (urllib.parse.quote), consistent with the other
        # upload helpers; urllib.quote only exists on Python 2.
        query_params['application_file_name'] = \
            urlquote(application_file_name)
    uri = '/{self._uri_prefix}/{id}'.format(self=self, id=blueprint_id)

    # For a Windows path (e.g. "C:\aaa\bbb.zip") scheme is the
    # drive letter and therefore the 2nd condition is present
    if urlparse(archive_location).scheme and \
            not os.path.exists(archive_location):
        # archive location is URL
        query_params['blueprint_archive_url'] = archive_location
        data = None
    else:
        # archive location is a system path
        data = bytes_stream_utils.request_data_file_stream(
            archive_location,
            progress_callback=progress_callback,
            client=self.api)

    return self.api.put(uri,
                        params=query_params,
                        data=data,
                        expected_status_code=201)
def upload(self, plugin_path, plugin_title=None,
           visibility=VisibilityState.TENANT,
           progress_callback=None, _plugin_id=None,
           _uploaded_at=None, _created_by=None):
    """Uploads a plugin archive to the manager

    :param plugin_path: Path to plugin archive.
    :param plugin_title: Plugin title to be used e.g. in UI for
        presentation purposes in Topology widget.
    :param visibility: The visibility of the plugin, can be 'private',
        'tenant' or 'global'
    :param progress_callback: Progress bar callback method
    :param _plugin_id: Internal use only
    :param _uploaded_at: Internal use only
    :param _created_by: Internal use only
    :return: Plugin object
    """
    query_params = {'visibility': visibility}
    # Internal/optional parameters are forwarded only when provided.
    optional = {
        'id': _plugin_id,
        'uploaded_at': _uploaded_at,
        'created_by': _created_by,
        'title': plugin_title,
    }
    query_params.update(
        (name, value) for name, value in optional.items() if value)

    timeout = self.api.default_timeout_sec
    if urlparse(plugin_path).scheme and not os.path.exists(plugin_path):
        query_params['plugin_archive_url'] = plugin_path
        data = None
        # if we have a timeout set, let's only use a connect timeout,
        # and skip the read timeout - this request can take a long
        # time before the server actually returns a response
        if timeout is not None and isinstance(timeout, (int, float)):
            timeout = (timeout, None)
    else:
        data = bytes_stream_utils.request_data_file_stream(
            plugin_path,
            progress_callback=progress_callback,
            client=self.api)

    response = self.api.post('/{self._uri_prefix}'.format(self=self),
                             params=query_params,
                             data=data,
                             timeout=timeout,
                             expected_status_code=201)
    if 'metadata' in response and 'items' in response:
        # This is a list of plugins - for caravan
        return self._wrap_list(response)
    return self._wrapper_cls(response)
def _prepare_put_request(self, archive_location, application_file_name, visibility, progress_callback, async_upload, labels=None, created_at=None, owner=None, state=None, skip_execution=False): query_params = { 'visibility': visibility, 'async_upload': async_upload, 'skip_execution': skip_execution } if application_file_name is not None: query_params['application_file_name'] = \ urlquote(application_file_name) if labels is not None: labels_params = [] for label in labels: if (not isinstance(label, dict)) or len(label) != 1: raise CloudifyClientError( 'Labels must be a list of 1-entry dictionaries: ' '[{<key1>: <value1>}, {<key2>: [<value2>, <value3>]}, ' '...]') [(key, value)] = label.items() value = value.replace('=', '\\=').replace(',', '\\,') labels_params.append('{0}={1}'.format(key, value)) query_params['labels'] = ','.join(labels_params) if created_at: query_params['created_at'] = created_at if owner: query_params['owner'] = owner if state: query_params['state'] = state # For a Windows path (e.g. "C:\aaa\bbb.zip") scheme is the # drive letter and therefore the 2nd condition is present if urlparse(archive_location).scheme and \ not os.path.exists(archive_location): # archive location is URL query_params['blueprint_archive_url'] = archive_location data = None else: # archive location is a system path data = bytes_stream_utils.request_data_file_stream( archive_location, progress_callback=progress_callback, client=self.api) return query_params, data
def validate(self, path, entity_id, blueprint_filename=None,
             visibility=VisibilityState.TENANT,
             progress_callback=None, skip_size_limit=True):
    """
    Validates a blueprint with Cloudify's manager.

    Blueprint path should point to the main yaml file of the response
    to be uploaded. Its containing folder will be packed to an archive
    and get uploaded to the manager. Validation is basically an upload
    without the storage part being done.

    :param path: Main blueprint yaml file path.
    :param entity_id: Id of the uploaded blueprint.
    :param blueprint_filename: The archive's main blueprint yaml filename.
    :param visibility: The visibility of the blueprint, can be 'private',
                       'tenant' or 'global'.
    :param progress_callback: Progress bar callback method
    :param skip_size_limit: Indicator whether to check size limit on
                            blueprint folder
    """
    tempdir = tempfile.mkdtemp()
    tar_path, application_file = None, None
    try:
        is_local = not urlparse(path).scheme or os.path.exists(path)
        if is_local:
            # path is not a URL, create archive
            tar_path, application_file = self._validate_blueprint_size(
                path, tempdir, skip_size_limit)

        response = self._validate(
            tar_path or path,
            blueprint_id=entity_id,
            application_file_name=application_file or blueprint_filename,
            visibility=visibility,
            progress_callback=progress_callback)
    finally:
        shutil.rmtree(tempdir)
    # on cloudify earlier than 6.4, response is None (204 no content);
    # in that case fall through and implicitly return None
    if response:
        return Execution(response)
def _update_from_archive(deployment_id, archive_path, application_file_name=None, inputs=None): """Create a deployment update transaction for an archived app. :param archive_path: the path for the archived app. :param application_file_name: the main blueprint filename. :param deployment_id: the deployment id to update. :return: DeploymentUpdate dict :rtype: DeploymentUpdate """ assert deployment_id mime_types = MimeTypes() data_form = {} params = {} # all the inputs are passed through the query if inputs: inputs_file = tempfile.TemporaryFile() json.dump(inputs, inputs_file) inputs_file.seek(0) data_form['inputs'] = ('inputs', inputs_file, 'text/plain') if application_file_name: params['application_file_name'] = \ urllib.quote(application_file_name) # For a Windows path (e.g. "C:\aaa\bbb.zip") scheme is the # drive letter and therefore the 2nd condition is present if all( [urlparse(archive_path).scheme, not os.path.exists(archive_path)]): # archive location is URL params['blueprint_archive_url'] = archive_path else: data_form['blueprint_archive'] = ( os.path.basename(archive_path), open(archive_path, 'rb'), # Guess the archive mime type mime_types.guess_type(urllib.pathname2url(archive_path))) return data_form, params