def _lucene_builder(self, value, action=None, escape_query_options=EscapeQueryOptions.EscapeAll):
    if isinstance(value, str):
        if escape_query_options == EscapeQueryOptions.EscapeAll:
            value = Utils.escape(value, False, False)
        elif escape_query_options == EscapeQueryOptions.AllowPostfixWildcard:
            value = Utils.escape(value, False, False)
        elif escape_query_options == EscapeQueryOptions.AllowAllWildcards:
            value = Utils.escape(value, False, False)
            # \1 is the back-reference syntax re.sub understands ("${1}" would be inserted literally)
            value = re.sub(r'"\\\*(\s|$)"', r"*\1", value)
        elif escape_query_options == EscapeQueryOptions.RawQuery:
            value = Utils.escape(value, False, False).replace("\\*", "*")
    lucene_text = Utils.to_lucene(value, action=action)

    if len(self.query_builder) > 0 and not self.query_builder.endswith(' '):
        self.query_builder += ' '
    if self.negate:
        self.negate = False
        self.query_builder += '-'
    return lucene_text
def create_database(self, database_document):
    """
    Creates a database

    @param database_document: has to be DatabaseDocument type
    """
    if "Raven/DataDir" not in database_document.settings:
        raise exceptions.InvalidOperationException("The Raven/DataDir setting is mandatory")
    db_name = database_document.database_id.replace("Raven/Databases/", "")
    Utils.name_validation(db_name)
    path = "databases/{0}".format(Utils.quote_key(db_name))
    response = self.requests_handler.http_request_handler(path, "PUT", database_document.to_json(), admin=True)
    if response.status_code == 502:
        raise exceptions.ErrorResponseException(
            "Connection failed please check your connection to {0}".format(self.requests_handler.url))
    if response.status_code != 200:
        raise exceptions.ErrorResponseException(
            "Database with the name '{0}' already exists".format(database_document.database_id))
    return response
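# Usage sketch (illustrative, not from the original source): mirrors the call made in
# initialize() further down, which builds a DatabaseDocument with the mandatory
# "Raven/DataDir" setting and passes it to admin_commands.create_database().
# `store`, the "Northwind" name and the data directory are assumptions for the example.
def create_database_example(store):
    db_doc = DatabaseDocument("Northwind", {"Raven/DataDir": "~\\Northwind"})
    return store.database_commands.admin_commands.create_database(db_doc)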
def query(self, index_name, index_query, includes=None, metadata_only=False,
          index_entries_only=False, force_read_from_master=False):
    """
    @param index_name: A name of an index to query
    :type str
    @param index_query: A query definition containing all information required to query a specified index.
    :type IndexQuery
    @param includes: An array of relative paths that specify related documents ids
    which should be included in a query result.
    :type list
    @param metadata_only: True if returned documents should include only metadata without a document body.
    :type bool
    @param index_entries_only: True if query results should contain only index entries.
    :type bool
    @param force_read_from_master: If True the reading also will be from the master
    :type bool
    @return: json
    :rtype: dict
    """
    if not index_name:
        raise ValueError("index_name cannot be None or empty")
    if index_query is None:
        raise ValueError("None query is invalid")
    if not isinstance(index_query, IndexQuery):
        raise ValueError("query must be IndexQuery type")
    path = "indexes/{0}?".format(Utils.quote_key(index_name))
    if index_query.default_operator is QueryOperator.AND:
        path += "&operator={0}".format(index_query.default_operator.value)
    if index_query.query:
        path += "&query={0}".format(Utils.quote_key(index_query.query))
    if index_query.sort_hints:
        for hint in index_query.sort_hints:
            path += "&{0}".format(hint)
    if index_query.sort_fields:
        for field in index_query.sort_fields:
            path += "&sort={0}".format(field)
    if index_query.fetch:
        for item in index_query.fetch:
            path += "&fetch={0}".format(item)
    if metadata_only:
        path += "&metadata-only=true"
    if index_entries_only:
        path += "&debug=entries"
    if includes and len(includes) > 0:
        path += "".join("&include=" + item for item in includes)
    if index_query.start:
        path += "&start={0}".format(index_query.start)
    path += "&pageSize={0}".format(index_query.page_size)
    response = self._requests_handler.http_request_handler(
        path, "GET", force_read_from_master=force_read_from_master).json()
    if "Error" in response:
        raise exceptions.ErrorResponseException(response["Error"][:100])
    return response
def json_default(o):
    if isinstance(o, datetime):
        return Utils.datetime_to_string(o)
    elif isinstance(o, timedelta):
        return Utils.timedelta_to_str(o)
    elif getattr(o, "__dict__", None):
        return o.__dict__
    else:
        raise TypeError(repr(o) + " is not JSON serializable (Try adding a json default method to the store conventions)")
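# Usage sketch: json_default is shaped as a `default` hook for json.dumps, so datetime,
# timedelta and plain objects fall back to the converters used above. The Order class is
# an illustrative stand-in; the hook itself is the json_default function defined above.
import json

class Order(object):
    def __init__(self, name, delay):
        self.name = name
        self.delay = delay

serialized = json.dumps({"created": datetime(2017, 1, 1), "order": Order("cheese", timedelta(minutes=5))},
                        default=json_default)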
def setUpClass(cls):
    super(TestConversion, cls).setUpClass()
    cls.db.put("times/3",
               {"td": Utils.timedelta_to_str(timedelta(days=20, minutes=23, seconds=59, milliseconds=254)),
                "dt": Utils.datetime_to_string(datetime.now())},
               {"Raven-Entity-Name": "Times"})
    cls.db.put("times/4",
               {"td": Utils.timedelta_to_str(timedelta(minutes=23, seconds=59, milliseconds=254)),
                "dt": Utils.datetime_to_string(datetime.now())},
               {"Raven-Entity-Name": "Times"})
    cls.document_store = documentstore(cls.default_url, cls.default_database)
    cls.document_store.initialize()
def get(self, key_or_keys, includes=None, metadata_only=False, force_read_from_master=False):
    """
    @param key_or_keys: the key of the documents you want to retrieve (key can be a list of ids)
    :type str or list
    @param includes: array of paths in documents in which server should look for a 'referenced' document
    :type list
    @param metadata_only: specifies if only document metadata should be returned
    :type bool
    @param force_read_from_master: If True the reading also will be from the master
    :type bool
    @return: A list of the id or ids we looked for (if they exist)
    :rtype: dict
    """
    if key_or_keys is None:
        raise ValueError("None Key is not valid")
    path = "queries/?"
    method = "GET"
    data = None
    if includes:
        path += "".join("&include=" + Utils.quote_key(item) for item in includes)
    # make the get method handle a multi-document request in a single request
    if isinstance(key_or_keys, list):
        key_or_keys = collections.OrderedDict.fromkeys(key_or_keys)
        if metadata_only:
            path += "&metadata-only=True"
        # If it is too big, we drop to POST (note that means that we can't use the HTTP cache any longer)
        if (sum(len(x) for x in key_or_keys)) > 1024:
            method = "POST"
            data = list(key_or_keys)
        else:
            path += "".join("&id=" + Utils.quote_key(item) for item in key_or_keys)
    else:
        path += "&id={0}".format(Utils.quote_key(key_or_keys))

    response = self._requests_handler.http_request_handler(
        path, method, data=data, force_read_from_master=force_read_from_master)
    if response.status_code == 200:
        response = response.json()
    return response
def __init__(self, database_name, replication_factor=1, settings=None, secure_settings=None):
    super(CreateDatabaseOperation, self).__init__()
    self.replication_factor = replication_factor
    self.database_record = CreateDatabaseOperation.get_default_database_record()
    Utils.database_name_validation(database_name)
    self.database_record["DatabaseName"] = database_name
    if settings:
        self.database_record["Settings"] = settings
    if secure_settings:
        self.database_record["SecuredSettings"] = secure_settings
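# Usage sketch (hedged): constructing the operation only. The database name, replication
# factor and the "DataDir" settings key are illustrative assumptions; how the operation is
# then dispatched to the server depends on the store API and is not shown here.
create_op = CreateDatabaseOperation(database_name="Northwind", replication_factor=1,
                                    settings={"DataDir": "CustomDataDir"})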
def update_by_index(self, index_name, query, scripted_patch=None, options=None):
    """
    @param index_name: name of an index to perform a query on
    :type str
    @param query: query that will be performed
    :type IndexQuery
    @param options: various operation options e.g. AllowStale or MaxOpsPerSec
    :type BulkOperationOptions
    @param scripted_patch: JavaScript patch that will be executed on query results (used only when updating)
    :type ScriptedPatchRequest
    @return: json
    :rtype: dict
    """
    if not isinstance(query, IndexQuery):
        raise ValueError("query must be IndexQuery Type")
    path = Utils.build_path(index_name, query, options, with_page_size=False)
    if scripted_patch:
        if not isinstance(scripted_patch, ScriptedPatchRequest):
            raise ValueError("scripted_patch must be ScriptedPatchRequest Type")
        scripted_patch = scripted_patch.to_json()
    response = self._requests_handler.http_request_handler(path, "EVAL", data=scripted_patch)
    if response.status_code != 200 and response.status_code != 202:
        raise response.raise_for_status()
    return response.json()
def _execute_query(self):
    self.session.increment_requests_count()
    conventions = self.session.conventions
    end_time = time.time() + conventions.timeout
    while True:
        index_query = self.get_index_query()
        if self._page_size is not None:
            index_query.page_size = self._page_size
        response = self.session.database_commands.query(self.index_name, index_query, includes=self.includes)
        if response["IsStale"] and self.wait_for_non_stale_results:
            if time.time() > end_time:
                raise ErrorResponseException("The index is still stale after the timeout was reached")
            time.sleep(0.1)
            continue
        break

    results = []
    response_results = response.pop("Results")
    response_includes = response.pop("Includes")
    for result in response_results:
        entity, metadata, original_metadata = Utils.convert_to_entity(
            result, self.object_type, conventions, self.nested_object_types,
            fetch=False if not self.fetch else True)
        if self.object_type != dict and not self.fetch:
            self.session.save_entity(key=original_metadata.get("@id", None), entity=entity,
                                     original_metadata=original_metadata, metadata=metadata, document=result)
        results.append(entity)

    self.session.save_includes(response_includes)
    if self._with_statistics:
        return results, response
    return results
def _convert_and_save_entity(self, key, document, object_type, nested_object_types):
    if key not in self._entities_by_key:
        entity, metadata, original_metadata = Utils.convert_to_entity(
            document, object_type, self.conventions, nested_object_types)
        self.save_entity(key, entity, original_metadata, metadata, document)
def initialize(self):
    if not self._initialize:
        self._database_commands = database_commands.DatabaseCommands(self._requests_handler)
        if self.database is None:
            raise exceptions.InvalidOperationException("None database is not valid")
        if not self.database.lower() == self.conventions.system_database:
            path = "Raven/Databases/{0}".format(self.database)
            response = self._requests_handler.check_database_exists("docs?id=" + Utils.quote_key(path))
            # here we make sure the database exists; if not, we create a new one
            if response.status_code == 404:
                try:
                    raise exceptions.ErrorResponseException(
                        "Could not open database named: {0}, database does not exist".format(self.database))
                except exceptions.ErrorResponseException:
                    print(traceback.format_exc())
                self._database_commands.admin_commands.create_database(
                    DatabaseDocument(self.database, {"Raven/DataDir": "~\\{0}".format(self.database)}))
        self._requests_handler.get_replication_topology()
        self.generator = HiloGenerator(self.conventions.max_ids_to_catch, self._database_commands)
        self._initialize = True
def query(self, index_name, index_query, includes=None, metadata_only=False,
          index_entries_only=False, force_read_from_master=False):
    """
    @param index_name: A name of an index to query
    :type str
    @param index_query: A query definition containing all information required to query a specified index.
    :type IndexQuery
    @param includes: An array of relative paths that specify related documents ids
    which should be included in a query result.
    :type list
    @param metadata_only: True if returned documents should include only metadata without a document body.
    :type bool
    @param index_entries_only: True if query results should contain only index entries.
    :type bool
    @param force_read_from_master: If True the reading also will be from the master
    :type bool
    @return: json
    :rtype: dict
    """
    if not index_name:
        raise ValueError("index_name cannot be None or empty")
    if index_query is None:
        raise ValueError("None query is invalid")
    if not isinstance(index_query, IndexQuery):
        raise ValueError("index_query must be IndexQuery type")
    path = "indexes/{0}?".format(Utils.quote_key(index_name))
    path += self._build_query_request_path(index_query=index_query, includes=includes,
                                           metadata_only=metadata_only,
                                           index_entries_only=index_entries_only)
    response = self._requests_handler.http_request_handler(
        path, "GET", force_read_from_master=force_read_from_master).json()
    if "Error" in response:
        raise exceptions.ErrorResponseException(response["Error"][:100])
    return response
def static_query(high_price=500, delay_days=35):
    with DocumentStoreHolder.get_store().open_session() as session:
        query = list(session.query(index_name="CostlyOrders")
                     .where_greater_than("Price", high_price)
                     .and_also()
                     .where_greater_than("Delay", Utils.timedelta_tick(timedelta(days=delay_days))))
    return query[0] if query else None
def delete_database(self, db_name, hard_delete=False):
    db_name = db_name.replace("Raven/Databases/", "")
    path = "databases/{0}".format(Utils.quote_key(db_name))
    if hard_delete:
        path += "?hard-delete=true"
    response = self.requests_handler.http_request_handler(path, "DELETE", admin=True)
    if response.content != '' and response.content != b'':
        raise exceptions.ErrorResponseException(response.content)
    return response
def where_between(self, field_name, start, end):
    if isinstance(start, timedelta):
        start = Utils.timedelta_tick(start)
    if isinstance(end, timedelta):
        end = Utils.timedelta_tick(end)
    value = start or end
    if self.session.conventions.uses_range_type(value) and not field_name.endswith("_Range"):
        sort_hint = self.session.conventions.get_default_sort_option(type(value).__name__)
        if sort_hint:
            field_name = "{0}_Range".format(field_name)
            if sys.version_info.major > 2:
                if value > 2147483647:
                    sort_hint = self.session.conventions.get_default_sort_option("long")
            self._sort_hints.add("SortHint-{0}={1}".format(field_name, sort_hint))
    lucene_text = self._lucene_builder([start, end], action="between")
    self.query_builder += "{0}:{1}".format(field_name, lucene_text)
    return self
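# Usage sketch (hedged): where_between chains off session.query() like the other builder
# methods used in static_query above. The index name, field and bounds are illustrative;
# timedelta bounds are converted through Utils.timedelta_tick by the method itself.
def orders_with_moderate_delay(session):
    return list(session.query(index_name="CostlyOrders")
                .where_between("Delay", timedelta(days=1), timedelta(days=7)))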
def delete_index(self, index_name):
    """
    @param index_name: Name of the index you would like to delete
    :type str
    @return: json or None
    :rtype: dict
    """
    if not index_name:
        raise ValueError("None or empty index_name is invalid")
    path = "indexes/{0}".format(Utils.quote_key(index_name))
    self._requests_handler.http_request_handler(path, "DELETE")
def open_session(self, database=None, api_key=None, force_read_from_master=False):
    self._assert_initialize()
    session_id = uuid.uuid4()
    database_commands_for_session = self._database_commands
    if database is not None:
        requests_handler = HttpRequestsFactory(self.url, database, self.conventions,
                                               force_get_topology=True, api_key=api_key)
        path = "Raven/Databases/{0}".format(database)
        response = requests_handler.check_database_exists("docs?id=" + Utils.quote_key(path))
        if response.status_code != 200:
            raise exceptions.ErrorResponseException("Could not open database named: {0}".format(database))
        database_commands_for_session = database_commands.DatabaseCommands(requests_handler)
    return documentsession(database, self, database_commands_for_session, session_id, force_read_from_master)
def delete(self, key, etag=None):
    if key is None:
        raise ValueError("None Key is not valid")
    if not isinstance(key, str):
        raise ValueError("key must be {0}".format(type("")))
    headers = {}
    if etag is not None:
        headers["If-None-Match"] = etag
    key = Utils.quote_key(key)
    path = "docs/{0}".format(key)
    response = self._requests_handler.http_request_handler(path, "DELETE", headers=headers)
    if response.status_code != 204:
        raise exceptions.ErrorResponseException(response.json()["Error"])
def delete(self, key, etag=None):
    if key is None:
        raise ValueError("None Key is not valid")
    if not isinstance(key, str):
        raise ValueError("key must be {0}".format(type("")))
    headers = {}
    if etag is not None:
        headers["If-None-Match"] = "\"" + etag + "\""
    key = Utils.quote_key(key)
    path = "docs/{0}".format(key)
    response = self._requests_handler.http_request_handler(path, "DELETE", headers=headers)
    if response.status_code != 204:
        raise exceptions.ErrorResponseException(response.json()["Error"])
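# Usage sketch (hedged): optimistic-concurrency delete. The etag is read from the document
# metadata in the same way get_next_range() does further down; "products/1" as the key and
# `database_commands` as the commands object are illustrative assumptions.
def delete_if_unchanged(database_commands, key="products/1"):
    document = database_commands.get(key)["Results"][0]
    database_commands.delete(key, etag=document["@metadata"]["@etag"])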
def _lucene_builder(self, value, action=None, escape_query_options=EscapeQueryOptions.EscapeAll):
    if isinstance(value, str):
        if escape_query_options == EscapeQueryOptions.EscapeAll:
            value = Utils.escape(value, False, False)
        elif escape_query_options == EscapeQueryOptions.AllowPostfixWildcard:
            value = Utils.escape(value, False, False)
        elif escape_query_options == EscapeQueryOptions.AllowAllWildcards:
            value = Utils.escape(value, True, False)
            # \1 is the back-reference syntax re.sub understands ("${1}" would be inserted literally)
            value = re.sub(r'"\\\*(\s|$)"', r"*\1", value)
        elif escape_query_options == EscapeQueryOptions.RawQuery:
            value = Utils.escape(value, False, False).replace("\\*", "*")
    lucene_text = Utils.to_lucene(value, action=action)

    if len(self.query_builder) > 0 and not self.query_builder.endswith(' '):
        self.query_builder += ' '
    if self.negate:
        self.negate = False
        self.query_builder += '-'
    return lucene_text
def set_response(self, response):
    if response is None:
        raise ValueError("response is invalid.")

    data = {}
    try:
        # keep the original response object intact so raise_for_status() is still available below
        results = response.json()["Results"]
        if len(results) > 1:
            raise ValueError("response is invalid")
        for key, value in results[0].items():
            data[Utils.convert_to_snake_case(key)] = value
        return CertificateDefinition(**data)
    except ValueError:
        raise response.raise_for_status()
def get_index(self, index_name, force_read_from_master=False):
    """
    @param index_name: Name of the index you would like to get
    :type str
    @param force_read_from_master: If True the reading also will be from the master
    :type bool
    @return: json or None
    :rtype: dict
    """
    path = "indexes/{0}?definition=yes".format(Utils.quote_key(index_name))
    response = self._requests_handler.http_request_handler(path, "GET",
                                                           force_read_from_master=force_read_from_master)
    if response.status_code != 200:
        return None
    return response.json()
def search(self, field_name, search_terms, escape_query_options=EscapeQueryOptions.RawQuery):
    """
    For more complex text searching

    @param field_name: The field name in the index you want to query.
    :type str
    @param search_terms: the terms you want to query
    :type str
    @param escape_query_options: the way we should escape special characters
    :type EscapeQueryOptions
    """
    search_terms = Utils.quote_key(str(search_terms))
    search_terms = self._lucene_builder(search_terms, "search", escape_query_options)
    self.query_builder += "{0}:{1}".format(field_name, search_terms)
    return self
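# Usage sketch (hedged): search() is chained off session.query() like the builder methods
# in static_query above; the index name, field and search terms are illustrative.
def find_products(session, terms="cheese wine"):
    return list(session.query(index_name="Products/Search")
                .search("Name", terms, escape_query_options=EscapeQueryOptions.RawQuery))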
def embedded():
    # region embedded_example
    EmbeddedServer().start_server()
    with EmbeddedServer().get_document_store("Embedded") as store:
        with store.open_session() as session:
            # Your Code Here
            pass
    # endregion

    # region start_server
    # Start RavenDB Embedded Server with default options
    EmbeddedServer().start_server()
    # endregion

    # region start_server_with_options
    server_options = ServerOptions(data_directory="C:\\RavenData", server_url="http://127.0.0.1:8080")
    EmbeddedServer().start_server(server_options)
    # endregion

    # region get_document_store
    EmbeddedServer().get_document_store("Embedded")
    # endregion

    # region get_document_store_with_database_options
    database_options = DatabaseOptions(database_name="Embedded", skip_creating_database=True)
    EmbeddedServer().get_document_store(database_options)
    # endregion

    # region security
    server_options = ServerOptions()
    server_options.secured(certificate_path="PathToServerCertificate.pfx", certificate_password="******")
    # endregion

    # region security2
    server_options_with_exec = ServerOptions()
    server_options_with_exec.secured_with_exec("powershell", "C:\\secrets\\give_me_cert.ps1",
                                               Utils.get_cert_file_fingerprint("PATH_TO_PEM_CERT_FILE"),
                                               "PATH_TO_PEM_CERT_FILE")
    EmbeddedServer().start_server(server_options_with_exec)
    # endregion

    # region run_with_dotnet_path
    EmbeddedServer().start_server(ServerOptions(dotnet_path="PATH_TO_DOTNET_EXEC"))
    # endregion
def put_index(self, index_name, index_def, overwrite=False):
    """
    @param index_name: The name of the index
    @param index_def: IndexDefinition class a definition of a RavenIndex
    @param overwrite: if set to True overwrite
    """
    if index_name is None:
        raise ValueError("None index_name is not valid")
    if not isinstance(index_def, IndexDefinition):
        raise ValueError("index_def must be IndexDefinition type")
    index_name = Utils.quote_key(index_name)
    path = "indexes/{0}?definition=yes".format(index_name)
    response = self._requests_handler.http_request_handler(path, "GET")
    if not overwrite and response.status_code != 404:
        raise exceptions.InvalidOperationException("Cannot put index: {0}, index already exists".format(index_name))
    data = index_def.to_json()
    return self._requests_handler.http_request_handler(path, "PUT", data=data).json()
def _prepare_for_puts_commands(self, data):
    for entity in self._entities_and_metadata:
        if self._has_change(entity):
            key = self._entities_and_metadata[entity]["key"]
            metadata = self._entities_and_metadata[entity]["metadata"]
            etag = None
            if self.advanced.use_optimistic_concurrency \
                    or self._entities_and_metadata[entity]["force_concurrency_check"]:
                etag = self._entities_and_metadata[entity]["etag"] or metadata.get("@etag", Utils.empty_etag())
            data.entities.append(entity)
            if key is not None:
                self._entities_by_key.pop(key)
            document = entity.__dict__.copy()
            document.pop('Id', None)
            data.commands.append(commands_data.PutCommandData(key, etag, document, metadata))
def delete_by_index(self, index_name, query, options=None):
    """
    @param index_name: name of an index to perform a query on
    :type str
    @param query: query that will be performed
    :type IndexQuery
    @param options: various operation options e.g. AllowStale or MaxOpsPerSec
    :type BulkOperationOptions
    @return: json
    :rtype: dict
    """
    path = Utils.build_path(index_name, query, options)
    response = self._requests_handler.http_request_handler(path, "DELETE")
    if response.status_code != 200 and response.status_code != 202:
        try:
            raise exceptions.ErrorResponseException(response.json()["Error"][:100])
        except ValueError:
            raise response.raise_for_status()
    return response.json()
def delete_by_index(self, index_name, query, options=None):
    """
    @param index_name: name of an index to perform a query on
    :type str
    @param query: query that will be performed
    :type IndexQuery
    @param options: various operation options e.g. AllowStale or MaxOpsPerSec
    :type BulkOperationOptions
    @return: json
    :rtype: dict
    """
    path = Utils.build_path(index_name, query, options, with_page_size=False)
    response = self._requests_handler.http_request_handler(path, "DELETE")
    if response.status_code != 200 and response.status_code != 202:
        try:
            raise exceptions.ErrorResponseException(response.json()["Error"][:100])
        except ValueError:
            raise response.raise_for_status()
    return response.json()
def secured(self, certificate_path, certificate_password=None):
    """
    :param certificate_path: The path to a pfx file
    :param certificate_password: The password of the certificate file
    :type certificate_path: str
    :type certificate_password: str
    """
    if certificate_path is None:
        raise ValueError("certificate_path cannot be None")
    if self.security is not None:
        raise InvalidOperationException("The security has already been setup for this ServerOptions object")

    server_cert_fingerprint = Utils.get_cert_file_fingerprint(certificate_path)
    client_cert_path = _SecurityOptions.get_pem_file(certificate_path, certificate_password)
    self.security = _SecurityOptions(certificate_path, certificate_password, client_cert_path,
                                     server_cert_fingerprint=server_cert_fingerprint)
def _build_query_request_path(index_query, includes=None, metadata_only=False, index_entries_only=False,
                              include_query=True, add_page_size=True):
    path = ""
    if index_query.default_operator is QueryOperator.AND:
        try:
            operator = index_query.default_operator.value
        except AttributeError:
            operator = index_query.default_operator
        path += "&operator={0}".format(operator)
    if index_query.query and include_query:
        path += "&query={0}".format(Utils.quote_key(index_query.query, safe='/'))
    if index_query.sort_hints:
        for hint in index_query.sort_hints:
            path += "&{0}".format(hint)
    if index_query.sort_fields:
        for field in index_query.sort_fields:
            path += "&sort={0}".format(field)
    if index_query.fetch:
        for item in index_query.fetch:
            path += "&fetch={0}".format(item)
    if metadata_only:
        path += "&metadata-only=true"
    if index_entries_only:
        path += "&debug=entries"
    if includes and len(includes) > 0:
        path += "".join("&include=" + item for item in includes)
    if index_query.start and index_query.start != 0:
        path += "&start={0}".format(index_query.start)
    if add_page_size and index_query.page_size:
        path += "&pageSize={0}".format(index_query.page_size)
    if index_query.wait_for_non_stale_results:
        path += "&waitForNonStaleResultsAsOfNow=true"
    return path
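# Illustration (hypothetical): SimpleNamespace stands in for an IndexQuery instance so the
# sketch runs without the real class, and the helper is called here as a plain function
# even though in the client it may live on the commands class. It shows the shape of the
# query string being assembled; the exact percent-encoding depends on Utils.quote_key.
from types import SimpleNamespace

example_query = SimpleNamespace(default_operator=None, query="Tag:Products", sort_hints=None,
                                sort_fields=["Name"], fetch=None, start=0, page_size=128,
                                wait_for_non_stale_results=False)
example_path = _build_query_request_path(example_query, metadata_only=True)
# something like: "&query=Tag%3AProducts&sort=Name&metadata-only=true&pageSize=128"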
def initialize(self):
    if not self._initialize:
        self._operations = Operations(self._requests_handler)
        self._database_commands = database_commands.DatabaseCommands(self._requests_handler)
        if self.database is None:
            raise exceptions.InvalidOperationException("None database is not valid")
        if not self.database.lower() == self.conventions.system_database:
            path = "Raven/Databases/{0}".format(self.database)
            response = self._requests_handler.check_database_exists("docs?id=" + Utils.quote_key(path))
            # here we make sure the database exists; if not, we create a new one
            if response.status_code == 404:
                try:
                    raise exceptions.ErrorResponseException(
                        "Could not open database named: {0}, database does not exist".format(self.database))
                except exceptions.ErrorResponseException:
                    print(traceback.format_exc())
                self._database_commands.admin_commands.create_database(
                    DatabaseDocument(self.database, {"Raven/DataDir": "~\\{0}".format(self.database)}))
        self._requests_handler.get_replication_topology()
        self.generator = HiloGenerator(self.conventions.max_ids_to_catch, self._database_commands)
        self._initialize = True
def get_next_range(self, type_tag_name, request_handler):
    while True:
        path = "Raven/Hilo/{0}&id=Raven/ServerPrefixForHilo".format(type_tag_name)
        document = None
        try:
            document = self.database_commands.get(path)["Results"][0]
        except (exceptions.ErrorResponseException, IndexError):
            pass
        min_id = 1
        if document is None:
            etag = Utils.empty_etag()
            max_id = self.capacity
        else:
            min_id = document["Max"] + 1
            max_id = document["Max"] + self.capacity
            etag = document["@metadata"]["@etag"]
        try:
            request_handler.call_hilo(type_tag_name, max_id, etag)
            return Range(min_id, max_id)
        except exceptions.FetchConcurrencyException:
            pass
def stream(self, query, object_type=None, nested_object_types=None):
    index_query = query.get_index_query()
    if index_query.wait_for_non_stale_results:
        raise exceptions.NotSupportedException(
            "Since stream() does not wait for indexing (by design), "
            "streaming query with wait_for_non_stale_results is not supported.")
    self.session.increment_requests_count()
    response = self.session.database_commands.stream_query(query.index_name, index_query)

    basic_parse = IncrementalJsonParser.basic_parse(response)
    parser = ijson.backend.common.parse(basic_parse)
    results = ijson.backend.common.items(parser, "Results")
    for result in next(results, None):
        document, metadata, _ = Utils.convert_to_entity(result, object_type,
                                                        self.session.conventions, nested_object_types)
        yield {"document": document, "metadata": metadata,
               "key": metadata.get("@id", None), "etag": metadata.get("@etag", None)}
def do_auth_request(self, api_key, oauth_source, second_api_key=None):
    api_name, secret = api_key.split('/', 1)
    tries = 1
    headers = {"grant_type": "client_credentials"}
    data = None
    with requests.session() as session:
        while True:
            oath = session.request(method="POST", url=oauth_source, headers=headers, data=data)
            if oath.reason == "Precondition Failed":
                if tries > 1:
                    if not (second_api_key and self.api_key != second_api_key and tries < 3):
                        raise exceptions.ErrorResponseException("Unauthorized")
                    api_name, secret = second_api_key.split('/', 1)
                    tries += 1
                authenticate = oath.headers.__getitem__("www-authenticate")[len("Raven "):]
                challenge_dict = dict(item.split("=", 1) for item in authenticate.split(','))

                exponent_str = challenge_dict.get("exponent", None)
                modulus_str = challenge_dict.get("modulus", None)
                challenge = challenge_dict.get("challenge", None)
                # validate before using the values (the original check ran only after they had already been used)
                if exponent_str is None or modulus_str is None or challenge is None:
                    raise exceptions.InvalidOperationException(
                        "Invalid response from server, could not parse raven authentication information: {0}".format(
                            authenticate))

                exponent = bytes_to_long(base64.standard_b64decode(exponent_str))
                modulus = bytes_to_long(base64.standard_b64decode(modulus_str))

                rsa = RSA.construct((modulus, exponent))
                cipher = PKCS1_OAEP.new(rsa)

                iv = get_random_bytes(16)
                key = get_random_bytes(32)

                encoder = PKCS7Encoder()
                cipher_text = cipher.encrypt(key + iv)

                results = []
                results.extend(cipher_text)

                aes = AES.new(key, AES.MODE_CBC, iv)
                sub_data = Utils.dict_to_string(
                    {"api key name": api_name,
                     "challenge": challenge,
                     "response": base64.b64encode(
                         hashlib.sha1('{0};{1}'.format(challenge, secret).encode('utf-8')).digest())})
                results.extend(aes.encrypt(encoder.encode(sub_data)))

                data = Utils.dict_to_string(
                    {"exponent": exponent_str,
                     "modulus": modulus_str,
                     "data": base64.standard_b64encode(bytearray(results))})
                tries += 1

            elif oath.status_code == 200:
                oath_json = oath.json()
                body = oath_json["Body"]
                signature = oath_json["Signature"]
                if not sys.version_info.major > 2:
                    body = body.encode('utf-8')
                    signature = signature.encode('utf-8')

                with self.lock:
                    self._token = "Bearer {0}".format({"Body": body, "Signature": signature})
                    self.headers.update({"Authorization": self._token})
                break
            else:
                raise exceptions.ErrorResponseException(oath.reason)
def get_pem_file(cert_path, cert_password):
    dir_name, filename = os.path.split(cert_path)
    if filename.lower().endswith(".pfx"):
        return Utils.pfx_to_pem(os.path.join(dir_name, filename.replace(".pfx", ".pem")), cert_path, cert_password)
    return cert_path