def add(self, message):
    """Queue a message for delivery.

    @param message: a C{dict} with a C{type} key and other keys conforming
        to the L{Message} schema for that specific message type.
    @return: message_id, which is an identifier for the added message or
        C{None} if the message was rejected.
    """
    assert "type" in message

    # While awaiting a resync, every outgoing message is discarded.
    if self._persist.get("blackhole-messages"):
        logging.debug("Dropped message, awaiting resync.")
        return

    server_api = self.get_server_api()
    # Tag the message with the server API version unless already tagged.
    message.setdefault("api", server_api)

    # Pick the schema with the highest API version that is still less
    # than or equal to the API version the message is tagged with.
    # NOTE(review): if no candidate version satisfies the check, `schema`
    # is unbound and the coerce below raises NameError — confirm callers
    # guarantee at least one matching schema version.
    candidates = self._schemas[message["type"]]
    for version in sort_versions(candidates.keys()):
        if is_version_higher(server_api, version):
            schema = candidates[version]
            break
    message = schema.coerce(message)

    # Serialize and write atomically: dump to a temp file, then rename.
    payload = bpickle.dumps(message)
    filename = self._get_next_message_filename()
    tmp_filename = filename + ".tmp"
    create_binary_file(tmp_filename, payload)
    os.rename(tmp_filename, filename)

    # Hold messages of types the server does not currently accept.
    if not self.accepts(message["type"]):
        filename = self._set_flags(filename, HELD)

    # For now we use the inode as the message id, as it will work
    # correctly even faced with holding/unholding. It will break
    # if the store is copied over for some reason, but this shouldn't
    # present an issue given the current uses. In the future we
    # should have a nice transactional storage (e.g. sqlite) which
    # will offer a more strong primary key.
    return os.stat(filename).st_ino
def store_public_key_data(config, certificate_data):
    """
    Write out the data from the SSL certificate provided to us, either from a
    bootstrap.conf file, or from EC2-style user-data.

    @param config: The L{BrokerConfiguration} object in use.
    @param certificate_data: a string of data that represents the contents of
        the file to be written.
    @return: the path of the ssl_public_key file that was written.
    """
    # The key lives next to the other data files, named after the config
    # file with an ".ssl_public_key" suffix.
    key_filename = os.path.join(
        config.data_path,
        os.path.basename(config.get_config_filename() + ".ssl_public_key"))
    print_text("Writing SSL CA certificate to %s..." % key_filename)
    create_binary_file(key_filename, certificate_data)
    return key_filename
def init_channels(self, binaries=()):
    """Initialize the Apt channels as needed.

    @param binaries: A possibly empty list of 3-tuples of the form
        (hash, id, deb), holding the hash, the id and the content of
        additional Debian packages that should be loaded in the channels.
    """
    binaries_path = self._config.binaries_path

    # Clean up the binaries we wrote in former runs
    self._clear_binaries()

    if binaries:
        hash_ids = {}
        for hash, id, deb in binaries:
            # decodebytes replaces base64.decodestring, which is deprecated
            # since Python 3.1 and removed in 3.9 (same behavior).
            create_binary_file(
                os.path.join(binaries_path, "%d.deb" % id),
                base64.decodebytes(deb))
            hash_ids[hash] = id
        self._store.set_hash_ids(hash_ids)
        self._facade.add_channel_deb_dir(binaries_path)
        self._facade.reload_channels(force_reload_binaries=True)

    self._facade.ensure_channels_reloaded()
def fetch_ok(data):
    # Persist the freshly downloaded hash=>id database to disk, then log.
    # `hash_id_db_filename` and `url` come from the enclosing scope.
    create_binary_file(hash_id_db_filename, data)
    # Lazy %-args: the message is only formatted if INFO is enabled.
    logging.info("Downloaded hash=>id database from %s", url)
def create_deb(target_dir, pkg_name, pkg_data):
    """Create a Debian package in the specified C{target_dir}.

    @param target_dir: directory in which the package file is created.
    @param pkg_name: file name for the package.
    @param pkg_data: base64-encoded package contents.
    """
    path = os.path.join(target_dir, pkg_name)
    # decodebytes replaces base64.decodestring, which is deprecated since
    # Python 3.1 and removed in 3.9 (same behavior).
    data = base64.decodebytes(pkg_data)
    create_binary_file(path, data)