def handle_requests(self, authentication, proxy_requests, route_name, container_ids, container_interface):
    """Persist proxy routing details for the authenticated session.

    Stores (host, port, container ids, container interface) keyed by the
    session's cookie value in the ``gxproxy2`` SQLite table, replacing any
    previous mapping for the same key.
    """
    key = authentication.cookie_value
    # FileLock serializes concurrent writers across processes.
    with FileLock(self.proxy_session_map):
        conn = sqlite.connect(self.proxy_session_map)
        try:
            c = conn.cursor()
            # IF NOT EXISTS replaces the former create/except-pass pattern
            # and no longer masks unrelated errors.
            c.execute('''CREATE TABLE IF NOT EXISTS gxproxy2
                         (key text PRIMARY KEY,
                          host text,
                          port integer,
                          container_ids text,
                          container_interface text)''')
            # ``key`` is the PRIMARY KEY, so INSERT OR REPLACE performs the
            # previous DELETE + INSERT pair in a single statement.
            insert = '''INSERT OR REPLACE INTO gxproxy2
                        (key, host, port, container_ids, container_interface)
                        VALUES (?, ?, ?, ?, ?)'''
            c.execute(insert, (key, proxy_requests.host, proxy_requests.port,
                               json.dumps(container_ids), container_interface))
            conn.commit()
        finally:
            conn.close()
def remove(self, **kwd):
    """
    Remove entries from the key, key_type, token, value store used for
    coordinating with external resources.  Removes every row matching ALL
    provided key=value pairs.

    :raises ValueError: if no filter criteria are supplied (refusing to
        delete the whole table).
    """
    # Raise explicitly: ``assert kwd, ValueError(...)`` never actually
    # raised ValueError and is stripped entirely under ``python -O``.
    if not kwd:
        raise ValueError("You must provide some values to key upon")
    # Column names come from trusted internal callers; the values are
    # bound as SQL parameters.
    delete = 'DELETE FROM %s WHERE %s' % (
        DATABASE_TABLE_NAME,
        ' and '.join('%s=?' % key for key in kwd))
    value_list = list(kwd.values())
    with FileLock(self.sqlite_filename):
        conn = sqlite3.connect(self.sqlite_filename)
        try:
            c = conn.cursor()
            try:
                # Delete entry
                # NB: This does not invalidate in-memory caches used by uwsgi (if any)
                c.execute(delete, tuple(value_list))
            except Exception as e:
                # Best-effort: log and continue rather than failing the caller.
                log.debug('Error removing entry (%s): %s', delete, e)
            conn.commit()
        finally:
            conn.close()
def get(self, key, key_type):
    """Look up the stored (token, host, port, info) row for ``key``/``key_type``.

    Returns a dict describing the entry, or ``None`` when no row matches.
    """
    with FileLock(self.sqlite_filename):
        conn = sqlite3.connect(self.sqlite_filename)
        try:
            cursor = conn.cursor()
            select = '''SELECT token, host, port, info FROM %s WHERE key=? and key_type=?''' % (
                DATABASE_TABLE_NAME)
            cursor.execute(select, (key, key_type,))
            row = cursor.fetchone()
            try:
                token, host, port, info = row
            except TypeError:
                # fetchone() returned None - there is no matching entry.
                log.warning('get(): invalid key: %s key_type %s', key, key_type)
                return None
            return {
                'key': key,
                'key_type': key_type,
                'token': token,
                'host': host,
                'port': port,
                'info': info,
            }
        finally:
            conn.close()
def update_requests(self, authentication, host=None, port=None):
    """Update the stored host/port of an existing JSON session-map entry.

    :raises KeyError: if no entry exists for this session's cookie value
        (``handle_requests`` was never called for it).
    """
    key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        # ``with`` closes the handles and flushes the write deterministically;
        # the original leaked both file objects.
        with open(self.proxy_session_map) as fh:
            session_map = json.load(fh)
        session_map[key]['host'] = host
        session_map[key]['port'] = port
        new_json_data = json.dumps(session_map)
        with open(self.proxy_session_map, "w") as fh:
            fh.write(new_json_data)
def update_requests(self, authentication, host=None, port=None):
    """Rewrite the host/port columns of this session's ``gxproxy2`` row."""
    session_key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        connection = sqlite.connect(self.proxy_session_map)
        try:
            update = '''UPDATE gxproxy2 SET host = ?, port = ? WHERE key = ?'''
            connection.cursor().execute(update, (host, port, session_key))
            connection.commit()
        finally:
            connection.close()
def handle_requests(self, authentication, proxy_requests, route_name, container_ids, container_interface):
    """Record proxy routing details for this session in the JSON-backed
    session map, creating the map file on first use.
    """
    key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        # Seed an empty map the first time through.
        if not os.path.exists(self.proxy_session_map):
            with open(self.proxy_session_map, "w") as fh:
                fh.write("{}")
        # ``with`` closes the handles and flushes the final write; the
        # original leaked all three file objects.
        with open(self.proxy_session_map, "r") as fh:
            session_map = json.loads(fh.read())
        session_map[key] = {
            'host': proxy_requests.host,
            'port': proxy_requests.port,
            'container_ids': container_ids,
            'container_interface': container_interface,
        }
        new_json_data = json.dumps(session_map)
        with open(self.proxy_session_map, "w") as fh:
            fh.write(new_json_data)
def save(self, key, key_type, token, host, port, info=None):
    """
    Write a (key, key_type, token, host, port, info) entry to the store
    used for coordinating with external resources, replacing any existing
    entry with the same (key, key_type).

    :raises ValueError: if ``key``, ``key_type`` or ``token`` is empty.
    """
    # Raise explicitly: ``assert x, ValueError(...)`` never raised
    # ValueError and is stripped entirely under ``python -O``.
    if not key:
        raise ValueError("A non-zero length key is required.")
    if not key_type:
        raise ValueError("A non-zero length key_type is required.")
    if not token:
        raise ValueError("A non-zero length token is required.")
    with FileLock(self.sqlite_filename):
        conn = sqlite3.connect(self.sqlite_filename)
        try:
            c = conn.cursor()
            # IF NOT EXISTS replaces the former create/except-pass pattern
            # and no longer masks unrelated errors.
            c.execute('''CREATE TABLE IF NOT EXISTS %s
                         (key text,
                          key_type text,
                          token text,
                          host text,
                          port integer,
                          info text,
                          PRIMARY KEY (key, key_type))''' % (DATABASE_TABLE_NAME))
            delete = '''DELETE FROM %s WHERE key=? and key_type=?''' % (
                DATABASE_TABLE_NAME)
            c.execute(delete, (key, key_type,))
            insert = '''INSERT INTO %s (key, key_type, token, host, port, info)
                        VALUES (?, ?, ?, ?, ?, ?)''' % (DATABASE_TABLE_NAME)
            c.execute(insert, (key, key_type, token, host, port, info,))
            conn.commit()
        finally:
            conn.close()
def handle_requests(self, authentication, proxy_requests, route_name, container_ids):
    """Map ``host:port`` to this session's secure cookie in the JSON-backed
    session map, dropping any stale keys that point at the same cookie.
    """
    key = "%s:%s" % (proxy_requests.host, proxy_requests.port)
    secure_id = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        if not os.path.exists(self.proxy_session_map):
            with open(self.proxy_session_map, "w") as fh:
                fh.write("{}")
        # ``with`` closes the handles and flushes the final write; the
        # original leaked all three file objects.
        with open(self.proxy_session_map, "r") as fh:
            session_map = json.loads(fh.read())
        # A session gets exactly one host:port mapping.  Collect stale keys
        # first -- deleting while iterating a dict raises RuntimeError.
        to_remove = [k for k, value in session_map.items() if value == secure_id]
        for k in to_remove:
            del session_map[k]
        session_map[key] = secure_id
        new_json_data = json.dumps(session_map)
        with open(self.proxy_session_map, "w") as fh:
            fh.write(new_json_data)
def ensure_installed(installable_context, install_func, auto_init):
    """Make sure target is installed - handle multiple processes potentially attempting installation.

    Returns True when the target ends up installed, False otherwise.
    When ``auto_init`` is set and the parent directory is writable, the
    check-and-install runs under a FileLock so only one process installs.
    """
    parent_path = installable_context.parent_path
    desc = installable_context.installable_description

    def _check():
        # Returns the final installed state, attempting installation if
        # allowed and possible.
        if not installable_context.is_installed():
            if auto_init:
                if installable_context.can_install():
                    # NOTE(review): a truthy return from install_func is
                    # treated as FAILURE here (error-code convention,
                    # presumably) -- confirm against install_func's contract.
                    if install_func(installable_context):
                        installed = False
                        log.warning(
                            f"{desc} installation requested and failed.")
                    else:
                        # install_func reported success; re-check on disk.
                        installed = installable_context.is_installed()
                        if not installed:
                            log.warning(
                                f"{desc} installation requested, seemed to succeed, but not found."
                            )
                else:
                    # Context says installation is not possible; give up quietly.
                    installed = False
            else:
                installed = False
                log.warning("%s not installed and auto-installation disabled.",
                            desc)
        else:
            installed = True
        return installed

    # NOTE(review): lexists-then-mkdir is racy across processes; a second
    # process may mkdir in between and raise FileExistsError.
    if not os.path.lexists(parent_path):
        os.mkdir(parent_path)
    try:
        if auto_init and os.access(parent_path, os.W_OK):
            # Serialize installation attempts across processes (5 min timeout).
            with FileLock(os.path.join(parent_path, desc.lower()), timeout=300):
                return _check()
        else:
            # Read-only or no auto-init: just report current/attempted state.
            return _check()
    except FileLockException:
        raise Exception(
            f"Failed to get file lock for {os.path.join(parent_path, desc.lower())}"
        )
def handle_requests(self, authentication, proxy_requests, route_name, container_ids):
    """Record ``host:port`` -> secure cookie in the ``gxproxy`` SQLite table."""
    key = "%s:%s" % (proxy_requests.host, proxy_requests.port)
    secure_id = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        conn = sqlite.connect(self.proxy_session_map)
        try:
            c = conn.cursor()
            try:
                # Create table
                # NOTE(review): ``PRIMARY_KEY`` (with underscore) is not the
                # PRIMARY KEY constraint -- SQLite parses it as part of the
                # column type, so uniqueness is NOT enforced.  Left as-is for
                # compatibility with existing database files; confirm intent.
                c.execute('''CREATE TABLE gxproxy
                             (key text PRIMARY_KEY, secret text)''')
            except Exception:
                # Table already exists.
                pass
            # Parameterized query: the original interpolated key/secure_id
            # directly into the SQL string, which breaks on quotes and is an
            # injection vector for attacker-influenced host values.
            insert = '''INSERT INTO gxproxy (key, secret) VALUES (?, ?)'''
            c.execute(insert, (key, secure_id))
            conn.commit()
        finally:
            conn.close()
def fetch_requests(self, authentication):
    """Return the ProxyMapping stored for this session, or None if absent."""
    session_key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        connection = sqlite.connect(self.proxy_session_map)
        try:
            cursor = connection.cursor()
            select = '''SELECT host, port, container_ids, container_interface
                        FROM gxproxy2
                        WHERE key=?'''
            cursor.execute(select, (session_key,))
            row = cursor.fetchone()
            if row is None:
                # No entry recorded for this cookie value.
                log.warning('fetch_requests(): invalid key: %s', session_key)
                return None
            host, port, container_ids, container_interface = row
            return ProxyMapping(host=host,
                                port=port,
                                container_ids=json.loads(container_ids),
                                container_interface=container_interface)
        finally:
            connection.close()
def _add_entry(self, entry, allow_duplicates=True, persist=False, entry_source=None, **kwd):
    """Append a row to the in-memory data table, optionally persisting it.

    ``entry`` may be a dict keyed by column name (missing columns receive
    the table's empty/default value) or an already-ordered list of fields.

    :raises MessageException: on a disallowed duplicate, on too few fields,
        or when ``persist`` is set but no backing .loc file exists.
    """
    # accepts dict or list of columns
    if isinstance(entry, dict):
        fields = []
        for column_name in self.get_column_name_list():
            if column_name not in entry:
                log.debug(
                    "Using default column value for column '%s' when adding data table entry (%s) to table '%s'.",
                    column_name, entry, self.name)
                field_value = self.get_empty_field_by_name(column_name)
            else:
                field_value = entry[column_name]
            fields.append(field_value)
    else:
        fields = entry
    if self.largest_index < len(fields):
        fields = self._replace_field_separators(fields)
        if (allow_duplicates and self.allow_duplicate_entries
            ) or fields not in self.get_fields():
            self.data.append(fields)
        else:
            raise MessageException(
                f"Attempted to add fields ({fields}) to data table '{self.name}', but this entry already exists and allow_duplicates is False."
            )
    else:
        raise MessageException(
            f"Attempted to add fields ({fields}) to data table '{self.name}', but there were not enough fields specified ( {len(fields)} < {self.largest_index + 1} )."
        )
    filename = None
    if persist:
        filename = self.get_filename_for_source(entry_source)
        if filename is None:
            # If we reach this point, there is no data table with a corresponding .loc file.
            raise MessageException(
                f"Unable to determine filename for persisting data table '{self.name}' values: '{self.fields}'."
            )
        else:
            log.debug("Persisting changes to file: %s", filename)
            with FileLock(filename):
                try:
                    if os.path.exists(filename):
                        data_table_fh = open(filename, 'r+b')
                        if os.stat(filename).st_size > 0:
                            # ensure last existing line ends with new line
                            data_table_fh.seek(-1, 2)  # last char in file
                            last_char = data_table_fh.read(1)
                            if last_char not in [b'\n', b'\r']:
                                data_table_fh.write(b'\n')
                    else:
                        data_table_fh = open(filename, 'wb')
                except OSError as e:
                    log.exception('Error opening data table file (%s): %s',
                                  filename, e)
                    raise
                # Close the handle even if the write fails -- the original
                # leaked it and relied on GC to flush the appended row.
                try:
                    fields = f"{self.separator.join(fields)}\n"
                    data_table_fh.write(fields.encode('utf-8'))
                finally:
                    data_table_fh.close()