def onInit():
    # Seed the shared "fruits" variable with the initial list of fruit names.
    # CopyOnWriteArrayList keeps later concurrent reads/writes safe.
    fruit_names = CopyOnWriteArrayList()
    fruit_names.addAll([
        "apple", "orange", "lemon", "banana", "cherry", "grapes",
        "peach", "mango", "grapefruit", "kiwi", "plum"
    ])
    sponge.setVariable("fruits", fruit_names)
class Storage:
    """In-memory registry of action metadata objects, keyed by action name.

    Uses java.util.concurrent types (this file runs under Jython) so the
    registry can be shared across threads.
    """

    def __init__(self):
        # Thread-safe action list and id counter.
        self.actions = CopyOnWriteArrayList()
        self.currentId = AtomicInteger(0)

    def addAction(self, actionMeta):
        """Register actionMeta; raise if an action with the same name exists."""
        if any(action.name == actionMeta.name for action in self.actions):
            raise Exception("The action {} has already been added".format(actionMeta.name))
        self.actions.add(actionMeta)

    def getAction(self, name):
        """Return the action registered under name; raise if it is absent.

        Fix: the previous `filter(lambda ...)[0]` crashed with a bare
        IndexError on an unknown name (and is not even subscriptable under
        Python 3); a plain scan with an explicit error is clearer and
        consistent with addAction's error style.
        """
        for action in self.actions:
            if action.name == name:
                return action
        raise Exception("The action {} has not been found".format(name))

    def updateAction(self, name, actionMeta):
        """Overwrite the stored metadata of action `name` with actionMeta's fields."""
        action = self.getAction(name)
        action.name = actionMeta.name
        action.label = actionMeta.label
        action.description = actionMeta.description
        action.callable = actionMeta.callable
        action.activatable = actionMeta.activatable
def __init__(self, family=None, type=None, proto=None):
    """Record the requested address family/type/protocol and reset socket state."""
    # FIXME verify these are supported
    self.family = family
    self.type = type
    self.proto = proto

    # Sockets start out blocking with no timeout and no live channel.
    self.blocking = True
    self.timeout = None
    self.channel = None
    self.bind_addr = None
    self.selectors = CopyOnWriteArrayList()

    if self.type != SOCK_DGRAM:
        # Role (client/server) is decided later by connect/listen.
        self.socket_type = UNKNOWN_SOCKET
        return

    # Datagram socket: set up receive state right away.
    self.socket_type = DATAGRAM_SOCKET
    self.connected = False
    self.incoming = LinkedBlockingQueue()  # queue of read buffers
    self.incoming_head = None  # allows msg buffers to be broken up
    self.python_inbound_handler = None
    self.can_write = True
class Library:
    """In-memory book collection with CRUD, search and author listing.

    Uses java.util.concurrent types (this file runs under Jython) so the
    collection can be shared across threads. `readOnly` guards all mutating
    operations; `force=True` on addBook bypasses it.
    """

    def __init__(self):
        self.books = CopyOnWriteArrayList()
        self.currentId = AtomicInteger(0)
        self.readOnly = False

    def addBook(self, author, title, cover=None, force=False):
        """Add a new book; raise if the same author/title pair already exists."""
        if any(book.author == author and book.title == title for book in self.books):
            raise Exception("This book has already been added to the library")
        if not self.readOnly or force:
            self.books.add(
                Book(self.currentId.incrementAndGet(), author, title, cover))

    def getBook(self, bookId):
        """Return the book with id bookId; raise if it is absent.

        Fix: the previous `filter(lambda ...)[0]` crashed with a bare
        IndexError on an unknown id (and is not subscriptable under
        Python 3); an explicit scan with a clear error is safer.
        """
        for book in self.books:
            if book.id == bookId:
                return book
        raise Exception("This book is not in the library")

    def updateBook(self, bookId, author, title, cover=None):
        """Replace author/title (and cover, when given) of an existing book."""
        book = self.getBook(bookId)
        if not self.readOnly:
            book.author = author
            book.title = title
            if cover:
                book.cover = cover

    def removeBook(self, bookId):
        """Delete the book with id bookId (no-op when read-only or absent)."""
        if not self.readOnly:
            self.books.removeIf(PyPredicate(lambda book: book.id == bookId))

    def findBooks(self, searchString):
        """Case-insensitive regex search over authors and titles.

        A None searchString matches everything. NOTE: the search string is
        used as a regex pattern, so regex metacharacters are interpreted.
        """
        if searchString is None:
            return list(self.books)
        pattern = searchString.upper()  # hoisted out of the per-book test
        return [book for book in self.books
                if re.search(pattern, book.author.upper())
                or re.search(pattern, book.title.upper())]

    def getAuthors(self):
        """Return the distinct authors, sorted case-insensitively."""
        return sorted(set([book.author for book in self.books]),
                      key=lambda author: author.lower())
from java.lang import System
from java.util.logging import LogManager
from java.util.concurrent import Executors, CopyOnWriteArrayList
from java.io import File

from com.threecrickets.scripturian.document import DocumentFileSource
from com.threecrickets.scripturian.exception import DocumentNotFoundException
from com.threecrickets.prudence.service import ApplicationService

#
# Common
#

# Shared Scripturian document sources for libraries/fragments/tasks/handlers
# under common/; last argument is presumably the minimum time (ms) between
# validity checks -- confirm against the DocumentFileSource API.
common_libraries_document_source = DocumentFileSource('common/libraries/', File(document.source.basePath, 'common/libraries/'), 'default', 'py', 5000)
common_fragments_document_source = DocumentFileSource('common/web/fragments/', File(document.source.basePath, 'common/web/fragments/'), 'index', 'py', 5000)
common_tasks_document_sources = CopyOnWriteArrayList()
common_tasks_document_sources.add(DocumentFileSource('common/tasks/', File(document.source.basePath, 'common/tasks/'), 'default', 'py', 5000))
common_handlers_document_sources = CopyOnWriteArrayList()
common_handlers_document_sources.add(DocumentFileSource('common/handlers/', File(document.source.basePath, 'common/handlers/'), 'default', 'py', 5000))

# Make the common libraries resolvable from this document.
document.librarySources.add(common_libraries_document_source)

#
# Utilities
#

# NOTE(review): this definition is cut off at the chunk boundary; the body
# below reproduces exactly what is visible here.
def execute_or_default(name, default=None):
    # Execute the named document; fall back to a default when it is missing.
    try:
        document.execute(name)
    except DocumentNotFoundException:
        if default is None:
class _socketobject(object):
    """Socket-like object backed by Netty channels (Jython).

    One instance serves as a client, server or datagram socket depending on
    which methods are called (connect/listen, or construction with
    SOCK_DGRAM); `socket_type` tracks the current role. Incoming data is
    queued as Netty ByteBufs in `self.incoming`, with `incoming_head`
    holding a partially-consumed buffer.
    """

    def __init__(self, family=None, type=None, proto=None):
        # FIXME verify these are supported
        self.family = family
        self.type = type
        self.proto = proto
        self.blocking = True
        self.timeout = None
        self.channel = None
        self.bind_addr = None
        self.selectors = CopyOnWriteArrayList()
        if self.type == SOCK_DGRAM:
            self.socket_type = DATAGRAM_SOCKET
            self.connected = False
            self.incoming = LinkedBlockingQueue()  # list of read buffers
            self.incoming_head = None  # allows msg buffers to be broken up
            self.python_inbound_handler = None
            self.can_write = True
        else:
            # Role becomes CLIENT_SOCKET or SERVER_SOCKET on connect/listen.
            self.socket_type = UNKNOWN_SOCKET

    def _register_selector(self, selector):
        # Selectors are notified whenever channel events complete.
        self.selectors.addIfAbsent(selector)

    def _unregister_selector(self, selector):
        return self.selectors.remove(selector)

    def _notify_selectors(self):
        for selector in self.selectors:
            selector.notify()

    def _handle_channel_future(self, future, reason):
        # All differences between nonblocking vs blocking with optional timeouts
        # is managed by this method.
        # All sockets can be selected on, regardless of blocking/nonblocking
        # NOTE(review): `reason` is currently unused -- presumably intended
        # for error reporting.
        def workaround_jython_bug_for_bound_methods(_):
            self._notify_selectors()

        future.addListener(workaround_jython_bug_for_bound_methods)
        if self.blocking:
            if self.timeout is None:
                # Block until the operation completes (or fails).
                return future.sync()
            else:
                # Bounded wait; the future is returned regardless of outcome.
                future.await(self.timeout * _TO_NANOSECONDS, TimeUnit.NANOSECONDS)
                return future
        else:
            return future

    def setblocking(self, mode):
        self.blocking = mode

    def settimeout(self, timeout):
        # NOTE(review): settimeout(None) makes the socket nonblocking here,
        # and setting a timeout does not restore blocking mode -- this
        # differs from CPython's socket semantics; confirm intent.
        if not timeout:
            self.blocking = False
        else:
            self.timeout = timeout

    def bind(self, address):
        # Netty 4 supports binding a socket to multiple addresses;
        # apparently this is the not the case for C API sockets
        self.bind_addr = address

    # CLIENT METHODS
    # Calling connect/connect_ex means this is a client socket; these
    # in turn use _connect, which uses Bootstrap, not ServerBootstrap

    def _init_client_mode(self, channel=None):
        # this is client socket specific
        self.socket_type = CLIENT_SOCKET
        self.incoming = LinkedBlockingQueue()  # list of read buffers
        self.incoming_head = None  # allows msg buffers to be broken up
        self.python_inbound_handler = None
        self.can_write = True
        self.connect_handlers = []
        self.peer_closed = False
        self.connected = False
        if channel:
            # Adopt an already-connected channel (e.g. accepted child socket).
            self.channel = channel
            self.python_inbound_handler = PythonInboundHandler(self)
            self.connect_handlers = [self.python_inbound_handler]
            self.connected = True

    def _connect(self, addr):
        """Bootstrap a Netty client connection to (host, port)."""
        print "Begin _connect"
        self._init_client_mode()
        self.connected = True
        self.python_inbound_handler = PythonInboundHandler(self)
        bootstrap = Bootstrap().group(NIO_GROUP).channel(NioSocketChannel)
        # add any options

        # FIXME really this is just for SSL handling
        if self.connect_handlers:
            for handler in self.connect_handlers:
                print "Adding connect handler", handler
                bootstrap.handler(handler)
        else:
            print "Adding read adapter", self.python_inbound_handler
            bootstrap.handler(self.python_inbound_handler)

        # FIXME also support any options here

        def completed(f):
            self._notify_selectors()
            print "Connection future - connection completed", f

        host, port = addr
        future = bootstrap.connect(host, port)
        future.addListener(completed)
        self._handle_channel_future(future, "connect")
        self.channel = future.channel()
        print "Completed _connect on {}".format(self)

    def _post_connect(self):
        # Post-connect step is necessary to handle SSL setup,
        # otherwise the read adapter can race in seeing encrypted
        # messages from the peer
        if self.connect_handlers:
            print "Adding read adapter", self.python_inbound_handler
            self.channel.pipeline().addLast(self.python_inbound_handler)

        def peer_closed(x):
            # Sentinel in the incoming queue tells readers the peer is gone.
            print "Peer closed channel {} {}".format(self, x)
            self.incoming.put(_PEER_CLOSED)
            self._notify_selectors()

        self.channel.closeFuture().addListener(peer_closed)

    def connect(self, addr):
        # Unwrapped sockets can immediately perform the post-connect step
        self._connect(addr)
        self._post_connect()
        print "Completed connect {} to {}".format(self, addr)

    def connect_ex(self, addr):
        # Error-code flavor of connect: EISCONN once a blocking connect has
        # completed, EINPROGRESS for a nonblocking connect still in flight.
        self.connect(addr)
        if self.blocking:
            return errno.EISCONN
        else:
            return errno.EINPROGRESS

    # SERVER METHODS
    # Calling listen means this is a server socket

    def listen(self, backlog):
        """Bootstrap a Netty server; accepted children land in client_queue."""
        self.socket_type = SERVER_SOCKET
        b = ServerBootstrap()
        b.group(NIO_GROUP)
        b.channel(NioServerSocketChannel)
        b.option(ChannelOption.SO_BACKLOG, backlog)
        # FIXME pass through child options from self; note that C API sockets do not distinguish
        # EXAMPLE - b.childOption(ChannelOption.SO_KEEPALIVE, True)

        # FIXME per http://stackoverflow.com/questions/9774023/netty-throttling-accept-on-boss-thread,
        # should set a parentHandler to ensure throttling to avoid denial of service attacks against this layer;
        # it's up to using Python code to do this, but at the very least there should be some sort of blocking
        # to ensure we don't exceed the desired backlog in this chunk of code;
        # right now, assumption is a ArrayBlockingQueue of sufficient size should suffice instead
        self.client_queue = ArrayBlockingQueue(backlog)

        # FIXME this should queue up sockets that are wrapped accordingly;
        # in particular they should be wrapped SSLSocket objects (inheriting SSLEngine settings)
        b.childHandler(ClientSocketHandler(self))

        # returns a ChannelFuture, but regardless for blocking/nonblocking, return immediately
        b.bind(_get_inet_addr(self.bind_addr))

    def accept(self):
        # Blocks until a child socket has been queued by ClientSocketHandler.
        s = self.client_queue.take()
        return s, s.getpeername()

    # DATAGRAM METHODS

    # needs to implicitly bind to 0 if not specified
    def _datagram_connect(self):
        # FIXME raise exception if not of the right family
        if not self.connected:
            print "Connecting datagram socket to", self.bind_addr
            self.connected = True
            self.python_inbound_handler = PythonInboundHandler(self)
            bootstrap = Bootstrap().group(NIO_GROUP).channel(NioDatagramChannel)
            bootstrap.handler(self.python_inbound_handler)
            # add any options
            # such as .option(ChannelOption.SO_BROADCAST, True)
            future = bootstrap.bind(_get_inet_addr(self.bind_addr))
            self._handle_channel_future(future, "bind")
            self.channel = future.channel()
            print "Completed _datagram_connect on {}".format(self)

    def sendto(self, string, arg1, arg2=None):
        """Send a datagram; supports both sendto(data, addr) and sendto(data, flags, addr)."""
        # Unfortunate overloading
        if arg2 is not None:
            flags = arg1
            address = arg2
        else:
            flags = None
            address = arg1
        print "Sending data", string
        self._datagram_connect()
        # need a helper function to select proper address;
        # this should take in account if AF_INET, AF_INET6
        packet = DatagramPacket(Unpooled.wrappedBuffer(string), _get_inet_addr(address))
        future = self.channel.writeAndFlush(packet)
        self._handle_channel_future(future, "sendto")
        return len(string)

    # GENERAL METHODS

    def close(self):
        future = self.channel.close()
        self._handle_channel_future(future, "close")

    def shutdown(self, how):
        # SHUT_RD: stop delivering reads by removing the inbound handler;
        # SHUT_WR: refuse further sends.
        if how & SHUT_RD:
            try:
                self.channel.pipeline().remove(self.python_inbound_handler)
            except NoSuchElementException:
                pass  # already removed, can safely ignore (presumably)
        if how & SHUT_WR:
            self.can_write = False

    def _readable(self):
        # True when buffered or queued data (or a pending accept) is available.
        if self.socket_type == CLIENT_SOCKET:
            return ((self.incoming_head is not None and self.incoming_head.readableBytes()) or
                    self.incoming.peek())
        elif self.socket_type == SERVER_SOCKET:
            return bool(self.client_queue.peek())
        else:
            return False

    def _writable(self):
        return self.channel.isActive() and self.channel.isWritable()

    def send(self, data):
        data = str(data)  # FIXME temporary fix if data is of type buffer
        print "Sending data <<<{}>>>".format(data)
        if not self.can_write:
            raise Exception("Cannot write to closed socket")  # FIXME use actual exception
        future = self.channel.writeAndFlush(Unpooled.wrappedBuffer(data))
        self._handle_channel_future(future, "send")
        # FIXME are we sure we are going to be able to send this much data, especially async?
        return len(data)

    sendall = send  # see note above!

    def _get_incoming_msg(self):
        # Pull the next buffer (or _PEER_CLOSED sentinel) into incoming_head,
        # honoring blocking mode and timeout; may return None when nonblocking
        # or timed out.
        if self.incoming_head is None:
            if self.blocking:
                if self.timeout is None:
                    self.incoming_head = self.incoming.take()
                else:
                    self.incoming_head = self.incoming.poll(self.timeout * _TO_NANOSECONDS, TimeUnit.NANOSECONDS)
            else:
                self.incoming_head = self.incoming.poll()  # Could be None

        # Only return _PEER_CLOSED once
        msg = self.incoming_head
        if msg is _PEER_CLOSED:
            self.incoming_head = None
        return msg

    def recv(self, bufsize, flags=0):
        """Read up to bufsize bytes; "" on peer close, None when no data (nonblocking)."""
        # For obvious reasons, concurrent reads on the same socket
        # have to be locked; I don't believe it is the job of recv to
        # do this; in particular this is the policy of SocketChannel,
        # which underlies Netty's support for such channels.
        msg = self._get_incoming_msg()
        if msg is None:
            return None
        elif msg is _PEER_CLOSED:
            return ""
        msg_length = msg.readableBytes()
        buf = jarray.zeros(min(msg_length, bufsize), "b")
        msg.readBytes(buf)
        if msg.readableBytes() == 0:
            msg.release()  # return msg ByteBuf back to Netty's pool
            self.incoming_head = None
        return buf.tostring()

    def recvfrom(self, bufsize, flags=0):
        """Datagram read: returns (data, (host, port)) like CPython's recvfrom."""
        # FIXME refactor common code from recv
        self._datagram_connect()
        packet = self._get_incoming_msg()
        if packet is None:
            return None
        elif packet is _PEER_CLOSED:
            return ""
        msg = packet.content()
        msg_length = msg.readableBytes()
        buf = jarray.zeros(min(msg_length, bufsize), "b")
        msg.readBytes(buf)
        remote_addr = packet.sender()  # may not be available on non datagram channels
        sender = remote_addr.getHostString(), remote_addr.getPort()
        if msg.readableBytes() == 0:
            packet.release()  # return msg ByteBuf back to Netty's pool
            self.incoming_head = None
        return buf.tostring(), sender

    def fileno(self):
        # No OS-level fd under Netty; returning self keeps select()-style
        # callers working with this object.
        return self

    def getsockopt(self, level, option):
        # NOTE(review): stub -- always reports 0 regardless of option.
        return 0

    def getpeername(self):
        remote_addr = self.channel.remoteAddress()
        return remote_addr.getHostString(), remote_addr.getPort()

    def _unlatch(self):
        pass  # no-op once mutated from ChildSocket to normal _socketobject
from com.threecrickets.scripturian.exception import DocumentNotFoundException
from com.threecrickets.prudence.service import ApplicationService

#
# Common
#

# Shared Scripturian document sources for common libraries, web fragments,
# tasks and handlers. The trailing 5000 is presumably the minimum time (ms)
# between validity checks -- confirm against the DocumentFileSource API.
# (File, DocumentFileSource and CopyOnWriteArrayList are imported earlier
# in this file.)
common_libraries_document_source = DocumentFileSource(
    'common/libraries/',
    File(document.source.basePath, 'common/libraries/'), 'default', 'py', 5000)
common_fragments_document_source = DocumentFileSource(
    'common/web/fragments/',
    File(document.source.basePath, 'common/web/fragments/'), 'index', 'py',
    5000)
common_tasks_document_sources = CopyOnWriteArrayList()
common_tasks_document_sources.add(
    DocumentFileSource('common/tasks/',
                       File(document.source.basePath, 'common/tasks/'),
                       'default', 'py', 5000))
common_handlers_document_sources = CopyOnWriteArrayList()
common_handlers_document_sources.add(
    DocumentFileSource('common/handlers/',
                       File(document.source.basePath, 'common/handlers/'),
                       'default', 'py', 5000))

# Make the common libraries resolvable from this document.
document.librarySources.add(common_libraries_document_source)

#
# Utilities
#
def __init__(self):
    # Thread-safe action store and id counter (java.util.concurrent types;
    # this file runs under Jython).
    self.actions = CopyOnWriteArrayList()
    self.currentId = AtomicInteger(0)
# NOTE(review): this chunk starts mid-script; applications_file, properties
# and save_properties are defined earlier in the file (presumably inside a
# loop over candidate archives) -- confirm indentation against the original.
last_modified = str(applications_file.lastModified())
if not applications_file.directory and applications_file.name[-4:] == '.zip' and properties.getProperty(applications_file.name, '') != last_modified:
    # Unpack new or changed .zip archives, and remember their timestamp so
    # each archive is only unpacked once.
    print 'Unpacking applications "' + applications_file.name + '"...'
    IoUtil.unzip(applications_file, applications_dir)
    properties.setProperty(applications_file.name, last_modified)
    save_properties = True

if save_properties:
    IoUtil.saveProperties(properties, properties_file)

# Applications

# Shared, thread-safe application list, also published through the component
# context attributes.
applications = component.context.attributes[
    'com.threecrickets.prudence.applications'] = CopyOnWriteArrayList()

application_dirs = applications_dir.listFiles()
for application_dir in application_dirs:
    if application_dir.directory and not application_dir.hidden:
        # Derive the application's names, paths and default URL from its
        # directory name.
        application_name = application_dir.name
        application_internal_name = application_dir.name
        application_logger_name = application_dir.name
        application_base_path = application_dir.path
        application_default_url = '/' + application_dir.name
        application_base = 'applications/' + application_dir.name
        # Executes the application's own script (or the defaults), which is
        # presumably expected to set application_instance as a side effect.
        execute_or_default(application_base, 'defaults/application/')
        applications.add(application_instance)

if len(applications) == 0:
    print 'No applications found. Exiting.'
def __init__(self):
    # Thread-safe book list and id sequence (java.util.concurrent types;
    # this file runs under Jython); readOnly guards the mutating methods.
    self.books = CopyOnWriteArrayList()
    self.currentId = AtomicInteger(0)
    self.readOnly = False
# for url in url_add_trailing_slash: url = fix_url(url) if len(url) > 0: if url[-1] == '/': url = url[:-1] router.attach(url, add_trailing_slash) language_manager = executable.manager # # Libraries # libraries_document_sources = CopyOnWriteArrayList() libraries_document_sources.add( DocumentFileSource(application_base + libraries_base_path, application_base_path + libraries_base_path, documents_default_name, 'py', minimum_time_between_validity_checks)) libraries_document_sources.add(common_libraries_document_source) # # Dynamic web # dynamic_web_document_source = DocumentFileSource( application_base + dynamic_web_base_path, application_base_path + dynamic_web_base_path, dynamic_web_default_document, 'py', minimum_time_between_validity_checks)