def __init__(self, host=None, username=None, password=None):
    """
    Initialize the client with connection established.

    Any credential not supplied is prompted for interactively; the
    password prompt uses getpass so it is not echoed.  After connecting,
    enters a small read-eval loop that dispatches each entered command
    to the same-named method of this object.

    Args:
        host: The address of the IMAP SSL host.
        username: The username.
        password: The password.
    """
    import getpass  # local import: only needed for the interactive prompt
    if not host:
        host = raw_input('host: ')
    if not username:
        username = raw_input('username: ')
    if not password:
        # NOTE(review): this line was scrubbed/redacted in the original
        # ('******' left a syntax error); reconstructed with getpass so the
        # password is not echoed to the terminal.
        password = getpass.getpass('password: ')
    printd('Login information: ' + host + ' ' + username + ' ' + password)
    self.client = client.IMAPClient(host, username, password)
    self.curr = ''
    print(WELCOME)
    print('Login to ' + host + '.\n')
    while 1:
        command = raw_input(username + '$ ').lstrip().split(' ')
        try:
            # Avoid accessing private methods.
            if len(command[0]) > 0:
                if command[0][0] == '_':
                    raise CommandError(command[0], 'command not found')
                try:
                    getattr(self, command[0])(*command[1:])
                except AttributeError:
                    raise CommandError(command[0], 'command not found')
                except TypeError:
                    raise CommandError(command[0], 'command with invalid parameters')
        except CommandError as e:  # 'as' form: valid on Python 2.6+ and 3.x
            printe(e)
def _recv_line(self):
    """Read one complete server response line.

    Keeps reading from the buffered socket until a chunk ending in CRLF
    arrives, logs the raw line, then returns it with the trailing CRLF
    stripped and split once on the first space (tag, remainder).
    """
    chunks = []
    while True:
        chunk = self.buffer.readline()
        chunks.append(chunk)
        # A line may arrive in pieces; only a CRLF-terminated chunk ends it.
        if chunk[-2:] == CRLF:
            break
    raw_line = ''.join(chunks)
    printd(raw_line)
    # Drop the ending CRLF before splitting off the tag.
    return raw_line[:-2].split(' ', 1)
def clean_b3ds():
    """
    Dev-only sanity pass over Blob3d.all; use sparingly!

    Repairs three kinds of inconsistency:
      * blob3ds missing the isBead attribute (re-tags all beads afterwards),
      * child ids that no longer exist in Blob3d.all (removed),
      * parent_ids that no longer exist in Blob3d.all (reset to None).
    :return: None
    """
    printl('<< CLEANING B3DS >>')
    # printl("These are the b3ds that will need fixing!")
    set_isBead_after = False
    # NOTE(review): this counter is never incremented in this version, so the
    # closing warn() below cannot fire — see the FIXME kept from the original.
    adjusted_b3d_minmax = 0
    for b3d in Blob3d.all.values():
        # Older pickles may predate the isBead attribute; default it and
        # remember to re-run bead tagging once every blob has the attr.
        if not hasattr(b3d, 'isBead'):
            b3d.isBead = None
            set_isBead_after = True
        # Collect then remove children whose ids are no longer registered
        # (two passes so we don't mutate the list while scanning it).
        remove_children = []
        for child in b3d.children:
            if child not in Blob3d.all:
                remove_children.append(child)
        if len(remove_children):
            for child in remove_children:
                b3d.children.remove(child)
            printl(' While cleaning b3d:' + str(b3d) + ' had to remove children that no longer existed ' + str(remove_children))
        # A non-root blob without a parent is suspicious — log it.
        if b3d.parent_id is None and b3d.recursive_depth != 0:
            printd(' Found b3d with None parent_id: ' + str(b3d), Config.debug_b3d_merge)
        # A parent_id pointing at a deleted blob3d is cleared to None.
        elif b3d.parent_id is not None and b3d.parent_id not in Blob3d.all:
            printl(' While cleaning b3d:' + str(b3d) + ' had to set parent_id to None, because parent_id: ' + str(b3d.parent_id) + ' is not a valid blob3d-id')
            b3d.parent_id = None
    if set_isBead_after:
        printl(' While cleaning b3ds, found b3ds without isBead attr, so setting isBead for all b3ds')
        Blob3d.tag_all_beads()
    if adjusted_b3d_minmax:
        warn("Had to adjust the ranges for a total of " + str(adjusted_b3d_minmax) + ' blob3ds because their b2ds were out of range')  # FIXME
def check_bead(self, indent=1):
    """Recursively decide whether this blob3d is a bead.

    Every child is checked first (depth-first), counting how many were
    tagged as beads; this blob is then tagged according to the configured
    sub-bead and edge-pixel limits, or unconditionally if it is a
    childless depth-0 blob.  Sets and returns self.isBead.
    """
    prefix = ' ' * indent
    printd(prefix + 'Called check_bead on b3d: ' + str(self), Config.debug_bead_tagging)
    bead_children = 0
    for child_id in self.children:
        printd(prefix + 'Checking if child of ' + str(self) + ' is bead:', Config.debug_bead_tagging)
        if Blob3d.get(child_id).check_bead(indent=indent + 1):
            bead_children += 1
    printd(prefix + 'Number of direct children which are beads = ' + str(bead_children) + ' / ' + str(len(self.children)), Config.debug_bead_tagging)
    # A bead has few bead-children and a small edge; a childless root blob
    # counts as a bead regardless.  (Short-circuit order kept from original.)
    if bead_children < Config.max_subbeads_to_be_a_bead and self.get_edge_pixel_count() <= Config.max_pixels_to_be_a_bead:
        self.isBead = True
    else:
        self.isBead = self.recursive_depth == 0 and len(self.children) == 0
    printd(prefix + ' set isBead = ' + str(self.isBead), Config.debug_bead_tagging)
    # DEBUG trace of the exact comparison that produced the tag above.
    printd(prefix + ' ^ was decided as: (' + str(bead_children) + ' < ' + str(Config.max_subbeads_to_be_a_bead) + ' and ' + str(self.get_edge_pixel_count()) + ' <= ' + str(Config.max_pixels_to_be_a_bead) + ') OR (' + str(self.recursive_depth) + ' == 0 and ' + str(len(self.children)) + ' == 0)', Config.debug_bead_tagging)
    return self.isBead
def tag_all_beads():
    """Tag every blob3d in Blob3d.all as bead or non-bead.

    Runs check_bead() over all depth-0 (base) blob3ds, which recursively
    tags their descendants, then sweeps up any blob3ds that were never
    reached (a sign of a broken parent/child tree) and tags those too.
    Finally logs the overall bead count.
    """
    printd('Tagging bead blob3ds', Config.debug_bead_tagging)
    base_b3ds = Blob3d.at_depth(0, ids=False)
    printl(str(len(base_b3ds)) + ' / ' + str(len(Blob3d.all)) + ' blob3ds are at recursive_depth=0')  # DEBUG
    num_base_with_children = sum(1 for b3d in base_b3ds if len(b3d.children))
    printl(str(num_base_with_children) + ' / ' + str(len(base_b3ds)) + ' base b3ds have children!')
    for base in base_b3ds:
        base.check_bead()
    printd(' ' + str(len(base_b3ds)) + ' of the ' + str(len(base_b3ds)) + ' base b3ds were tagged as beads', Config.debug_bead_tagging)
    # Clean-up pass: anything check_bead() never reached still has isBead None.
    unset = sorted((b3d for b3d in Blob3d.all.values() if b3d.isBead is None),
                   key=lambda b3d: b3d.recursive_depth)  # Do by recursive depth
    if unset:
        printd('When tagging all beads, there were ' + str(len(unset)) + ' b3ds which could not be reached from base b3ds', Config.debug_bead_tagging)
        # Want this to always be zero, otherwise theres a tree problem:
        printd(' They are: ' + str(unset), Config.debug_bead_tagging)
        for orphan in unset:
            orphan.check_bead()
    printl("Total number of beads = " + str(sum(b3d.isBead for b3d in Blob3d.all.values())) + ' / ' + str(len(Blob3d.all)))
def __init__(self, lowerblobid, upperblobid, overscan_scale, num_bins, quiet=True):
    """Pair two blob2ds (referenced by id) across adjacent slides for stitching.

    Collects the edge pixels of each blob that fall within the other's
    bounds; if both sides have candidate pixels, the pairing is marked
    connected, shape contexts and stitches are generated, and the pairing
    registers itself on both blob2ds.  Otherwise isConnected is False.

    Args:
        lowerblobid / upperblobid: ids of the two Blob2ds being paired.
        overscan_scale: stored scale factor for bound overscanning.
        num_bins: number of shape-context bins to use.
        quiet: suppresses the verbose debug prints when truthy.
    """
    self.overscan_scale = overscan_scale
    self.num_bins = num_bins
    self.lowerheight = Blob2d.get(lowerblobid).height
    self.upperheight = Blob2d.get(upperblobid).height
    self.lowerblob = lowerblobid
    self.upperblob = upperblobid
    self.upperpixels = self.edgepixelsinbounds(upperblobid, lowerblobid)
    self.lowerpixels = self.edgepixelsinbounds(lowerblobid, upperblobid)
    self.isReduced = False  # True when have chosen a subset of the edge pixels to reduce computation
    self.stitches = []
    self.cost = -1  # -1 to indicate that it is unset
    if self.upperpixels is not None and len(self.upperpixels) != 0 and len(self.lowerpixels) != 0:  # HACK
        # NOTE planning to reduce to a subset
        # NOTE 1:28 for (203,301) pre-opt, :37 for (174, 178), 66mins for (640, 616) -> 4 mins after optimization (picking half of each) -> 59 seconds with selective[::3]
        # NOTE After ::2 opt, total time for [:3] data slides = 10 mins 19 seconds, instead of ~ 2 hours, after selective[::3], total time = 6mins 49 seconds
        # selective [::3] with 5 slides = 36 mins
        if len(self.upperpixels) > Config.max_pixels_to_stitch or len(self.lowerpixels) > Config.max_pixels_to_stitch:
            if not quiet:
                # NOTE(review): self.lowerblob / self.upperblob hold *ids*, yet
                # this message reads .edge_pixels off them directly — looks like
                # it should be Blob2d.get(...).edge_pixels; confirm (only
                # reachable when quiet is falsy).
                printd('-->Too many pixels in the below stitch, reducing to a subset, originally was: ' + str(len(self.lowerpixels)) + '/' + str(len(self.lowerblob.edge_pixels)) + ' lower blob pixels and ' + str(len(self.upperpixels)) + '/' + str(len(self.upperblob.edge_pixels)) + ' upper blob pixels.', quiet)
            # Stride needed on each side so both fall under the pixel cap.
            pickoneovers = max(1, math.ceil(len(self.upperpixels) / Config.max_pixels_to_stitch)), max(1, math.ceil(len(self.lowerpixels) / Config.max_pixels_to_stitch))  # HACK TODO Modify these values to be more suitable dependent on computation time
            self.isReduced = True
            self.upperpixels = self.upperpixels[::pickoneovers[0]]  # Every pickoneover'th element
            self.lowerpixels = self.lowerpixels[::pickoneovers[1]]  # HACK this is a crude way of reducing the number of pixels
        self.isConnected = True
        self.set_shape_contexts(num_bins)  # Set lower and upper context bins
        if not quiet:
            printd(' ' + str(self), quiet)
        self.gen_stitches()  # Now have set self.cost and self.indeces and self.connect
        Blob2d.all[lowerblobid].pairings.append(self)  # TODO TODO convert this to use ids
        Blob2d.all[upperblobid].pairings.append(self)
    else:
        self.isConnected = False
    printd('Just created stitch: ' + str(self), Config.debug_stitches)
def merge(b3dlist):
    """Fold every blob3d id in *b3dlist* into a single blob3d.

    Consumes the list in place (ids are popped off) and merges them
    pairwise via Blob3d.merge2.  The whole group is also recorded in
    Blob3d.lists_of_merged_blob3ds for later inspection.

    Returns the id of the surviving, merged blob3d.
    """
    printd('Called merge on b3dlist: ' + str(b3dlist), Config.debug_b3d_merge)
    # Record the group being merged, for debugging/inspection.
    Blob3d.lists_of_merged_blob3ds.append([Blob3d.get(b3d) for b3d in b3dlist])
    merged_id = b3dlist.pop()
    all_parent_ids = [Blob3d.get(b3d).parent_id for b3d in b3dlist]  # DEBUG
    while b3dlist:
        merged_id = Blob3d.merge2(merged_id, b3dlist.pop())
    printd(' Final result of calling merge on b3dlist is b3d: ' + str(Blob3d.get(merged_id)), Config.debug_b3d_merge)
    printd(' DB all parents of b3ds which were merged:', Config.debug_b3d_merge)  # DEBUG
    return merged_id
def __init__(self, blob2dlist, r_depth=0):
    """Create a Blob3d from a list of blob2d ids.

    Assigns a fresh id, claims each blob2d that is not already owned by
    another blob3d (conflicts are queued in Blob3d.possible_merges),
    collects stitch pairings and bounding-box/average statistics, and —
    for bloomed (r_depth != 0) blobs — resolves/merges the parent blob3d.

    Args:
        blob2dlist: ids of the Blob2ds composing this blob3d.
        r_depth: recursive (blooming) depth; 0 for base blob3ds.
    """
    self.id = Blob3d.next_id
    Blob3d.next_id += 1
    self.blob2ds = blob2dlist  # List of the blob 2ds used to create this blob3d
    # Now find my pairings
    self.pairings = []
    self.lowslideheight = min(Blob2d.get(blob).height for blob in self.blob2ds)
    self.highslideheight = max(Blob2d.get(blob).height for blob in self.blob2ds)
    self.recursive_depth = r_depth
    self.children = []
    self.parent_id = None
    self.isBead = None
    ids_that_are_removed_due_to_reusal = set()
    for blobid in self.blob2ds:
        blob = Blob2d.get(blobid)
        if Blob2d.all[blob.id].b3did != -1:  # DEBUG #FIXME THE ISSUES COME BACK TO THIS, find the source
            # The blob2d already belongs to another b3d: record the conflict
            # instead of stealing it.
            # warn('NOT assigning a new b3did (' + str(self.id) + ') to blob2d: ' + str(Blob2d.all[blob.id]))
            printl('---NOT assigning a new b3did (' + str(self.id) + ') to blob2d: ' + str(Blob2d.all[blob.id]))
            Blob3d.possible_merges.append((Blob2d.all[blob.id].b3did, self.id, blob.id))
            ids_that_are_removed_due_to_reusal.add(blobid)  # HACK
        else:  # Note not adding to the new b3d
            Blob2d.all[blob.id].b3did = self.id
            for stitch in blob.pairings:
                if stitch not in self.pairings:  # TODO set will be faster
                    self.pairings.append(stitch)
    # self.blob2ds = list(set(self.blob2ds) - ids_that_are_removed_due_to_reusal) # TODO fixed typo 10/10, check doesn't impact elsewhere before uncommenting
    self.maxx = max(Blob2d.get(blob).maxx for blob in self.blob2ds)
    self.maxy = max(Blob2d.get(blob).maxy for blob in self.blob2ds)
    self.miny = min(Blob2d.get(blob).miny for blob in self.blob2ds)
    self.minx = min(Blob2d.get(blob).minx for blob in self.blob2ds)
    self.avgx = sum(Blob2d.get(blob).avgx for blob in self.blob2ds) / len(self.blob2ds)
    self.avgy = sum(Blob2d.get(blob).avgy for blob in self.blob2ds) / len(self.blob2ds)
    self.avgz = (self.lowslideheight + self.highslideheight) / 2
    self.isSingular = False
    self.note = ''  # This is a note that can be manually added for identifying certain characteristics..
    if r_depth != 0:
        # This is one of the most convoluted parts of the project: it occurs
        # only when a blob3d is created as a result of blooming.  Ideally the
        # new blob2ds were bloomed from a single blob2d, but bloomed blob2ds
        # from multiple blob3ds sometimes end up stitched together; those
        # parent blob3ds must then be combined (possibly recursively) so that
        # each blob2d/blob3d derives from exactly one parent blob3d.
        all_b2d_parents = [Blob2d.get(Blob2d.get(b2d).parent_id) for b2d in blob2dlist]
        # printl('All b2d_parents of our b2ds that are going into a new b3d: ' + str(all_b2d_parents))
        parent_b3dids = set([b2d.b3did for b2d in all_b2d_parents if b2d.b3did != -1])
        # printl('Their b3dids: ' + str(parent_b3dids))
        if len(parent_b3dids) > 0:
            printd('Attempting to create a new b3d with id: ' + str(self.id) +
                   '\nAll b2d_parents of our b2ds that are going into a new b3d: ' + str(all_b2d_parents) +
                   '\nAll of the b2ds\'_parents\' b3dids: ' + str(parent_b3dids), Config.debug_b3d_merge)
            if len(parent_b3dids) > 1:
                printd('*Found more than one b3d parent for b3d: ' + str(self) + ', attempting to merge parents: ' + str(list(Blob3d.get(b3d) for b3d in parent_b3dids)), Config.debug_b3d_merge)
                Blob3d.merge(list(parent_b3dids))
                new_parent_b3dids = list(set([b2d.b3did for b2d in all_b2d_parents if b2d.b3did != -1]))  # TODO can remove this, just for safety for now
                printd(' Post merging b3d parents, updated available-parent b3dids: ' + str(new_parent_b3dids), Config.debug_b3d_merge)
            else:
                new_parent_b3dids = list(parent_b3dids)
            self.parent_id = new_parent_b3dids[0]  # HACK HACK HACK
            if len(new_parent_b3dids) != 0 or self.parent_id == -1:
                # BUG FIX: this debug string was severed mid-literal and called
                # the undefined Blob3d.getb3d with a misplaced paren (str of a
                # generator); reconstructed using Blob3d.get.
                printd(" Updating b3d " + str(self.id) + '\'s parent_id to: ' + str(self.parent_id) + ' from new_parent_ids(after regen after merge): ' + str(list(Blob3d.get(b3d) for b3d in new_parent_b3dids)), Config.debug_b3d_merge)
                Blob3d.all[self.parent_id].children.append(self.id)
                printd(' Added b3d ' + str(self.id) + ' to parent\'s list of children, updated parent: ' + str(Blob3d.all[self.parent_id]), Config.debug_b3d_merge)
                if len(new_parent_b3dids) != 1:
                    # BUG FIX: message previously read 'should have ended up
                    # with more than one parent' — the warning fires when the
                    # parent count is NOT exactly one.
                    warn('New b3d (' + str(self.id) + ') should not have ended up with more than one parent!')
        else:
            warn('Creating a b3d at depth ' + str(r_depth) + ' with id ' + str(self.id) + ' which could not find a b3d parent')
    self.validate()
    printd("Done creating new b3d:" + str(self), Config.debug_b3d_merge)
def merge2(b1, b2):
    """
    Merges two blob3ds, and updates the entries of all data structures that
    link to these b3ds.  The surviving id is the smaller of the two.
    Updates the entry in Blob3d.all and deletes the larger blob's entry.
    :param b1: id of the first b3d to merge2
    :param b2: id of the second b3d to merge2
    :return: the surviving b3d id, or None when either input is -1 (skipped)
    """
    if b1 == -1 or b2 == -1:
        # Unowned sentinel id; nothing to merge (implicitly returns None).
        warn('***Skipping merging b3ds' + str(b1) + ' and ' + str(b2) + ' because at least one of them is -1, this should be fixed soon..')  # TODO
    else:
        b1 = Blob3d.get(b1)
        b2 = Blob3d.get(b2)
        printd(' Merging two b3ds: ' + str(b1) + ' ' + str(b2), Config.debug_b3d_merge)
        # Keep the lower id as the survivor.
        if b1.id < b2.id:  # HACK TODO revert this once issue is solved. This just makes things simpler to DEBUG
            smaller = b1
            larger = b2
        else:
            smaller = b2
            larger = b1
        # Re-home every blob2d of the larger b3d onto the survivor.
        for blob2d in larger.blob2ds:
            Blob2d.all[blob2d].b3did = smaller.id
            Blob3d.all[smaller.id].blob2ds.append(blob2d)
        # smaller.children += larger.children # CHANGED DEBUG
        # Adopt the larger blob's children, avoiding duplicates.
        Blob3d.all[smaller.id].children += [child for child in larger.children if child not in Blob3d.all[smaller.id].children]
        # Detach the larger blob from its parent, substituting the survivor.
        if larger.parent_id is not None:
            Blob3d.all[larger.parent_id].children.remove(larger.id)
            if smaller.id not in Blob3d.all[larger.parent_id].children:  # Would occur if they have the same parent
                Blob3d.all[larger.parent_id].children.append(smaller.id)
        # Repoint all adopted children at the survivor.
        for child in larger.children:
            Blob3d.all[child].parent_id = smaller.id
        if smaller.parent_id is not None:
            printd(' After Merging, the parent of the original smaller is: ' + str(Blob3d.get(smaller.parent_id)), Config.debug_b3d_merge)
        if larger.parent_id is not None:
            printd(' After Merging, the parent of the original larger is: ' + str(Blob3d.get(larger.parent_id)), Config.debug_b3d_merge)
        del Blob3d.all[larger.id]
        # TEST ----------
        # If the two merged blobs had different (real) parents, those parents
        # must themselves be merged — recursively.
        if smaller.parent_id is not None and larger.parent_id is not None and smaller.parent_id != larger.parent_id:
            # Recursively merging parents together
            printd("**** Merging parents of ids: %s and %s(now deleted): %s & %s" % (smaller.id, larger.id, smaller.parent_id, larger.parent_id), Config.debug_b3d_merge)
            Blob3d.lists_of_merged_blob3ds.append([Blob3d.get(smaller.parent_id), Blob3d.get(larger.parent_id)])
            Blob3d.list_of_merged_blob3d_parents.append([Blob3d.get(smaller.parent_id), Blob3d.get(larger.parent_id)])  # DEBUG
            Blob3d.merge2(smaller.parent_id, larger.parent_id)
        printd(' Result of merge2(%s, %s) = %s' % (smaller.id, larger.id, Blob3d.all[smaller.id]), Config.debug_b3d_merge)
        return smaller.id
def mergeblobs(bloblist):
    """
    Returns a NEW list of blobs, which have been merged after having their
    ids updated (externally, beforehand).
    Use the global variable 'debug_set_merge' to control output.
    :param bloblist: list of blob2d ids; blobs comparing equal are merged.
    :return: new list mixing untouched ids and freshly constructed Blob2ds.
    """
    newlist = []
    copylist = list(bloblist)  # http://stackoverflow.com/questions/2612802/how-to-clone-or-copy-a-list-in-python
    printd('Blobs to merge:' + str(copylist), Config.debug_set_merge)
    while len(copylist) > 0:
        printd('Len of copylist:' + str(len(copylist)), Config.debug_set_merge)
        blob1 = copylist[0]
        newpixels = []
        merged = False
        printd('**Curblob:' + str(blob1), Config.debug_set_merge)
        # Accumulate the pixels of every blob equal to blob1.
        for (index2, blob2) in enumerate(copylist[1:]):
            if blob2 == blob1:
                printd(' Found blobs to merge: ' + str(blob1) + ' & ' + str(blob2), Config.debug_set_merge)
                if Blob2d.get(blob1).recursive_depth != Blob2d.get(blob2).recursive_depth:
                    printl('WARNING merging two blobs of different recursive depths:' + str(blob1) + ' & ' + str(blob2))
                merged = True
                newpixels = newpixels + Blob2d.get(blob2).pixels
        if not merged:
            # No duplicates: carry the id through unchanged.
            printd('--Never merged on blob:' + str(blob1), Config.debug_set_merge)
            newlist.append(blob1)
            del copylist[0]
        else:
            printd(' Merging, newlist-pre:', Config.debug_set_merge)
            printd(' Merging, copylist-pre:', Config.debug_set_merge)
            # Remove every occurrence of blob1 (including itself) from the
            # worklist; the index is stepped back after each deletion.
            index = 0
            while index < len(copylist):
                printd(' Checking to delete:' + str(copylist[index]), Config.debug_set_merge)
                if copylist[index] == blob1:
                    printd(' Deleting:' + str(copylist[index]), Config.debug_set_merge)
                    del copylist[index]
                    index -= 1
                index += 1
            # NOTE(review): blob2 here is the last value left over from the
            # for-loop above — likely intended to be the minimum parent over
            # ALL merged blobs; confirm before relying on parent_id.  Also
            # note the attribute read is 'parentID' (not 'parent_id').
            newlist.append(Blob2d(Blob2d.get(blob1).pixels + newpixels,
                                  Blob2d.get(blob1).height,
                                  recursive_depth=Blob2d.get(blob1).recursive_depth,
                                  parent_id=min(Blob2d.get(blob1).parentID, Blob2d.get(blob2).parentID)))
            printd(' Merging, newlist-post:' + str(newlist), Config.debug_set_merge)
            printd(' Merging, copylist-post:' + str(copylist), Config.debug_set_merge)
    printd('Merge result' + str(newlist), Config.debug_set_merge)
    return newlist
def set_possible_partners(self, blob2dlist):
    """
    Finds all blobs in the given slide that COULD overlap with the given
    blob.  These blobs could be part of the same blob3D (partners).
    :param blob2dlist: candidate blob2d ids from an adjacent slide.
    """
    # A blob is a possible partner to another blob if they are in adjacent
    # slides, and they overlap in area.
    # Overlap cases (minx, maxx, miny, maxy at play):
    #   minx2 <= (minx1 | maxx1) <= maxx2
    #   miny2 <= (miny1 | maxy1) <= maxy2
    printd('Setting possible partners for b2d: ' + str(self) + ' from ' + str(len(blob2dlist)) + ' other blob2ds', Config.debug_partners)
    # (x, y) coordinates of all pixels of this blob and its descendants.
    my_pixel_coor = set([(Pixel.get(pix).x, Pixel.get(pix).y) for b2d in self.getdescendants(include_self=True) for pix in b2d.pixels])
    for b_num, blob in enumerate(blob2dlist):
        blob = Blob2d.get(blob)
        inbounds = False
        partner_smaller = False
        # Covers the case where the blob on the above slide is larger;
        # overlap in the x axis is required even if overlapping in y.
        if (blob.minx <= self.minx <= blob.maxx) or (blob.minx <= self.maxx <= blob.maxx):
            if (blob.miny <= self.miny <= blob.maxy) or (blob.miny <= self.maxy <= blob.maxy):
                inbounds = True
                partner_smaller = False
        if not inbounds:
            # Converse case: the candidate fits inside our bounding box.
            if (self.minx <= blob.minx <= self.maxx) or (self.minx <= blob.maxx <= self.maxx):
                if (self.miny <= blob.miny <= self.maxy) or (self.miny <= blob.maxy <= self.maxy):
                    inbounds = True
                    partner_smaller = True
        # If either of the above was true, then one blob is within the
        # bounding box of the other.
        if inbounds:
            printd(' Found b2d: ' + str(blob) + ' to be in-bounds, so checking other conditions', Config.debug_partners)
            pair_coor = set((Pixel.get(pix).x, Pixel.get(pix).y) for b2d in blob.getdescendants(include_self=True) for pix in b2d.pixels)
            # Pixels of ours that coincide with the candidate's pixels.
            overlap_amount = len(my_pixel_coor) - len(my_pixel_coor - pair_coor)
            # Accept when the overlap fraction on either side clears the
            # configured threshold (and that side has more than 7 pixels).
            if len(pair_coor) and len(my_pixel_coor) and ((overlap_amount / len(my_pixel_coor) > Config.minimal_pixel_overlap_to_be_possible_partners and len(my_pixel_coor) > 7) or ((overlap_amount / len(pair_coor) > Config.minimal_pixel_overlap_to_be_possible_partners) and len(pair_coor) > 7)):  # HACK
                self.possible_partners.append(blob.id)
                printd(' Above b2d confirmed to be partner, updated pp: ' + str(self.possible_partners), Config.debug_partners)
                if partner_smaller:
                    # Use partner's (blob) midpoints, and expand a proportion
                    # of minx, maxx, miny, maxy.
                    midx = blob.avgx
                    midy = blob.avgy
                    left_bound = midx - ((blob.avgx - blob.minx) * Config.overscan_coefficient)
                    right_bound = midx + ((blob.maxx - blob.avgx) * Config.overscan_coefficient)
                    down_bound = midy - ((blob.avgy - blob.miny) * Config.overscan_coefficient)
                    up_bound = midy + ((blob.maxy - blob.avgy) * Config.overscan_coefficient)
                else:
                    # Use our own midpoints and expanded bounds instead.
                    midx = self.avgx
                    midy = self.avgy
                    left_bound = midx - ((self.avgx - self.minx) * Config.overscan_coefficient)
                    right_bound = midx + ((self.maxx - self.avgx) * Config.overscan_coefficient)
                    down_bound = midy - ((self.avgy - self.miny) * Config.overscan_coefficient)
                    up_bound = midy + ((self.maxy - self.avgy) * Config.overscan_coefficient)
                # Indices of each blob's edge pixels inside the overscanned
                # window.  NOTE(review): these lists are not consumed within
                # the code visible here — the function may continue beyond
                # this view; confirm before removing.
                partner_subpixel_indeces = []
                my_subpixel_indeces = []
                for p_num, pixel in enumerate(blob.edge_pixels):
                    pixel = Pixel.get(pixel)
                    if left_bound <= pixel.x <= right_bound and down_bound <= pixel.y <= up_bound:
                        partner_subpixel_indeces.append(p_num)
                for p_num, pixel in enumerate(self.edge_pixels):
                    pixel = Pixel.get(pixel)
                    if left_bound <= pixel.x <= right_bound and down_bound <= pixel.y <= up_bound:
                        my_subpixel_indeces.append(p_num)
def _interact(self, command, *params):
    """
    Make one client/server interaction.

    An IMAP4 interaction consists of:
        * Command (a client command)
        * Response (server data and a server completion result response)

    This function not only completes an IMAP4 interaction, but also
    reacts to the response, which includes changing state, etc.

    Args:
        command: The IMAP4 command.
        params: The parameters passed with the command.
    Raise:
        InvalidCommandError: If a non-existing command is given, or the
            current state is invalid for the command.
    Returns:
        A tuple of three elements: response type, tagged response data and
        untagged response.  The first two are strings; the untagged
        response is a list of strings.
    """
    # Send client command to IMAP4 server.
    command = command.upper()
    if command not in COMMANDS:
        raise InvalidCommandError('Command ' + command + ' does not exist')
    if self.state not in COMMANDS[command][0]:
        raise InvalidCommandError('Command ' + command + ' is not available in ' + self.state + ' state')
    # Generate a different tag for each command. [2.2.1]
    # The tag is generated to be a random 6-digit hexadecimal value.
    tag = hex(random.randint(1048576, 16777215))[2:]
    params = ' ' + ' '.join(params) if len(params) > 0 else ''
    msg = tag + ' ' + command + params + CRLF
    # sendall: plain send() may transmit only part of the message.
    self.sock.sendall(msg)
    printd('\n' + msg)
    # Receive server response.
    tagged_response = ''
    untagged_response = []
    while 1:
        curr_tag, info = self._recv_line()
        # Decide action by type.
        if curr_tag == '*':
            # Add quoted string if literal.
            match = re.match(Literal, info)
            if match:
                size = match.group('size')
                # Read the literal and the tail.
                quoted = self.buffer.read(int(size)) + self.buffer.readline()
                printd(quoted)
                info += CRLF + quoted[:-2]
            untagged_response.append(info)
        elif curr_tag == '+':  # [7.5]
            self._recv_line()
        elif curr_tag == tag:
            tagged_response = info
            break
        else:
            raise InvalidCommandError('Receive invalid tagged response')
    # Analyse and react to the server response.
    # Check response type.  (Renamed from 'type' to avoid shadowing the builtin.)
    resp_type, tagged_data = tagged_response.split(' ', 1)
    if resp_type == 'BAD':
        raise InvalidCommandError(tagged_data)
    # Update current state.
    new_state = {
        'OK': COMMANDS[command][1][0],
        'NO': COMMANDS[command][1][1]
    }.get(resp_type, None)
    if new_state is not None:
        # BUG FIX: previously assigned COMMANDS[command][1][0] here, so a
        # 'NO' completion incorrectly moved to the 'OK' target state.
        self.state = new_state
        printd('\n[current state switch to ' + self.state + ']\n')
    # Return response for further processing in higher level functions.
    return resp_type, tagged_data, untagged_response
def RENAME(self, old_mailbox, new_mailbox):
    """Rename an existing mailbox."""
    return self._interact('RENAME', old_mailbox, new_mailbox)

#def SEARCH(self): pass

def SELECT(self, mailbox):
    """Select a mailbox so that its messages can be accessed."""
    return self._interact('SELECT', mailbox)

def SELECTACL(self, mailbox, who, what):
    """Set the access rights of *who* on *mailbox* to *what*.

    NOTE(review): the method name looks like a typo for SETACL; the name is
    kept for backward compatibility, but the command sent is now SETACL —
    the previous 'SELECT' with three parameters was a copy-paste error and
    not a valid IMAP exchange.  Confirm 'SETACL' exists in COMMANDS.
    """
    return self._interact('SETACL', mailbox, who, what)

def SETANNOTATION(self, *annotations):
    """Set annotations on the server."""
    return self._interact('SETANNOTATION', *annotations)

def SETQUOTA(self, root, limits):
    """Set the quota limits on the given quota root."""
    return self._interact('SETQUOTA', root, limits)

#def SORT(self): pass

def STATUS(self, mailbox, names):
    """Request the status items *names* of *mailbox*."""
    # BUG FIX: previously sent the non-existent command 'STATES'.
    return self._interact('STATUS', mailbox, names)

#def STORE(self, messages, command, flags): pass

def SUBSCRIBE(self, mailbox):
    """Add a mailbox to the subscription list."""
    return self._interact('SUBSCRIBE', mailbox)

#def THREAD(): pass
#def UID(self): pass

def UNSUBSCRIBE(self, mailbox):
    """Remove a mailbox from the subscription list."""
    return self._interact('UNSUBSCRIBE', mailbox)

#def xatom(self): pass

if __name__ == '__main__':
    """ The entry for debugging. """
    imap_server = IMAPServer('fcspostoffice.cs.dal.ca')
    while 1:
        command = raw_input('\nENTER: ')
        command = command.split(' ')
        if command[0] == "!":
            # Test the function with token '!' at begining.
            printd('\n[call function ' + command[1].upper() + '(' + repr(command[2:])[1:-1] + ')]\n')
            getattr(imap_server, command[1].upper())(*command[2:])
        else:
            # Direct test the usability of the command.
            imap_server._interact(command[0], *command[1:])
def bloom_b3ds(blob3dlist, stitch=False):
    """Bloom the internals of every blob2d in *blob3dlist* and build new blob3ds.

    Three phases:
      1. gen_internal_blob2ds() on every member blob2d ('blooming'),
      2. pairing the newly bloomed blob2ds with potential partners on
         adjacent slides (per recursive depth, per height),
      3. grouping partnered blob2ds into new Blob3ds, optionally stitching.

    :param blob3dlist: blob3ds whose blob2ds are to be bloomed.
    :param stitch: when True, set shape contexts and stitch the new b3ds.
    :return: list of the newly created Blob3ds.
    """
    allb2ds = [Blob2d.get(b2d) for b3d in blob3dlist for b2d in b3d.blob2ds]
    printl('\nProcessing internals of ' + str(len(allb2ds)) + ' 2d blobs via \'blooming\' ', end='')
    t_start_bloom = time.time()
    num_unbloomed = len(allb2ds)
    pb = ProgressBar(max_val=sum(len(b2d.pixels) for b2d in allb2ds), increments=50)
    for bnum, blob2d in enumerate(allb2ds):
        blob2d.gen_internal_blob2ds()  # NOTE will have len 0 if no blooming can be done
        pb.update(len(blob2d.pixels), set_val=False)  # set is false so that we add to an internal counter
    pb.finish()
    print_elapsed_time(t_start_bloom, time.time(), prefix='took')
    printl('Before blooming there were: ' + str(num_unbloomed) + ' b2ds contained within b3ds, there are now ' + str(len(Blob2d.all)))

    # Setting possible_partners
    printl('Pairing all new blob2ds with their potential partners in adjacent slides')
    max_avail_depth = max(b2d.recursive_depth for b2d in Blob2d.all.values())
    for cur_depth in range(max_avail_depth)[1:]:  # Skip those at depth 0
        # All blob2d ids at this recursive depth, bucketed by slide height.
        depth = [b2d.id for b2d in Blob2d.all.values() if b2d.recursive_depth == cur_depth]
        max_h_d = max(Blob2d.all[b2d].height for b2d in depth)
        min_h_d = min(Blob2d.all[b2d].height for b2d in depth)
        ids_by_height = [[] for _ in range(max_h_d - min_h_d + 1)]
        for b2d in depth:
            ids_by_height[Blob2d.get(b2d).height - min_h_d].append(b2d)
        # Each height pairs against the next height up.
        for height_val, h in enumerate(ids_by_height[:-1]):  # All but the last one
            for b2d in h:
                b2d = Blob2d.all[b2d]
                b2d.set_possible_partners(ids_by_height[height_val + 1])

    # Creating b3ds
    printl('Creating 3d blobs from the generated 2d blobs')
    all_new_b3ds = []
    for depth_offset in range(max_avail_depth + 1)[1:]:  # Skip offset of zero, which refers to the b3ds which have already been stitched
        printd('Depth_offset: ' + str(depth_offset), Config.debug_blooming)
        new_b3ds = []
        for b3d in blob3dlist:
            # Descendants of this b3d at (their depth + depth_offset) that
            # have at least one possible partner.
            all_d1_with_pp_in_this_b3d = []
            for b2d in b3d.blob2ds:  # Note this is the alternative to storing b3dID with b2ds
                b2d = Blob2d.get(b2d)
                d_1 = [blob for blob in b2d.getdescendants() if blob.recursive_depth == b2d.recursive_depth + depth_offset]
                if len(d_1):
                    for desc in d_1:
                        if len(desc.possible_partners):
                            all_d1_with_pp_in_this_b3d.append(desc.id)
            all_d1_with_pp_in_this_b3d = set(all_d1_with_pp_in_this_b3d)
            if len(all_d1_with_pp_in_this_b3d) != 0:
                printd(' Working on b3d: ' + str(b3d), Config.debug_blooming)
                printd(' Len of all_d1_with_pp: ' + str(len(all_d1_with_pp_in_this_b3d)), Config.debug_blooming)
                printd(' They are: ' + str(all_d1_with_pp_in_this_b3d), Config.debug_blooming)
                printd(' = ' + str(list(Blob2d.get(b2d) for b2d in all_d1_with_pp_in_this_b3d)), Config.debug_blooming)
            # Group each unclaimed b2d with its partner chains into a new b3d.
            for b2d in all_d1_with_pp_in_this_b3d:
                b2d = Blob2d.get(b2d)
                printd(' Working on b2d: ' + str(b2d) + ' with pp: ' + str(b2d.possible_partners), Config.debug_blooming)
                if b2d.b3did == -1:  # unset
                    cur_matches = [b2d]  # NOTE THIS WAS CHANGED BY REMOVED .getdescendants() #HACK
                    for pp in b2d.possible_partners:
                        printd(" *Checking if pp:" + str(pp) + ' is in all_d1: ' + str(all_d1_with_pp_in_this_b3d), Config.debug_blooming)
                        if pp in all_d1_with_pp_in_this_b3d:  # HACK REMOVED
                            printd(" Added partner: " + str(pp), Config.debug_blooming)
                            cur_matches += [Blob2d.get(b) for b in Blob2d.get(pp).getpartnerschain()]
                    if len(cur_matches) > 1:
                        printd("**LEN OF CUR_MATCHES MORE THAN 1", Config.debug_blooming)
                        # Only same-depth, still-unclaimed blobs join the new b3d.
                        new_b3d_list = [blob.id for blob in set(cur_matches) if blob.recursive_depth == b2d.recursive_depth and blob.b3did == -1]
                        if len(new_b3d_list):
                            new_b3ds.append(Blob3d(new_b3d_list, r_depth=b2d.recursive_depth))
        all_new_b3ds += new_b3ds
    printl(' Made a total of ' + str(len(all_new_b3ds)) + ' new b3ds')

    if stitch:
        # Set up shape contexts
        printl('Setting shape contexts for stitching')
        for b2d in [Blob2d.all[b2d] for b3d in all_new_b3ds for b2d in b3d.blob2ds]:
            b2d.set_shape_contexts(36)
        # Stitching
        printl('Stitching the newly generated 2d blobs')
        for b3d_num, b3d in enumerate(all_new_b3ds):
            printl(' Working on b3d: ' + str(b3d_num) + ' / ' + str(len(all_new_b3ds)))
            Pairing.stitch_blob2ds(b3d.blob2ds, debug=False)
    return all_new_b3ds