def captcha(self, path=None, fmt='JPEG'):
    """Create a captcha.

    Args:
        path: directory to additionally save the image into, default None.
        fmt: image format, PNG / JPEG.

    Returns:
        A tuple ``(name, text, data)``: a random 24-character name, the
        captcha solution text, and the encoded image bytes.
        For example: ('fXZJN4AFxHGoU5mIlcsdOypa', 'JGW9', '<PNG bytes>')
    """
    image = Image.new('RGB', (self.width, self.height), (255, 255, 255))
    image = self.background(image)
    image = self.text(image, self.fonts, drawings=['warp', 'rotate', 'offset'])
    image = self.curve(image)
    image = self.noise(image)
    image = self.smooth(image)
    # Digits 0/1/2 are excluded from the name alphabet (easily confused glyphs).
    name = "".join(random.sample(string.lowercase + string.uppercase + '3456789', 24))
    text = "".join(self._text)
    try:
        import cStringIO as StringIO
    except ImportError:
        import StringIO
    # Bug fix: StringIO is bound to a *module* here (cStringIO or StringIO),
    # so the buffer must be created with its StringIO() factory; calling the
    # module itself raised TypeError.
    out = StringIO.StringIO()
    image.save(out, format=fmt)
    if path:
        image.save(os.path.join(path, name), fmt)
    return name, text, out.getvalue()
def savePicture(self, path):
    """Save the picture blob to disk.

    Args:
        path: destination file path.

    Returns:
        The in-memory buffer the blob was copied from (read position at EOF).
    """
    from io import BytesIO
    data = self.picture
    if isinstance(data, str):
        # Tolerate text-typed blobs; encode byte-for-byte.
        data = data.encode('latin-1')
    buf = BytesIO(data)
    # Bug fix: the blob is binary image data, so the file must be opened in
    # binary mode -- text mode corrupts it on platforms with newline
    # translation.
    with open(path, 'wb') as fd:
        buf.seek(0)
        shutil.copyfileobj(buf, fd)
    return buf
def generate_chunk(self, data):
    """Yield *data* in 1024-byte chunks.

    Args:
        data: the string to stream out.

    Yields:
        Successive chunks of at most 1024 characters.
    """
    bufsize = 1024
    # Renamed from ``io`` to avoid shadowing the stdlib module name.
    stream = StringIO(data)
    # Bug fix: the original ``io.close()`` sat after the loop's ``return``
    # and was unreachable; close the buffer in a finally so it always runs,
    # even when the consumer abandons the generator early.
    try:
        while True:
            chunk = stream.read(bufsize)
            if not chunk:
                return
            yield chunk
    finally:
        stream.close()
def embed_image_html(image):
    """Creates an image embedded in HTML base64 format.

    Args:
        image: array-like image with float values in [0, 1].

    Returns:
        A ``data:image/png;base64,...`` data-URI string.
    """
    from io import BytesIO
    import base64
    image_pil = Image.fromarray((255 * image).astype('uint8'))
    image_pil = image_pil.resize((256, 256))
    # Bug fix: PNG output is binary, so a BytesIO buffer is required --
    # io.StringIO only accepts text on Python 3 and breaks image_pil.save().
    string_buf = BytesIO()
    image_pil.save(string_buf, format='png')
    data = base64.b64encode(string_buf.getvalue()).decode().replace('\n', '')
    return 'data:image/png;base64,' + data
def pycurl_request(self, meth, abs_url, headers, params):
    """Perform an HTTP request via pycurl.

    Args:
        meth: HTTP method name ('get', 'post', 'patch' or 'delete'),
            case-insensitive.
        abs_url: absolute request URL.
        headers: dict of request headers; mutated for POST/PATCH
            (Content-Type is set to application/json).
        params: request parameters -- URL-encoded onto the query string for
            GET, JSON-encoded as the body for POST/PATCH, and not allowed
            for DELETE.

    Returns:
        dict with 'body' (response body text), 'headers' (raw response
        header text) and 'code' (HTTP status code).

    Raises:
        APIConnectionError: for an unrecognized method or DELETE with params.
    """
    s = StringIO()        # accumulates the response body
    rheader = StringIO()  # accumulates the raw response headers
    curl = pycurl.Curl()
    meth = meth.lower()
    if meth == 'get':
        curl.setopt(pycurl.HTTPGET, 1)
        # TODO: maybe be a bit less manual here
        if params:
            abs_url = '%s?%s' % (abs_url, self.urlencode(params))
    elif meth in ['post', 'patch']:
        curl.setopt(pycurl.POST, 1)
        curl.setopt(pycurl.POSTFIELDS, self.jsonencode(params))
        headers['Content-Type'] = 'application/json'
    elif meth == 'delete':
        curl.setopt(pycurl.CUSTOMREQUEST, 'DELETE')
        if params:
            raise APIConnectionError(
                "Did not expect params in DELETE request")
    else:
        raise APIConnectionError(
            'Unrecognized HTTP method %r. This may indicate a bug in the Clever bindings. Please contact [email protected] for assistance.' % (meth, ))
    # pycurl doesn't like unicode URLs
    abs_url = self._utf8(abs_url)
    curl.setopt(pycurl.URL, abs_url)
    curl.setopt(pycurl.WRITEFUNCTION, s.write)
    curl.setopt(pycurl.NOSIGNAL, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 30)
    curl.setopt(pycurl.TIMEOUT, 80)
    curl.setopt(pycurl.HTTPHEADER, ['%s: %s' % (k, v)
                                    for k, v in six.iteritems(headers)])
    curl.setopt(pycurl.HEADERFUNCTION, rheader.write)
    if verify_ssl_certs:
        # Verify against the bundled CLEVER_CERTS CA file.
        curl.setopt(pycurl.CAINFO, CLEVER_CERTS)
    else:
        # NOTE(review): only SSL_VERIFYHOST is disabled here; SSL_VERIFYPEER
        # keeps its default -- confirm whether peer verification should also
        # be relaxed when verify_ssl_certs is off.
        curl.setopt(pycurl.SSL_VERIFYHOST, False)
    try:
        curl.perform()
    except pycurl.error as e:
        self.handle_pycurl_error(e)
    return {
        'body': s.getvalue(),
        'headers': rheader.getvalue(),
        'code': curl.getinfo(pycurl.RESPONSE_CODE)
    }
def _dump_pattern(input_file, destination, call_method): # type: (Union[str, BinaryIO], str, Callable) -> Union[None, str] if destination == 'stdout': call_method(input_file, destination=sys.stdout) elif destination == 'string': out = StringIO() call_method(input_file, destination=out) value = out.getvalue() out.close() # free the buffer return value else: the_out_file = _create_default_output_file( input_file) if destination == 'default' else destination with open(the_out_file, 'w') as fi: call_method(input_file, destination=fi)
def embed_image_html(image):
    """Creates an image embedded in HTML base64 format.

    Args:
        image: array-like image with float values in [0, 1].

    Returns:
        A ``data:image/png;base64,...`` data-URI string.
    """
    image_pil = Image.fromarray((255 * image).astype('uint8'))
    if sys.version_info.major == 2:
        string_buf = StringIO.StringIO()
        image_pil.save(string_buf, format='png')
        data = string_buf.getvalue().encode('base64').replace('\n', '')
    else:
        _buf = BytesIO()
        image_pil.save(_buf, format='png')
        # base64.b64encode emits pure ASCII with no embedded newlines, so
        # decode directly -- the original round-trip through a StringIO with
        # errors='replace' added nothing and could mask data corruption.
        data = base64.b64encode(_buf.getvalue()).decode('ascii')
    return 'data:image/png;base64,' + data
def runTest(self):
    """Render ``self.input`` through snudown and, on the first differing
    character versus ``self.expected_output``, fail with a report that
    points at the mismatch."""
    output = snudown.markdown(self.input)
    for i, (a, b) in enumerate(zip(repr(self.expected_output), repr(output))):
        if a != b:
            # Bug fix: the original used a bare ``except:`` which silently
            # swallowed every error; be explicit about the Python 2
            # (StringIO module) vs Python 3 (io.StringIO class) fallback.
            # Also renamed ``io`` -> ``buf`` to avoid shadowing the module.
            try:
                buf = StringIO.StringIO()  # Python 2: StringIO is a module
            except (NameError, AttributeError, TypeError):
                buf = StringIO()  # Python 3: io.StringIO class
            print("TEST FAILED:", file=buf)
            print(" input: %s" % repr(self.input), file=buf)
            print(" expected: %s" % repr(self.expected_output), file=buf)
            print(" actual: %s" % repr(output), file=buf)
            print(" %s" % (' ' * i + '^'), file=buf)
            self.fail(buf.getvalue())
def write_xls(file_name, sheet_name, headings, data, heading_xf, data_xfs, kinds):
    """Build an xlwt workbook from *data* and return it as an HTTP
    attachment response.

    Args:
        file_name: base name of the attachment (without extension); the
            special value "requisitions" triggers per-state row coloring.
        sheet_name: name of the single worksheet.
        headings: column header strings for row 0.
        data: iterable of rows; for "requisitions" the last cell of each row
            is a state key into the module-level state_color_map.
        heading_xf: xlwt style applied to the heading row.
        data_xfs: per-column xlwt styles for data cells.
        kinds: per-column kind names; 'money' columns keep their own style.

    Returns:
        A Django HttpResponse carrying the .xls file as an attachment.
    """
    book = xlwt.Workbook(encoding='utf8')
    sheet = book.add_sheet(sheet_name)
    rowx = 0
    for colx, value in enumerate(headings):
        sheet.write(rowx, colx, value, heading_xf)
    sheet.set_panes_frozen(True)  # frozen headings instead of split panes
    sheet.set_horz_split_pos(rowx+1)  # in general, freeze after last heading row
    sheet.set_remove_splits(True)  # if user does unfreeze, don't leave a split there
    sheet.set_col_default_width(True)
    # Pre-build one colored style per workflow state.
    color_charts = {}
    for state, color in state_color_map.iteritems():
        color_charts[state] = xlwt.easyxf("""font: height 180, name Times New Roman, colour_index %s, bold on; align: wrap on, vert centre, horiz center """ % color)
    for row in data:
        rowx += 1
        for colx, value in enumerate(row):
            style = data_xfs[colx]
            if file_name == "requisitions" and kinds[colx] != 'money':
                # Color the cell by the row's state (last cell of the row).
                state = row[-1]
                style = color_charts[state]
            # Placeholder values 'None'/'0.00' are rendered as empty cells.
            sheet.write(rowx, colx, (value not in ('None', '0.00') and value) or '', style)
    # book.save(file_name)  # (replaced: serve from memory instead of disk)
    from StringIO import StringIO
    result = StringIO()
    book.save(result)
    result.seek(0)
    # NOTE(review): the ``mimetype`` keyword was removed from Django's
    # HttpResponse in 1.7 (use ``content_type``) -- confirm the Django
    # version this targets.
    response = HttpResponse(result.read(), mimetype='application/ms-excel')
    if file_name:
        response['Content-Disposition'] = 'attachment; filename='+file_name+".xls"
    else:
        response['Content-Disposition'] = 'attachment; filename=export.xls'
    return response
def coerce_response(response):
    """Normalize *response* into an HTTPResponse with a matching Content-Type.

    Plain strings become text/html, dicts are JSON-encoded as
    application/json, and XML elements are serialized as application/xml;
    anything else is returned unchanged.
    """
    if isinstance(response, basestring):
        preamble = HTTPPreamble(headers={'Content-Type': 'text/html'})
        return HTTPResponse(preamble, body=response)
    if isinstance(response, dict):
        preamble = HTTPPreamble(headers={'Content-Type': 'application/json'})
        return HTTPResponse(preamble, body=json.dumps(response))
    if iselement(response):
        buf = StringIO()
        ElementTree(response).write(buf)
        preamble = HTTPPreamble(headers={'Content-Type': 'application/xml'})
        return HTTPResponse(preamble, body=buf.getvalue())
    return response
def pycurl_request(self, meth, abs_url, headers, params):
    """Perform an HTTP request via pycurl.

    Args:
        meth: HTTP method name ('get', 'post', 'patch' or 'delete'),
            case-insensitive.
        abs_url: absolute request URL.
        headers: dict of request headers; mutated for POST/PATCH
            (Content-Type is set to application/json).
        params: request parameters -- URL-encoded onto the query string for
            GET, JSON-encoded as the body for POST/PATCH, and not allowed
            for DELETE.

    Returns:
        dict with 'body' (response body text), 'headers' (raw response
        header text) and 'code' (HTTP status code).

    Raises:
        APIConnectionError: for an unrecognized method or DELETE with params.
    """
    s = StringIO()        # accumulates the response body
    rheader = StringIO()  # accumulates the raw response headers
    curl = pycurl.Curl()
    meth = meth.lower()
    if meth == 'get':
        curl.setopt(pycurl.HTTPGET, 1)
        # TODO: maybe be a bit less manual here
        if params:
            abs_url = '%s?%s' % (abs_url, self.urlencode(params))
    elif meth in ['post', 'patch']:
        curl.setopt(pycurl.POST, 1)
        curl.setopt(pycurl.POSTFIELDS, self.jsonencode(params))
        headers['Content-Type'] = 'application/json'
    elif meth == 'delete':
        curl.setopt(pycurl.CUSTOMREQUEST, 'DELETE')
        if params:
            raise APIConnectionError("Did not expect params in DELETE request")
    else:
        raise APIConnectionError(
            'Unrecognized HTTP method %r. This may indicate a bug in the Clever bindings. Please contact [email protected] for assistance.' % (meth, ))
    # pycurl doesn't like unicode URLs
    abs_url = self._utf8(abs_url)
    curl.setopt(pycurl.URL, abs_url)
    curl.setopt(pycurl.WRITEFUNCTION, s.write)
    curl.setopt(pycurl.NOSIGNAL, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 30)
    curl.setopt(pycurl.TIMEOUT, 80)
    curl.setopt(pycurl.HTTPHEADER, ['%s: %s' % (k, v)
                                    for k, v in six.iteritems(headers)])
    curl.setopt(pycurl.HEADERFUNCTION, rheader.write)
    if verify_ssl_certs:
        # Verify against the bundled CLEVER_CERTS CA file.
        curl.setopt(pycurl.CAINFO, CLEVER_CERTS)
    else:
        # NOTE(review): only SSL_VERIFYHOST is disabled; SSL_VERIFYPEER keeps
        # its default -- confirm whether peer verification should also be
        # relaxed when verify_ssl_certs is off.
        curl.setopt(pycurl.SSL_VERIFYHOST, False)
    try:
        curl.perform()
    except pycurl.error as e:
        self.handle_pycurl_error(e)
    return {'body': s.getvalue(),
            'headers': rheader.getvalue(),
            'code': curl.getinfo(pycurl.RESPONSE_CODE)}
def dump_nitf_file(file_name, dest, over_write=True):
    """
    Utility to dump the NITF header and various subheader details to a
    configurable destination.

    Parameters
    ----------
    file_name : str|BinaryIO
        The path to or file-like object containing a NITF 2.1 or 2.0 file.
    dest : str
        'stdout', 'string', 'default' (will use `file_name+'.header_dump.txt'`),
        or the path to an output file.
    over_write : bool
        If `True`, then overwrite the destination file, otherwise append to
        the file.

    Returns
    -------
    None|str
        There is only a return value if `dest=='string'`.
    """
    if dest == 'stdout':
        print_nitf(file_name, dest=sys.stdout)
        return None
    if dest == 'string':
        buffer = StringIO()
        try:
            print_nitf(file_name, dest=buffer)
            return buffer.getvalue()
        finally:
            buffer.close()  # free the buffer
    out_path = _create_default_output_file(file_name) if dest == 'default' else dest
    # Truncate when overwriting is allowed or the file is new; append otherwise.
    mode = 'w' if (over_write or not os.path.exists(out_path)) else 'a'
    with open(out_path, mode) as the_file:
        print_nitf(file_name, dest=the_file)
def _fix_chunked_encoding(self): from StringIO import StringIO if self.headers.get('transfer-encoding', '') != 'chunked': return full_data = "" while True: current_size = self._read_chunk_size() if current_size == 0: break full_data += self.rfile.read(current_size) self.rfile.read(2) # CRLF after chunk self.rfile = StringIO(full_data) self.headers['content-length'] = str(len(full_data)) return full_data
def docker_pull(self, namespace, repos):
    """Integration test of the docker pull flow against a live index and
    registry: resolve a token, fetch the 'latest' tag's ancestry, verify
    every image's checksum, then clean up the tags and stored images.

    Args:
        namespace: repository namespace (user/org).
        repos: repository name.
    """
    # Test pull
    # Docker -> Index
    resp = requests.get('{0}/v1/repositories/{1}/{2}/images'.format(
        self.index_endpoint, namespace, repos),
        auth=tuple(self.user_credentials),
        headers={'X-Docker-Token': 'true'})
    self.assertEqual(resp.status_code, 200)
    token = resp.headers.get('x-docker-token')
    # Here we should use the 'X-Endpoints' returned in a real environment
    # Docker -> Registry
    resp = requests.get('{0}/v1/repositories/{1}/{2}/tags/latest'.format(
        self.registry_endpoint, namespace, repos),
        headers={'Authorization': 'Token ' + token})
    self.assertEqual(resp.status_code, 200, resp.text)
    self.cookies = resp.cookies
    # Docker -> Registry
    image_id = json.loads(resp.text)
    resp = requests.get('{0}/v1/images/{1}/ancestry'.format(
        self.registry_endpoint, image_id), cookies=self.cookies)
    self.update_cookies(resp)
    self.assertEqual(resp.status_code, 200, resp.text)
    ancestry = json.loads(resp.text)
    # We got the ancestry, let's fetch all the images there
    for image_id in ancestry:
        json_data, checksum, blob = self.fetch_image(image_id)
        # check queried checksum and local computed checksum from the image
        # are the same
        tmpfile = StringIO()
        tmpfile.write(blob)
        tmpfile.seek(0)
        computed_checksum = checksums.compute_simple(tmpfile, json_data)
        tmpfile.close()
        self.assertEqual(checksum, computed_checksum)
    # Remove image tags
    resp = requests.delete('{0}/v1/repositories/{1}/{2}/tags'.format(
        self.registry_endpoint, namespace, repos), cookies=self.cookies)
    self.assertEqual(resp.status_code, 200, resp.text)
    self.update_cookies(resp)
    # Remove image_id, then parent_id
    store = storage.load()
    store.remove(os.path.join(store.images, self.image_id))
    store.remove(os.path.join(store.images, self.parent_id))
#!/usr/bin/env python # # fits_hrd2txt.py # # # Created by Danny Jacobs on 9/8/10. # PAPER Project # import aipy as a, numpy as n, pylab as p,math as m import sys, optparse, pyfits as pf import cStringIO as StringIO o = optparse.OptionParser() opts, args = o.parse_args(sys.argv[1:]) for file in args: hdulist = pf.open(file) # outfile = file[:-len('.fits')]+'.txt' outfile = StringIO('') hdulist[0].header.toTxtFile(outfile) print ''.join(outfile.lines())
class SOAPRequestHandler(BaseSOAPRequestHandler):
    '''SOAP handler.

    Parses incoming SOAP 1.1 / 1.2 POST requests (including chunked and
    multipart bodies) and dispatches faults back to the client on parse
    failure.
    '''

    def _read_chunk_size(self):
        # Read a chunk-size line byte-by-byte up to the newline, then parse
        # it as hexadecimal (per the HTTP chunked-encoding wire format).
        current_size = ""
        while '\n' not in current_size:
            current_size += self.rfile.read(1)
        current_size = int(current_size, 16)
        return current_size

    def _fix_chunked_encoding(self):
        # De-chunk a chunked request body in place: reassemble all chunks,
        # swap self.rfile for an in-memory stream and set Content-Length.
        # Returns the reassembled body, or None when the body is not chunked.
        from StringIO import StringIO
        if self.headers.get('transfer-encoding', '') != 'chunked':
            return
        full_data = ""
        while True:
            current_size = self._read_chunk_size()
            if current_size == 0:
                break
            full_data += self.rfile.read(current_size)
            self.rfile.read(2)  # CRLF after chunk
        self.rfile = StringIO(full_data)
        self.headers['content-length'] = str(len(full_data))
        return full_data

    def do_POST(self):
        '''The POST command.

        action -- SOAPAction(HTTP header) or wsa:Action(SOAP:Header)
        '''
        self._fix_chunked_encoding()
        logger.debug("Request Host: {}".format(self.client_address))
        logger.debug("Request URI: {}".format(self.requestline))
        for key, value in self.headers.items():
            logger.debug("Request Header: {}: {}".format(key, value))
        content_type = self.headers.get("content-type", '')
        # SOAP 1.2 carries the action in the Content-Type's action="urn:..."
        # parameter; SOAP 1.1 uses the separate SOAPAction header.
        action_matchobj = re.search("action=\"(urn:\w+)\"", content_type)
        if action_matchobj is not None:
            # SOAP 1.2
            soapAction = action_matchobj.group(1)
        else:
            # SOAP 1.1
            soapAction = self.headers.getheader('SOAPAction')
        if soapAction:
            soapAction = soapAction.strip('\'"')
        self._soapAction = soapAction
        post = self.path
        if not post:
            raise PostNotSpecified, 'HTTP POST not specified in request'
        post = post.strip('\'"')
        try:
            ct = self.headers['content-type']
            if ct.startswith('multipart/'):
                # Multipart (MIME with attachments): resolve the SOAP part
                # and let the resolver handle cid: references.
                cid = resolvers.MIMEResolver(ct, self.rfile)
                xml = cid.GetSOAPPart()
                ps = ParsedSoap(xml, resolver=cid.Resolve)
            else:
                length = int(self.headers['content-length'])
                xml = self.rfile.read(length)
                logger.debug("Request Body: {}".format(xml))
                ps = ParsedSoap(xml)
        except ParseException, e:
            self.send_fault(FaultFromZSIException(e))
        except Exception, e:
            # Faulted while processing; assume it's in the header.
            self.send_fault(FaultFromException(e, 1, sys.exc_info()[2]))
def write_POSCAR(poscar, filename):
    """
    Write the contents of poscar to filename.

    The first line of the file is a SHA-1 hash of the body when the
    module-level ``hashes`` flag is set, and the file name itself otherwise.
    """
    global hashes
    body = StringIO()
    body.write("1.0\n")
    row_fmt = "{0[0]:>20.15f} {0[1]:>20.15f} {0[2]:>20.15f}\n"
    for col in range(3):
        body.write(row_fmt.format((poscar["lattvec"][:, col]).tolist()))
    body.write("{0}\n".format(" ".join(poscar["elements"])))
    body.write("{0}\n".format(" ".join(str(count) for count in poscar["numbers"])))
    body.write("Direct\n")
    positions = poscar["positions"]
    for col in range(positions.shape[1]):
        body.write(row_fmt.format(positions[:, col].tolist()))
    if hashes:
        header = hashlib.sha1(body.getvalue().encode()).hexdigest()
    else:
        header = filename
    with open(filename, "w") as finalf:
        finalf.write("{0}\n".format(header))
        finalf.write(body.getvalue())
    body.close()
def main(args):
    """Curses dashboard showing per-PID Bifrost transmit/receive statistics.

    Polls the Bifrost stats logs about once a second, lists one row per PID
    with RX/TX data and packet rates, and shows a detail panel for the row
    selected with the up/down arrow keys. 'q' quits.
    """
    hostname = socket.gethostname()
    blockList = get_transmit_receive()
    order = sorted([blockList[key]['pid'] for key in blockList])
    order = set(order)
    nPID = len(order)
    # Set up the curses screen: no echo, unbuffered keys, non-blocking getch.
    scr = curses.initscr()
    curses.noecho()
    curses.cbreak()
    scr.keypad(1)
    scr.nodelay(1)
    size = scr.getmaxyx()
    std = curses.A_NORMAL
    rev = curses.A_REVERSE
    poll_interval = 1.0
    tLastPoll = 0.0
    try:
        sel = 0
        while True:
            t = time.time()

            ## Interact with the user
            c = scr.getch()
            curses.flushinp()
            if c == ord('q'):
                break
            elif c == curses.KEY_UP:
                sel -= 1
            elif c == curses.KEY_DOWN:
                sel += 1

            ## Find the current selected process and see if it has changed
            newSel = min([nPID - 1, max([0, sel])])
            if newSel != sel:
                # Selection moved: force an immediate re-poll.
                tLastPoll = 0.0
                sel = newSel

            ## Do we need to poll the system again?
            if t - tLastPoll > poll_interval:
                ## Save what we had before
                prevList = blockList

                ## Find all running processes
                pidDirs = glob.glob(os.path.join(BIFROST_STATS_BASE_DIR, '*'))
                pidDirs.sort()

                ## Load the data
                blockList = get_transmit_receive()

                ## Sort
                order = sorted([blockList[key]['pid'] for key in blockList])
                order = list(set(order))
                nPID = len(order)

                ## Stats
                stats = get_statistics(blockList, prevList)

                ## Mark
                tLastPoll = time.time()

                ## Clear
                act = None

            ## For sel to be valid - this takes care of any changes between when
            ## we get what to select and when we polled the bifrost logs
            sel = min([nPID - 1, sel])

            ## Display
            k = 0
            ### General - selected
            try:
                output = ' PID: %i on %s' % (order[sel], hostname)
            except IndexError:
                output = ' PID: n/a on %s' % (hostname, )
            output += ' ' * (size[1] - len(output) - len(os.path.basename(__file__)) - 1)
            output += os.path.basename(__file__) + ' '
            output += '\n'
            k = _add_line(scr, k, 0, output, std)
            ### General - header
            k = _add_line(scr, k, 0, ' ', std)
            output = '%6s %9s %6s %9s %6s' % ('PID', 'RX Rate', 'RX #/s', 'TX Rate', 'TX #/s')
            output += ' ' * (size[1] - len(output))
            output += '\n'
            k = _add_line(scr, k, 0, output, rev)
            ### Data
            for o in order:
                curr = stats[o]
                if o == order[sel]:
                    # Remember the selected PID's stats for the detail panel.
                    act = curr
                drateR, prateR = curr['rx']['drate'], curr['rx']['prate']
                drateR, drateuR = _set_units(drateR)
                drateT, prateT = curr['tx']['drate'], curr['tx']['prate']
                drateT, drateuT = _set_units(drateT)
                output = '%6i %7.2f%2s %6i %7.2f%2s %6i\n' % (o, drateR, drateuR, prateR, drateT, drateuT, prateT)
                try:
                    if o == order[sel]:
                        sty = std | curses.A_BOLD
                    else:
                        sty = std
                except IndexError:
                    sty = std
                k = _add_line(scr, k, 0, output, sty)
                if k > size[0] - 9:
                    # Stop before overrunning the detail panel area.
                    break
            while k < size[0] - 9:
                output = ' '
                k = _add_line(scr, k, 0, output, std)
            ### Details of selected
            output = 'Details - %8s %19s %19s' % (stats['updated'].strftime("%H:%M:%S"), 'RX', 'TX')
            output += ' ' * (size[1] - len(output))
            output += '\n'
            k = _add_line(scr, k, 0, output, rev)
            if act is not None:
                output = 'Good: %18iB %18iB\n' % (act['rx']['good'], act['tx']['good'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Missing: %18iB %18iB\n' % (act['rx']['missing'], act['tx']['missing'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Invalid: %18iB %18iB\n' % (act['rx']['invalid'], act['tx']['invalid'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Late: %18iB %18iB\n' % (act['rx']['late'], act['tx']['late'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Global Missing: %18.2f%% %18.2f%%\n' % (act['rx']['gloss'], act['tx']['gloss'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Current Missing: %18.2f%% %18.2f%%\n' % (act['rx']['closs'], act['tx']['closs'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Command: %s' % act['cmd']
                k = _add_line(scr, k, 0, output[:size[1]], std)

            ### Clear to the bottom
            scr.clrtobot()
            ### Refresh
            scr.refresh()

            ## Sleep
            time.sleep(_REDRAW_INTERVAL_SEC)

    except KeyboardInterrupt:
        pass

    except Exception as error:
        # Capture the traceback text before tearing down curses so it can be
        # printed on the restored terminal below.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        fileObject = StringIO()
        traceback.print_tb(exc_traceback, file=fileObject)
        tbString = fileObject.getvalue()
        fileObject.close()

    # Restore the terminal to its normal state.
    scr.keypad(0)
    curses.echo()
    curses.nocbreak()
    curses.endwin()

    # tbString only exists if an unexpected exception fired; NameError means
    # we exited cleanly (or via Ctrl-C) and there is nothing to report.
    try:
        print("%s: failed with %s at line %i" % (os.path.basename(__file__), str(error), traceback.tb_lineno(exc_traceback)))
        for line in tbString.split('\n'):
            print(line)
    except NameError:
        pass
def loadUiFile(uiPath): """ Load a designer UI xml file in :param uiPath: Path to UI file. ``uiPath`` be a partial path relative to the file calling :py:func:`.loadUiFile`. It is also not necessary to include the `.ui` extension. :type uiPath: str :return: Window Class defined by the input UI file :rtype: :py:class:`.DesignerForm` """ #Add extension if missing.. if not uiPath.endswith('.ui'): uiPath += '.ui' if not os.path.isfile(uiPath): #Resolve partial path into full path based on the call stack frame = inspect.currentframe( ).f_back #Back up one from the current frame modpath = frame.f_code.co_filename #Grab the filename from the code object base_directory = os.path.dirname(modpath) resolvePath = os.path.join(base_directory, uiPath) if os.path.isfile(resolvePath): uiPath = resolvePath else: raise ValueError('Could not locate UI file at path: %s' % uiPath) #Load the form class, and establish what the base class for the top level is in order to sub-class it if qt_lib == 'pyqt': #This step is easy with PyQt with open(uiPath, 'r') as f: form_class, base_class = uic.loadUiType(f) elif qt_lib == 'pyside': """ Pyside lacks the "loadUiType" command :( so we have to convert the ui file to py code in-memory first and then execute it in a special frame to retrieve the form_class. """ parsed = xml.parse(uiPath) widget_class = parsed.find('widget').get('class') form_class = parsed.find('class').text with open(uiPath, 'r') as f: o = StringIO() frame = {} #Compile to StringIO object uic.compileUi(f, o, indent=0) pyc = compile(o.getvalue(), '<string>', 'exec') exec pyc in frame #Fetch the base_class and form class based on their type in the xml from designer form_class = frame['Ui_%s' % form_class] base_class = eval('QtGui.%s' % widget_class) class WindowClass(form_class, base_class, DesignerForm): pass WindowClass._appName = uiPath WindowClass._uiPath = uiPath WindowClass.ensurePolished = DesignerForm.ensurePolished return WindowClass
def getStatusText(self):
    """Return a human-readable status string for this to-do item.

    Covers needs-action/in-process (with due/overdue information),
    completed and cancelled states.
    """
    sout = StringIO()
    if self.mStatus in (definitions.eStatus_VToDo_NeedsAction,
                        definitions.eStatus_VToDo_InProcess):
        if self.hasEnd():
            # Check due date
            today = DateTime()
            today.setToday()
            if self.getEnd() > today:
                # Bug fix: StringIO has no append(); use write().
                sout.write("Due: ")
                whendue = self.getEnd() - today
                if (whendue.getDays() > 0) and (whendue.getDays() <= 7):
                    # Bug fix: write() requires a string, not an int.
                    sout.write(str(whendue.getDays()))
                    sout.write(" days")
                else:
                    sout.write(self.getEnd().getLocaleDate(
                        DateTime.NUMERICDATE))
            elif self.getEnd() == today:
                sout.write("Due today")
            else:
                sout.write("Overdue: ")
                overdue = today - self.getEnd()
                if overdue.getWeeks() != 0:
                    sout.write(str(overdue.getWeeks()))
                    sout.write(" weeks")
                else:
                    sout.write(str(overdue.getDays() + 1))
                    sout.write(" days")
        else:
            sout.write("Not Completed")
    elif self.mStatus == definitions.eStatus_VToDo_Completed:
        if self.hasCompleted():
            sout.write("Completed: ")
            sout.write(self.getCompleted().getLocaleDate(
                DateTime.NUMERICDATE))
        else:
            sout.write("Completed")
    elif self.mStatus == definitions.eStatus_VToDo_Cancelled:
        # Bug fix: the original tested the constant's truthiness instead of
        # comparing it against self.mStatus, so every unhandled status
        # rendered as "Cancelled".
        sout.write("Cancelled")
    # Bug fix: StringIO exposes getvalue(), not Java-style toString().
    return sout.getvalue()
def stream_csv(data):
    """Serialize a single row of values into a CSV-formatted string."""
    buffer = StringIO()
    csv.writer(buffer).writerow(data)
    return buffer.getvalue()
def __init__(self, handle):
    '''Initialize this C3D file by reading header and parameter data.

    Arguments
    ---------
    handle : file handle
        Read metadata and C3D motion frames from the given file handle. This
        handle is assumed to be `seek`-able and `read`-able. The handle must
        remain open for the life of the `Reader` instance. The `Reader` does
        not `close` the handle.

    Raises
    ------
    ValueError, if the processor metadata in the C3D file is anything other
    than 84 (Intel format) or 85 (DEC format).
    '''
    super(Reader, self).__init__(Header(handle))
    self._handle = handle
    # Parameter blocks are 512 bytes each; block numbers are 1-based.
    self._handle.seek((self.header.parameter_block - 1) * 512)
    # metadata header
    buf = self._handle.read(4)
    _, _, parameter_blocks, processor = struct.unpack('BBBB', buf)
    if processor != PROCESSOR_INTEL:
        raise ValueError(
            'we only read Intel C3D files (got processor {})'.format(
                processor))
    # read all parameter blocks as a single chunk to avoid block
    # boundary issues.
    bytes = self._handle.read(512 * parameter_blocks - 4)
    while bytes:
        # NOTE(review): io.FileIO expects a path or file descriptor, not a
        # byte string -- confirm FileIO here is a project alias for an
        # in-memory stream (BytesIO-like); otherwise this line fails.
        buf = FileIO(bytes)
        chars_in_name, group_id = struct.unpack('bb', buf.read(2))
        if group_id == 0 or chars_in_name == 0:
            # we've reached the end of the parameter section.
            break
        # The name length may be stored negative; abs() recovers the length.
        name = buf.read(abs(chars_in_name)).upper()
        offset_to_next, = struct.unpack('<h', buf.read(2))
        if group_id > 0:
            # we've just started reading a parameter. if its group doesn't
            # exist, create a blank one. add the parameter to the group.
            self.setdefault(group_id, Group()).add_param(name, handle=buf)
        else:
            # we've just started reading a group. if a group with the
            # appropriate id exists already (because we've already created
            # it for a parameter), just set the name of the group.
            # otherwise, add a new group.
            group_id = abs(group_id)
            size, = struct.unpack('B', buf.read(1))
            desc = size and buf.read(size) or ''
            group = self.get(group_id)
            if group is not None:
                group.name = name
                group.desc = desc
                # Also index the now-named group under its name.
                self[name] = group
            else:
                self.add_group(group_id, name, desc)
        # Advance past this entry's name/offset fields to the next entry.
        bytes = bytes[2 + abs(chars_in_name) + offset_to_next:]
    self.check_metadata()
def getStatusText(self):
    """Return a human-readable status string for this to-do item.

    Covers needs-action/in-process (with due/overdue information),
    completed and cancelled states.
    """
    sout = StringIO()
    if self.mStatus in (definitions.eStatus_VToDo_NeedsAction,
                        definitions.eStatus_VToDo_InProcess):
        if self.hasEnd():
            # Check due date
            today = DateTime()
            today.setToday()
            if self.getEnd() > today:
                # Bug fix: StringIO has no append(); use write().
                sout.write("Due: ")
                whendue = self.getEnd() - today
                if (whendue.getDays() > 0) and (whendue.getDays() <= 7):
                    # Bug fix: write() requires a string, not an int.
                    sout.write(str(whendue.getDays()))
                    sout.write(" days")
                else:
                    sout.write(self.getEnd().getLocaleDate(DateTime.NUMERICDATE))
            elif self.getEnd() == today:
                sout.write("Due today")
            else:
                sout.write("Overdue: ")
                overdue = today - self.getEnd()
                if overdue.getWeeks() != 0:
                    sout.write(str(overdue.getWeeks()))
                    sout.write(" weeks")
                else:
                    sout.write(str(overdue.getDays() + 1))
                    sout.write(" days")
        else:
            sout.write("Not Completed")
    elif self.mStatus == definitions.eStatus_VToDo_Completed:
        if self.hasCompleted():
            sout.write("Completed: ")
            sout.write(self.getCompleted().getLocaleDate(DateTime.NUMERICDATE))
        else:
            sout.write("Completed")
    elif self.mStatus == definitions.eStatus_VToDo_Cancelled:
        # Bug fix: the original tested the constant's truthiness instead of
        # comparing it against self.mStatus, so every unhandled status
        # rendered as "Cancelled".
        sout.write("Cancelled")
    # Bug fix: StringIO exposes getvalue(), not Java-style toString().
    return sout.getvalue()