Example #1
	def expand_macro(self, formatter, name, args):
		if args is None:
			return "No diagram text defined!"

		diagramText = str(args).strip()
		
		# First, encode the data.
		data = urllib.urlencode({"message" : diagramText, "style" : self.style, "paginate" : 0, "paper" : "letter", "landscape" : 0, "page" : 1, "format" : "png"})
		# Now get that file-like object again, remembering to mention the data.
		f = urllib.urlopen("http://www.websequencediagrams.com/index.php", data)
		# Read the results back.
		s = f.read()
		f.close()
		
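		# Assumption: the service replies with a small JSON-like blob such as
		# {img: "?png=...", errors: []}; the lines below pull the first
		# key's quoted value (the image location) out by hand.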
		s = s[1:-1]
		seqargs = s.split(",")
		locargs = seqargs[0].split(":")
		loc = locargs[1].strip()
		loc = loc[1:-1]
		
		s = StringIO()
		s.write("{{{\n")
		s.write("#!html\n")
		s.write("<img src='http://www.websequencediagrams.com/%s'>" % loc)
		s.write("\n}}}\n")
		v = s.getvalue()
		s.close()
		
		return wiki_to_html(v, self.env, formatter.req)
Example #2
    def getSavedFormInputForEdit(self, header=False, delimiter=','):
        """ Return the saved form input as CSV text. """
        sbuf = StringIO()
        writer = csvwriter(sbuf, delimiter=delimiter)
        names = self.getColumnNames()
        titles = self.getColumnTitles()

        if header:
            encoded_titles = []
            for t in titles:
                if isinstance(t, unicode):
                    t = t.encode('utf-8')
                encoded_titles.append(t)
            writer.writerow(encoded_titles)
        for row in self.getSavedFormInput():
            def get_data(row, i):
                data = row.get(i, '')
                if self._is_file_data(data):
                    return data.filename
                if isinstance(data, unicode):
                    return data.encode('utf-8')
                return data
            writer.writerow([get_data(row, i) for i in names])
        res = sbuf.getvalue()
        sbuf.close()
        return res
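
A hypothetical call, assuming form is an instance of the class this method belongs to:

    csv_text = form.getSavedFormInputForEdit(header=True)
    print(csv_text)  # column titles followed by one CSV row per saved input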
Example #3
def _export_dashboard(request, dashboard_id):
    """
    Exports dashboard as multi-sheet Excel workbook.

    This is a helper method for export_dashboard and export_shared_dashboard
    below. It renders an export without requiring login, so it should not be
    exposed directly via a URL pattern.
    """
    dashboard = Dashboard.objects.get(id=dashboard_id)

    stream = StringIO()
    workbook = xlwt.Workbook()

    for metric in dashboard.metric_set.all():
        metric.export(workbook)

    workbook.save(stream)

    response = HttpResponse(
        stream.getvalue(),
        mimetype='application/vnd.ms-excel'
    )
    response['Content-Disposition'] = 'attachment; filename="%s-%s.xls"' \
        % (slugify(dashboard.name), date.today())
    stream.close()
    return response
Example #4
 def close(self):
     if self.mode == 'w':
         disposition = 'attachment; filename="%s"' % self.filename
         self.hosted_file._root._browser.put(
             self.url, self.getvalue(),
             self.content_type, {'Content-Disposition' : disposition})
     StringIO.close(self)
Example #5
def dump_fixtures(request):
    output = StringIO()

    fixture = request.GET.get('fixture', None)

    try:
        if fixture:
            call_command('dumpdata', fixture, '--indent=2', stdout=output)
        else:
            call_command('dumpdata', '--indent=2', stdout=output)

        data = output.getvalue()
        output.close()

        if fixture:
            file_label = 'fixtures_%s_%s' % (fixture, datetime.datetime.now().strftime('%d-%b-%Y_%H-%M'))
        else:
            file_label = 'fixtures_all_%s' % datetime.datetime.now().strftime('%d-%b-%Y_%H-%M')
        response = HttpResponse(data, content_type="application/json")
        response['Content-Disposition'] = 'attachment; filename=%s' % file_label
        return response
    except Exception:
        dest = request.META.get('HTTP_REFERER', '/')
        messages.info(request, 'Fixture name not recognized: %s' % fixture)
        return HttpResponseRedirect(dest)
Example #6
 def __init__(self, config):
     stream = StringIO()
     stream.write("Current platform settings:\n")
     configfile.pretty_print(config.get_all_platforms(), stream)
     self.description = stream.getvalue()
     stream.close()
     self.config = config
Example #7
 def write_out(self, fde):
     """
     Write out full packet chain
     """
     # TODO: this implementation is outright terrible.
     # Fix it in any way, shape or form.
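     # Assumption: this follows MySQL-style wire framing, where a payload
     # larger than 0xffffff bytes is split across packets, each prefixed
     # with its own header and an incremented sequence id.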
     sio = StringIO()
     seq_id = self.start_seq_id
     net_total_written = 0
     total_written = 0
     last_total_written = 0xffffff
     for label, field in self.fields:
         written = field.write_out(sio, label='\t%s'  % label)
         total_written += written
         net_total_written += written
         if total_written >= 0xffffff:
             self._write_packet_header(0xffffff, seq_id, fde)
             # rewind before reading back; the writes left the position at the end
             sio.seek(0)
             fde.write(sio.read(0xffffff))
             remaining_bytes = sio.read()
             sio.close()
             sio = StringIO(remaining_bytes)
             last_total_written = total_written
             total_written -= 0xffffff
             seq_id += 1
     if last_total_written == 0xffffff:
         self._write_packet_header(total_written, seq_id, fde)
         sio.seek(0)
         fde.write(sio.read(total_written))
         net_total_written += total_written
     return (net_total_written, seq_id)
Example #8
class WebDAVFile(File):
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = StringIO()
        self._is_read = False

    @property
    def name(self):
        return self._name

    @property
    def size(self):
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            self.file = self._storage._read(self._name)
            self._is_read = True

        return self.file.read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True
        self._is_read = True

    def close(self):
        self.file.close()
Example #9
    def get(self,filename):
        name,sep,ext = filename.rpartition(".")
        if not sep:
            img_name = ext
        else:
            img_name = name
        try:
            img_file = self.fs.get_version(filename=img_name)
            img = img_file.read()
        except gridfs.errors.NoFile:
            raise tornado.web.HTTPError(500, 'image not found')
    
        resize = self.get_argument('_re', None)
        if resize:
            width, resep, height = resize.rpartition("x")
            output = StringIO()
            output.write(img)
            output.seek(0)
            im = Image.open(output)
            format = im.format
#            size = im.size
#            logging.info("format is %s ,size is %s" %(format,size))
            im = im.resize((int(width),int(height)), Image.ANTIALIAS)
            tmp = StringIO()
            im.save(tmp, format)
            img = tmp.getvalue()
            tmp.close()
            output.close()

        self.add_header('Content-Type',img_file.content_type)
        self.write(img)
        self.finish()
Example #10
 def test_packAndUnpackVerySmallIBMFloats(self):
     """
     The same test as test_packAndUnpackIBMFloat just for small numbers
     because they might suffer more from the inaccuracies.
     """
     # Some random seeds.
     seeds = [123, 1592, 4482, 601, 1, 783, 6849]
     endians = ['<', '>']
     # Loop over all combinations.
     for seed in seeds:
         # Generate 50000 small random floats in the range [0, 1E-5).
         np.random.seed(seed)
         data = 1E-5 * np.random.ranf(50000)
         # Convert to float64 in case native floats are different to be
         # able to utilize double precision.
         data = np.require(data, 'float64')
         # Loop over little and big endian.
         for endian in endians:
             # Pack.
             f = StringIO()
             DATA_SAMPLE_FORMAT_PACK_FUNCTIONS[1](f, data, endian)
             # Jump to beginning and read again.
             f.seek(0, 0)
             new_data = DATA_SAMPLE_FORMAT_UNPACK_FUNCTIONS[1](f,
                                     len(data), endian)
             f.close()
             # A relative tolerance of 1E-6 is considered good enough.
             rms1 = rms(data, new_data)
             self.assertTrue(rms1 < 1E-6)
Example #11
 def test_packAndUnpackIBMSpecialCases(self):
     """
     Tests the packing and unpacking of several powers of 16 which are
     problematic because they need separate handling in the algorithm.
     """
     endians = ['>', '<']
     # Create the first 10 powers of 16.
     data = []
     for i in xrange(10):
         data.append(16 ** i)
         data.append(-16 ** i)
     data = np.array(data)
     # Convert to float64 in case native floats are different to be
     # able to utilize double precision.
     data = np.require(data, 'float64')
     # Loop over little and big endian.
     for endian in endians:
         # Pack.
         f = StringIO()
         DATA_SAMPLE_FORMAT_PACK_FUNCTIONS[1](f, data, endian)
         # Jump to beginning and read again.
         f.seek(0, 0)
         new_data = DATA_SAMPLE_FORMAT_UNPACK_FUNCTIONS[1](f,
                                 len(data), endian)
         f.close()
         # Test both.
         np.testing.assert_array_equal(new_data, data)
Example #12
    def CharacterDataHandler(self, data):

        if self.write_to == 'Name':
            data = data.strip()
            self.nvpair.name = data
        elif self.write_to == 'Value':
            data = data.strip()
            self.nvpair.value = data
        elif self.write_to == 'DataSpace':
            data = data.strip()
            self.coordsys.dataspace = xform_codes.code[data]
        elif self.write_to == 'TransformedSpace':
            data = data.strip()
            self.coordsys.xformspace = xform_codes.code[data]
        elif self.write_to == 'MatrixData':
            # conversion to numpy array
            from StringIO import StringIO
            c = StringIO(data)
            self.coordsys.xform = np.loadtxt(c)
            c.close()
        elif self.write_to == 'Data':
            da_tmp = self.img.darrays[-1]
            da_tmp.data = read_data_block(da_tmp.encoding, da_tmp.endian, \
                                          da_tmp.ind_ord, da_tmp.datatype, \
                                          da_tmp.dims, data)
        elif self.write_to == 'Label':
            self.label.label = data.strip()
Example #13
 def test_packAndUnpackIBMFloat(self):
     """
     Packing and unpacking IBM floating points might yield some inaccuracies
     due to floating point rounding errors.
     This test tests a large number of random floating point numbers.
     """
     # Some random seeds.
     seeds = [1234, 592, 459482, 6901, 0, 7083, 68349]
     endians = ['<', '>']
     # Loop over all combinations.
     for seed in seeds:
         # Generate 50000 random floats from -100000 to +100000.
         np.random.seed(seed)
         data = 200000.0 * np.random.ranf(50000) - 100000.0
         # Convert to float64 in case native floats are different to be
         # able to utilize double precision.
         data = np.require(data, 'float64')
         # Loop over little and big endian.
         for endian in endians:
             # Pack.
             f = StringIO()
             DATA_SAMPLE_FORMAT_PACK_FUNCTIONS[1](f, data, endian)
             # Jump to beginning and read again.
             f.seek(0, 0)
             new_data = DATA_SAMPLE_FORMAT_UNPACK_FUNCTIONS[1](f,
                                     len(data), endian)
             f.close()
             # A relative tolerance of 1E-6 is considered good enough.
             rms1 = rms(data, new_data)
             self.assertTrue(rms1 < 1E-6)
Example #14
def readFile():
    global curId

    script = StringIO(QuestScripts.SCRIPT)

    def readLine():
        return script.readline().replace('\r', '')

    gen = tokenize.generate_tokens(readLine)
    line = getLineOfTokens(gen)

    while line is not None:

        if line == []:
            line = getLineOfTokens(gen)
            continue

        if line[0] == 'ID':
            parseId(line)
        elif curId is None:
            notify.error('Every script must begin with an ID')
        else:
            lineDict[curId].append(line)

        line = getLineOfTokens(gen)

    script.close()
Example #15
    def get_results(self):
        """Get analysis results.
        @return: data.
        """
        root = self._get_root(container="cuckoo", create=False)

        if not os.path.exists(root):
            return False

        zip_data = StringIO()
        zip_file = ZipFile(zip_data, "w", ZIP_DEFLATED)

        root_len = len(os.path.abspath(root))
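        # root_len is the prefix length stripped below so that archive
        # member names stay relative to the results root.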
        
        for root, dirs, files in os.walk(root):
            archive_root = os.path.abspath(root)[root_len:]
            for name in files:
                path = os.path.join(root, name)
                archive_name = os.path.join(archive_root, name)
                zip_file.write(path, archive_name, ZIP_DEFLATED)
        
        zip_file.close()
        data = xmlrpclib.Binary(zip_data.getvalue())
        zip_data.close()

        return data
Example #16
    def put(self, path, v, wait='short'):

        c = self.con
        filesize = len(v)
        f = StringIO(v)

        c.setopt(pycurl.INFILESIZE, filesize)
        c.setopt(pycurl.PUT, 1)
        c.setopt(pycurl.READFUNCTION, FileReader(f).read_callback)

        # The TIMEOUT depends on whether we are waiting for a long query or not:
        # long: data_timeout, e.g. for huge brok receptions
        # short: timeout, e.g. for a plain "ok" connection
        if wait == 'short':
            c.setopt(c.TIMEOUT, self.timeout)
        else:
            c.setopt(c.TIMEOUT, self.data_timeout)
        # if proxy:
        #    c.setopt(c.PROXY, proxy)
        # Pycurl wants a list of tuples as args
        c.setopt(c.URL, str(self.uri + path))
        # Ok now manage the response
        response = StringIO()
        c.setopt(pycurl.WRITEFUNCTION, response.write)
        # c.setopt(c.VERBOSE, 1)
        try:
            c.perform()
        except pycurl.error, error:
            errno, errstr = error
            f.close()
            raise HTTPException('Connection error to %s : %s' % (self.uri, errstr))
Example #17
    def upload_analyzer(self):
        """Upload analyzer to guest.
        @return: operation status.
        """
        zip_data = StringIO()
        zip_file = ZipFile(zip_data, "w", ZIP_DEFLATED)

        root = os.path.join("analyzer", self.platform)
        root_len = len(os.path.abspath(root))

        if not os.path.exists(root):
            log.error("No valid analyzer found at path: %s" % root)
            return False

        for root, dirs, files in os.walk(root):
            archive_root = os.path.abspath(root)[root_len:]
            for name in files:
                path = os.path.join(root, name)
                archive_name = os.path.join(archive_root, name)
                zip_file.write(path, archive_name, ZIP_DEFLATED)

        zip_file.close()
        data = xmlrpclib.Binary(zip_data.getvalue())
        zip_data.close()

        log.debug("Uploading analyzer to guest (ip=%s)" % self.ip)
        self.server.add_analyzer(data)
Example #18
class MockRequest(object):
    def __init__(self, utoken, uri=None):
        self._utoken = utoken
        self.uri = uri
        self._finishedDeferreds = []
        self.stream = StringIO()
        self.args = {}

    def getSession(self):
        return MockSession(self._utoken)

    def finish(self):
        pass

    def write(self, data):
        self.stream.write(data)

    def clear(self):
        self.stream.close()
        self.stream = StringIO()

    def notifyFinish(self):
        finished = Deferred()
        self._finishedDeferreds.append(finished)
        return finished

    def setResponseCode(self, code, error):
        self.code = code
        self.error = error

    def __str__(self):
        return "\nrequest:args = %s\ndata = %s\n" % (self.args, self.stream.getvalue())
Example #19
File: bed.py Project: lukauskas/dgw
def read_bed(bed_file):
    """
    Parses the bed file specified into `pd.DataFrame`
    :param bed_file:
    :return:
    :rtype: `pd.DataFrame`
    """
    f = open(bed_file, 'r')
    try:
        s = StringIO()
        # Filter out all lines that do not start with "chr" as BED files are allowed to contain some junk
        for line in f:
            if line.startswith('chr'):
                s.write(line)
        s.seek(0)
        regions = pd.read_csv(s, sep="\t", header=None)
    finally:
        f.close()
        s.close()
    regions.columns = BED_COLUMNS[:len(regions.columns)]

    if 'name' in regions.columns:
        # The name column is optional in BED, so only check it when present
        if len(regions.name) != len(regions.name.drop_duplicates()):
            raise Exception('Input BED file {0!r} contains duplicate values in name column. '
                            'Please ensure the names of the regions are unique'.format(bed_file))
        regions = regions.set_index('name')

    return Regions(regions)
Example #20
File: handler.py Project: moul/junk
    def send(self, content = '', code = 200, message = 'OK', headers = None):
        # avoid the mutable-default-argument pitfall
        headers = headers if headers is not None else {}
        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'text/html; charset=utf-8'

        #headers['Expires'] = 'Sun, 25 Jan 1988 13:15:00 GMT'
        #headers['Last-Modified'] = 'Sun, 25 Jan 1988 13:15:00 GMT'
        #headers['Cache-Control'] = 'must-revalidate'
        #headers['X-Powered-By'] = 'jambe'

        headers['Content-Length'] = len(content)
        if 'gzip' in self.headers.get('Accept-Encoding', 'none').split(','):
            from StringIO import StringIO
            import gzip
            headers['Content-Encoding'] = 'gzip'
            f = StringIO()
            gzf = gzip.GzipFile(mode = "wb", fileobj = f, compresslevel = 1)
            gzf.write(content)
            gzf.close()
            content = f.getvalue()
            f.close()
            new_content_length = len(content)
            self.log_message('Gzip: old-length=%s, new-length=%s' % (headers['Content-Length'], new_content_length))
            headers['Content-Length'] = new_content_length

        self.send_response(code, message)
        for key in headers:
            self.send_header(key, headers[key])
        self.end_headers()

        if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
            self.wfile.write(content)
Example #21
 def execute_from_socket(self,params):
     """
     Executes an arbitrary python command through the socket, and returns the console
     output
     """
     dc=self.__determineContext__(params).dict
     # set up a buffer to capture the response and temporarily grab stdout
     params['request']['protocol'].transport.write('<Python<           '+params['command']+'\n\r')        
     rbuffer = StringIO()
     sys.stdout = rbuffer
     try:
         exec(params['command'], dc)
     except:
         # restore stdout before bailing out so it does not stay captured
         sys.stdout = sys.__stdout__
         params['request']['protocol'].transport.write('>Python>   ERROR\n\r')
         params['request']['protocol'].transport.loseConnection()
         return
     # exec command has side-effect of adding builtins; remove them
     if dc.has_key('__builtins__'): 
         del dc['__builtins__']
     # update data context
     # remember to restore the original stdout!
     sys.stdout = sys.__stdout__ 
     # request variables from the data context and write them to the user.
     params['request']['protocol'].transport.write('>Code>')
     for var in dc:
         params['request']['protocol'].transport.write('>Var>' + var + ' is ' + str(type(dc[var])) + ' and is equal to ' + str(dc[var]))
     params['request']['protocol'].transport.write('>Code>')
     # output the response buffer to the HTTP request
     params['request']['protocol'].transport.write('>Python>   '+rbuffer.getvalue()+'\n\r')
     params['request']['protocol'].transport.loseConnection()
     rbuffer.close()
Example #22
def fill_file(result):
    url = result
    #print url
    f = get_page(url)
    #print f
    if f == "":
        return "a"
    f = f.read()
    ##removes tags
    f = html_to_text(f)
    ## decode the page to unicode
    encoding = 'utf-8'
    try:
        ustr = f.decode(encoding)
    except:
        # decoding failed; fall back to the raw text
        return f
    b = StringIO()
    old = sys.stdout
    try:
        sys.stdout = b
        html2text.wrapwrite(html2text.html2text(ustr, url))
    finally: sys.stdout = old
    text = b.getvalue()
    b.close()
    return text
Example #23
    def parse_pdf_pdfminer(self, f, fpath):
        try:
            laparams = LAParams()
            laparams.all_texts = True  
            rsrcmgr = PDFResourceManager()
            pagenos = set()

            if self.dedup:
                self.dedup_store = set()

            self.handler.print_header(fpath)
            page_num = 0
            for page in PDFPage.get_pages(f, pagenos, check_extractable=True):
                page_num += 1

                retstr = StringIO()
                device = TextConverter(rsrcmgr, retstr, laparams=laparams)
                interpreter = PDFPageInterpreter(rsrcmgr, device)
                interpreter.process_page(page)
                data = retstr.getvalue()
                retstr.close()
                device.close()

                self.parse_page(fpath, data, page_num)
            self.handler.print_footer(fpath)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            self.handler.print_error(fpath, e)
Example #24
    def convert_df_to_njson_and_upload(self, df, destination_blobname, metadata={}, tmp_bucket='isb-cgc-dev'):

        log.info("Converting dataframe into a new-line delimited JSON file")

        file_to_upload = StringIO()

        for i, rec in df.iterrows():
            file_to_upload.write(rec.convert_objects(convert_numeric=False).to_json() + "\n")
        file_to_upload.seek(0)

        bucket = self.client.get_bucket(tmp_bucket)
        upload_blob = storage.blob.Blob(destination_blobname, bucket=bucket)

        upload_blob.upload_from_string(file_to_upload.getvalue())
        # set blob metadata
        if metadata:
            log.info("Setting object metadata")
            upload_blob.metadata = metadata
            upload_blob.patch()
        file_to_upload.close()

        # check if the uploaded blob exists. Just a sanity check
        if upload_blob.exists():
            log.info("The uploaded file {0} has size {1} bytes.".format(destination_blobname, upload_blob.size))
            return True
        else:
            raise Exception('File upload failed - {0}.'.format(destination_blobname)) 
Example #25
    def _createCrumbs(self, locations, img=""):
        """
        Assemble the breadcrumbs.
        """
        buff = StringIO()
        count = 0
        size = len(locations) - 1

        for title, url in locations:
            log.debug("title: %s, url: %s", title, url)
            # Make the last page non-linkable.
            if size == count: url = ""

            if count > 0:
                if img != "":
                    buff.write('<img class="arrow" src="%s" alt="&raquo;" />'\
                               % img)
                else:
                    buff.write('<span class="arrow">&raquo;</span>')

            if url:
                buff.write('<a class="title" href="%s">%s</a>' % (url, title))
            else:
                buff.write('<span class="title">%s</span>' % title)

            count += 1

        crumbs = buff.getvalue()
        buff.close()
        return crumbs
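
For illustration, a hypothetical call with invented titles and URLs; the last location is rendered as plain text because its URL is blanked:

    crumbs = self._createCrumbs([("Home", "/"), ("Docs", "/docs"), ("Page", "/docs/page")])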
Example #26
class _PythonCapturer(object):

    def __init__(self, stdout=True):
        if stdout:
            self._original = sys.stdout
            self._set_stream = self._set_stdout
        else:
            self._original = sys.stderr
            self._set_stream = self._set_stderr
        self._stream = StringIO()
        self._set_stream(self._stream)

    def _set_stdout(self, stream):
        sys.stdout = stream

    def _set_stderr(self, stream):
        sys.stderr = stream

    def release(self):
        # Original stream must be restored before closing the current
        self._set_stream(self._original)
        self._stream.flush()
        output = self._stream.getvalue()
        self._stream.close()
        return output
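
A minimal usage sketch for the capturer above (assumes StringIO is imported):

    capturer = _PythonCapturer(stdout=True)
    print('hello')                # written into the in-memory stream
    output = capturer.release()   # restores sys.stdout and returns 'hello\n'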
Example #27
def update_addon(uid,wow_dir):
    """Downloads latest version of addon and extract it into wow directory.
    
    uid: Wow interface addon addon id
    wow_dir: Wow addon directory"""
    url = 'http://www.wowinterface.com/patcher/info-%d.xml' % uid
    dom = minidom.parse(urllib2.urlopen(url))
    
    if dom.getElementsByTagName('error'):
        if int(dom.getElementsByTagName('id')[0].firstChild.nodeValue) == 403:
            print 'The file is still being checked by mods, update will be downloaded next time you run this script.' #This function shouldn't print.
            return False
        else:
            print 'Please give this info to the addon author: <%d> - %s' % (int(dom.getElementsByTagName('id')[0].firstChild.nodeValue),
             str(dom.getElementsByTagName('message')[0].firstChild.nodeValue))
            return False
    file_location = str(dom.getElementsByTagName('UIFileURL')[0].firstChild.nodeValue)
    size = int(dom.getElementsByTagName('UISize')[0].firstChild.nodeValue)
    if size > 1048576: # If the size is larger than 1 MB
        print 'Downloading big file, this may take more than a few seconds' #This function shouldn't print. This is just a workaround. Again.
    f = urllib2.urlopen(file_location)
    data = StringIO(f.read())
    f.close()
    data = zipfile.ZipFile(data)
    addon_dirs = []
    for f in data.namelist():
        dir = str(f.split('/',1)[0])
        if not (dir in addon_dirs):
            addon_dirs.append(dir)
            wuiup_removedir(os.path.join(wow_dir, dir))
    wuiup_unzip(data,wow_dir)
    data.close()
    return True
Example #28
        def __call__(self, parser, namespace, values, option_string=None):

            # create the help string and store it into a string
            from StringIO import StringIO
            fstr = StringIO()
            try:
                parser.print_help(file=fstr)
                help_str = fstr.getvalue()
            finally:
                fstr.close()

            # create the regular expression to match the description
            descmatch = r'{0}(.+?){0}(.+?){0}'
            # escape possible dangerous characters
            esplit_string = re.escape(split_string)
            re_desc = re.compile(descmatch.format(esplit_string),
                                 flags=re.DOTALL)

            # select the case according to which option_string is selected
            if option_string == '-h':
                to_sub = r'\1'
            elif option_string == '--help':
                to_sub = r'\1\2'

            print(re_desc.sub(to_sub, help_str))
            parser.exit()
Example #29
class BaseTest(TestCase):

    def setUp(self):
        """ ignore output buffer """
        self.stdout = StringIO()
        self.stderr = StringIO()
#        self.new_command = new.Command()
#        app_dir = __import__(self.new_command.usercommand)
#        self.app_dir = app_dir.__path__[0]

    def tearDown(self):
        """ ignore output buffer """
        self.stdout.close()
        self.stderr.close()

    def new_setUp(self):
        """   """
#        os.mkdir(os.path.join(self.app_dir, "static"))
#        os.mkdir(os.path.join(self.app_dir, "static", "js"))
#
#        self.assertRaises(
#            (SystemExit, ValueError),
#            self.new_command.execute,
#            "gencmd",
##            stdout=self.stdout,
##            stderr=self.stderr
#        )

    def new_tearDown(self):
        pass
Example #30
def get_PDB_universe(pdbcode):
  # Get file from PDB
  filename = 'pdb'+str(pdbcode)+'.ent.gz'
  ftp = FTP('ftp.wwpdb.org')
  ftp.login()
  ftp.cwd('pub/pdb/data/structures/all/pdb')
  gzipfile = StringIO() # buffer for retrieved file
  ftp.retrbinary('RETR {}'.format(filename), gzipfile.write)
  ftp.quit()

  # unzip PDB file
  gzipfile.seek(0) # reset StringIO object for reading
  with gzip.GzipFile(pdbcode, 'rb', 0, gzipfile) as unzipper:
    pdbcontent = unzipper.read()
  gzipfile.close()

  # Is there no way to create a Universe directly from a text object?
  fh = tempfile.NamedTemporaryFile(suffix='.pdb', mode='w')
  fh.write(pdbcontent)
  fh.flush()

  # create universe
  u = mda.Universe(fh.name)

  # clean up and return
  fh.close()
  return u
Example #31
    geometry.setAttribute('GeometryType', 'ORIGIN_DxDyDz')
    grid.appendChild(geometry)
    # create DataItem for origin
    origin_data_item = doc.createElement('DataItem')
    origin_data_item.setAttribute('Dimensions', '3')
    origin_data_item.setAttribute('Format', 'XML')
    origin_values = doc.createTextNode('0.0 0.0 0.0')
    origin_data_item.appendChild(origin_values)
    geometry.appendChild(origin_data_item)
    # create DataItem for dxdydz
    dxdydz_data_item = doc.createElement('DataItem')
    dxdydz_data_item.setAttribute('Dimensions', '3')
    dxdydz_data_item.setAttribute('Format', 'XML')
    dxdydz_values = doc.createTextNode('1.0 1.0 1.0')
    dxdydz_data_item.appendChild(dxdydz_values)
    geometry.appendChild(dxdydz_data_item)
    # create Attribute for values
    attribute = doc.createElement('Attribute')
    attribute.setAttribute('Name', 'temperature')
    attribute.setAttribute('Center', 'Node')
    grid.appendChild(attribute)
    # create DataItem for Attribute
    data_item = doc.createElement('DataItem')
    data_item.setAttribute('Format', 'XML')
    data_item.setAttribute('Dimensions', dim_str)
    data_text = doc.createTextNode(data_str.getvalue())
    data_item.appendChild(data_text)
    attribute.appendChild(data_item)
    data_str.close()
    print(doc.toprettyxml(indent='  '))
Example #32
    def export_reports(self,
                       format='json',
                       report_id=None,
                       raw_or_label='raw',
                       raw_or_label_headers='raw',
                       export_checkbox_labels='false',
                       decimal_character=None,
                       df_kwargs=None):
        """
        Export a report of the Project

        Parameters
        ----------
        report_id : the report ID number provided next to the report name
            on the report list page
        format :  (``'json'``), ``'csv'``, ``'xml'``, ``'df'``
            Format of returned data. ``'json'`` returns json-decoded
            objects while ``'csv'`` and ``'xml'`` return other formats.
            ``'df'`` will attempt to return a ``pandas.DataFrame``.
        raw_or_label : raw [default], label - export the raw coded values or
            labels for the options of multiple choice fields
        raw_or_label_headers : raw [default], label - (for 'csv' format 'flat'
            type only) for the CSV headers, export the variable/field names
            (raw) or the field labels (label)
        export_checkbox_labels : true, false [default] - specifies the format of
            checkbox field values specifically when exporting the data as labels
            (i.e., when rawOrLabel=label). When exporting labels, by default
            (without providing the exportCheckboxLabel flag or if
            exportCheckboxLabel=false), all checkboxes will either have a value
            'Checked' if they are checked or 'Unchecked' if not checked.
            But if exportCheckboxLabel is set to true, it will instead export
            the checkbox value as the checkbox option's label (e.g., 'Choice 1')
            if checked or it will be blank/empty (no value) if not checked.
            If rawOrLabel=false, then the exportCheckboxLabel flag is ignored.
        decimal_character : If specified, force all numbers into same decimal
            format. You may choose to force all data values containing a
            decimal to have the same decimal character, which will be applied
            to all calc fields and number-validated text fields. Options
            include comma ',' or dot/full stop '.', but if left blank/null,
            then it will export numbers using the fields' native decimal format.
            Simply provide the value of either ',' or '.' for this parameter.

        Returns
        -------
        Per Redcap API:
        Data from the project in the format and type specified
        Ordered by the record (primary key of project) and then by event id
        """

        ret_format = format
        if format == 'df':
            from pandas import read_csv
            ret_format = 'csv'
        pl = self.__basepl(content='report', format=ret_format)
        keys_to_add = (report_id, raw_or_label, raw_or_label_headers,
                       export_checkbox_labels, decimal_character)
        str_keys = ('report_id', 'rawOrLabel', 'rawOrLabelHeaders',
                    'exportCheckboxLabel', 'decimalCharacter')
        for key, data in zip(str_keys, keys_to_add):
            if data:
                pl[key] = data
        response, _ = self._call_api(pl, 'exp_report')
        if format in ('json', 'csv', 'xml'):
            return response
        elif format == 'df':
            if not df_kwargs:
                if self.is_longitudinal():
                    df_kwargs = {
                        'index_col': [self.def_field, 'redcap_event_name']
                    }
                else:
                    df_kwargs = {'index_col': self.def_field}
            buf = StringIO(response)
            df = self.read_csv(buf, **df_kwargs)
            buf.close()
            return df
Example #33
    def import_records(self,
                       to_import,
                       overwrite='normal',
                       format='json',
                       return_format='json',
                       return_content='count',
                       date_format='YMD',
                       force_auto_number=False):
        """
        Import data into the RedCap Project

        Parameters
        ----------
        to_import : array of dicts, csv/xml string, ``pandas.DataFrame``
            :note:
                If you pass a csv or xml string, you should use the
                ``format`` parameter appropriately.
            :note:
                Keys of the dictionaries should be a subset of the project's
                fields, but this isn't a requirement. If you provide keys
                that aren't defined fields, the returned response will
                contain an ``'error'`` key.
        overwrite : ('normal'), 'overwrite'
            ``'overwrite'`` will erase values previously stored in the
            database if not specified in the to_import dictionaries.
        format : ('json'),  'xml', 'csv'
            Format of incoming data. By default, to_import will be json-encoded
        return_format : ('json'), 'csv', 'xml'
            Response format. By default, response will be json-decoded.
        return_content : ('count'), 'ids', 'nothing'
            By default, the response contains a 'count' key with the number of
            records just imported. By specifying 'ids', a list of ids
            imported will be returned. 'nothing' will only return
            the HTTP status code and no message.
        date_format : ('YMD'), 'DMY', 'MDY'
            Describes the formatting of dates. By default, date strings
            are formatted as 'YYYY-MM-DD' corresponding to 'YMD'. If date
            strings are formatted as 'MM/DD/YYYY' set this parameter as
            'MDY' and if formatted as 'DD/MM/YYYY' set as 'DMY'. No
            other formattings are allowed.
        force_auto_number : ('False') Enables automatic assignment of record IDs
            of imported records by REDCap. If this is set to true, and auto-numbering
            for records is enabled for the project, auto-numbering of imported records
            will be enabled.

        Returns
        -------
        response : dict, str
            response from REDCap API, json-decoded if ``return_format`` == ``'json'``
        """
        pl = self.__basepl('record')
        if hasattr(to_import, 'to_csv'):
            # We'll assume it's a df
            buf = StringIO()
            if self.is_longitudinal():
                csv_kwargs = {
                    'index_label': [self.def_field, 'redcap_event_name']
                }
            else:
                csv_kwargs = {'index_label': self.def_field}
            to_import.to_csv(buf, **csv_kwargs)
            pl['data'] = buf.getvalue()
            buf.close()
            format = 'csv'
        elif format == 'json':
            pl['data'] = json.dumps(to_import, separators=(',', ':'))
        else:
            # don't do anything to csv/xml
            pl['data'] = to_import
        pl['overwriteBehavior'] = overwrite
        pl['format'] = format
        pl['returnFormat'] = return_format
        pl['returnContent'] = return_content
        pl['dateFormat'] = date_format
        pl['forceAutoNumber'] = force_auto_number
        response = self._call_api(pl, 'imp_record')[0]
        if 'error' in response:
            raise RedcapError(str(response))
        return response
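
A hypothetical import call, assuming project is a connected Project instance:

    response = project.import_records([{'record_id': '1', 'age': '42'}])
    # with the defaults, response is a dict such as {'count': 1}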
Example #34
    def export_records(self,
                       records=None,
                       fields=None,
                       forms=None,
                       events=None,
                       raw_or_label='raw',
                       event_name='label',
                       format='json',
                       export_survey_fields=False,
                       export_data_access_groups=False,
                       df_kwargs=None,
                       export_checkbox_labels=False,
                       filter_logic=None,
                       date_begin=None,
                       date_end=None):
        """
        Export data from the REDCap project.

        Parameters
        ----------
        records : list
            array of record names specifying specific records to export.
            by default, all records are exported
        fields : list
            array of field names specifying specific fields to pull
            by default, all fields are exported
        forms : list
            array of form names to export. If in the web UI, the form
            name has a space in it, replace the space with an underscore
            by default, all forms are exported
        events : list
            an array of unique event names from which to export records

            :note: this only applies to longitudinal projects
        raw_or_label : (``'raw'``), ``'label'``, ``'both'``
            export the raw coded values or labels for the options of
            multiple choice fields, or both
        event_name : (``'label'``), ``'unique'``
             export the unique event name or the event label
        format : (``'json'``), ``'csv'``, ``'xml'``, ``'df'``
            Format of returned data. ``'json'`` returns json-decoded
            objects while ``'csv'`` and ``'xml'`` return other formats.
            ``'df'`` will attempt to return a ``pandas.DataFrame``.
        export_survey_fields : (``False``), True
            specifies whether or not to export the survey identifier
            field (e.g., "redcap_survey_identifier") or survey timestamp
            fields (e.g., form_name+"_timestamp") when surveys are
            utilized in the project.
        export_data_access_groups : (``False``), ``True``
            specifies whether or not to export the
            ``"redcap_data_access_group"`` field when data access groups
            are utilized in the project.

            :note: This flag is only viable if the user whose token is
                being used to make the API request is *not* in a data
                access group. If the user is in a group, then this flag
                will revert to its default value.
        df_kwargs : dict
            Passed to ``pandas.read_csv`` to control construction of
            returned DataFrame.
            by default, ``{'index_col': self.def_field}``
        export_checkbox_labels : (``False``), ``True``
            specify whether to export checkbox values as their label on
            export.
        filter_logic : string
            specify the filterLogic to be sent to the API.
        date_begin : datetime
            for the dateRangeStart filtering of the API
        date_end : datetime
            for the dateRangeEnd filtering sent to the API

        Returns
        -------
        data : list, str, ``pandas.DataFrame``
            exported data
        """
        ret_format = format
        if format == 'df':
            ret_format = 'csv'
        pl = self.__basepl('record', format=ret_format)
        fields = self.backfill_fields(fields, forms)
        keys_to_add = (records, fields, forms, events, raw_or_label,
                       event_name, export_survey_fields,
                       export_data_access_groups, export_checkbox_labels)

        str_keys = ('records', 'fields', 'forms', 'events', 'rawOrLabel',
                    'eventName', 'exportSurveyFields',
                    'exportDataAccessGroups', 'exportCheckboxLabel')

        for key, data in zip(str_keys, keys_to_add):
            if data:
                if key in ('fields', 'records', 'forms', 'events'):
                    for i, value in enumerate(data):
                        pl["{}[{}]".format(key, i)] = value
                else:
                    pl[key] = data

        if date_begin:
            pl["dateRangeBegin"] = date_begin.strftime('%Y-%m-%d %H:%M:%S')

        if date_end:
            pl["dateRangeEnd"] = date_end.strftime('%Y-%m-%d %H:%M:%S')

        if filter_logic:
            pl["filterLogic"] = filter_logic
        response, _ = self._call_api(pl, 'exp_record')
        if format in ('json', 'csv', 'xml'):
            return response
        elif format == 'df':
            if not df_kwargs:
                if self.is_longitudinal():
                    df_kwargs = {
                        'index_col': [self.def_field, 'redcap_event_name']
                    }
                else:
                    df_kwargs = {'index_col': self.def_field}
            buf = StringIO(response)
            df = self.read_csv(buf, **df_kwargs)
            buf.close()
            return df
Example #35
    def test2(self):
        """Test: configuration file and some other files."""
        os.mkdir("t2")
        handle = open("t2/rose-t.conf", "wb")
        handle.write(r"""title=all day breakfast

[sausage]
number=3
type=pork and apple

[toast]
butter=yes
number=2
type=brown

[tomato]
number=1
type=grilled
""")
        handle.close()
        os.mkdir("t2/bin")
        handle = open("t2/bin/make-breakfast", "wb")
        handle.write(r"""#!/bin/sh
echo "Making breakfast $@"
""")
        handle.close()
        os.chmod("t2/bin/make-breakfast", 0755)
        os.mkdir("t2/etc")
        for key, val in (("sausage", "10 fat sausages"),
                         ("bread", "slice bread"), ("tomato", "a red tomato")):
            handle = open(os.path.join("t2/etc", key), "wb")
            handle.write(val + "\n")
            handle.close()
        conf_tree = self.config_tree_loader("t2", "rose-t.conf")

        string_io = StringIO()
        self.config_dumper(conf_tree.node, string_io)
        self.test(
            "t2.node", string_io.getvalue(), r"""title=all day breakfast

[sausage]
number=3
type=pork and apple

[toast]
butter=yes
number=2
type=brown

[tomato]
number=1
type=grilled
""")
        string_io.close()
        conf_dir = os.path.join(os.getcwd(), "t2")
        self.test(
            "t2.files", conf_tree.files, {
                "bin/make-breakfast": conf_dir,
                "etc/sausage": conf_dir,
                "etc/bread": conf_dir,
                "etc/tomato": conf_dir
            })
        self.test("t2.conf_dirs", conf_tree.conf_dirs, [conf_dir])
Example #36
class BrighticsPythonRunner(object):
    NO_EXCEPTION = (False, "")

    def __init__(self, use_spark=False):
        self._stdout = StringIO()
        self._is_exception = self.NO_EXCEPTION
        self._init_executer()

        import brightics.brightics_data_api as data_api
        import brightics.common.data.utils as data_util
        from brightics.common.utils import check_required_parameters

        self._globals = {
            'make_data_path_from_key': data_util.make_data_path_from_key,
            'get_data_info': data_api.get_data_info,
            'get_data_status': data_api.get_data_status,
            'get_data': data_api.get_data,
            'list_status': data_api.list_status,
            'view_data': data_api.view_data,
            'view_schema': data_api.view_schema,
            'write_data': data_api.write_data,
            'delete_data': data_api.delete_data,
            'put_data': data_api.put_data,
            'read_parquet': data_api.read_parquet,
            'read_redis': data_api.read_redis,
            'check_required_parameters': check_required_parameters
        }

        if use_spark:
            sc, sparkSession, sqlContext = brtc_java_gateway.init_spark_context(
            )

            self._globals['sc'] = sc
            self._globals['spark'] = sparkSession
            self._globals['sqlContext'] = sqlContext

        signal.signal(signal.SIGINT, self._interrupt_handler)

    def __del__(self):
        self._reset_stdout()
        self._stdout.close()
        self._exec.close()

    def run(self, code):
        try:
            self._switch_stdout()
            return self._exec.send(code), self._is_exception
        except StopIteration:
            self._init_executer()
            return self._exec.send(code), self._is_exception
        finally:
            self._reset_stdout()

    def _init_executer(self):
        self._exec = self._executer()
        next(self._exec)

    def _executer(self):
        while True:
            code = (yield self._stdout.getvalue())
            self._is_exception = self.NO_EXCEPTION

            try:
                code_tree = ast.parse(code)

                exec_code = code_tree.body[:-1]
                single_code = code_tree.body[-1:]  # last node of code

                exec_code_object = compile(ast.Module(exec_code), '<string>',
                                           'exec')
                interactive_code_object = compile(ast.Interactive(single_code),
                                                  '<string>', 'single')

                with redirect_stderr():
                    exec(exec_code_object, self._globals)
                    exec(interactive_code_object, self._globals)
            except BrighticsCoreException as bce:
                raise bce
            except Exception as e:
                self._stdout.write(traceback.format_exc())
                self._is_exception = (True,
                                      traceback.format_exception_only(
                                          type(e), e)[-1])

    def _switch_stdout(self):
        self._stdout.truncate(0)
        self._stdout.seek(0)

        self.old_stdout = sys.stdout
        sys.stdout = self._stdout

    def _reset_stdout(self):
        try:
            sys.stdout = self.old_stdout
        except:
            sys.stdout = sys.__stdout__

    def _interrupt_handler(self, signum, frame):
        """
        signum: the signal number
        frame: current stack frame (None or a frame object)
        """
        raise Exception("User Interrupt")
Example #37
 def analyze(self):
     pb = self.pb
     page = pb.get_page_source()
     # preconditions
     assert page is not None and len(page) > 0, ""
     self.page_compressed = zlib.compress(page.encode('utf-8'))
     if not self.quietly:
         sys.stdout.write("Page data compressed: self.page_compressed..\n")
         sys.stdout.flush()
     soup = BS(page,'html.parser')
     # precondition
     assert soup is not None, "No Page!"
     #main page
     if not self.quietly:
         sys.stdout.write("..Main..")
         sys.stdout.flush()
     #analyze main page ===========
     # reward
     frame = soup.select(".NS-projects-reward")
     proj_reward_append = self.projects_reward_result.append
     if len(frame) > 0:
         #
         for card in frame:
             #money
             money_f = card.select(".money")
             if len(money_f) > 0:
                 money = filter_number(money_f[0].text.strip())
             else:
                 money = 0.0
             #backers
             backers_f = card.select(".num-backers")
             if len(backers_f) > 0:
                 num_backers = filter_number(backers_f[0].text.strip())
             else:
                 num_backers = 0.0
             #description
             desc_f = card.select(".desc")
             if len(desc_f) > 0:
                 description = desc_f[0].text.strip()
             else:
                 description = ""
             #delivery
             delivery_f = card.select("time")
             if len(delivery_f) > 0:
                 delivery_estimated = delivery_f[0]['datetime']
             else:
                 delivery_estimated = ""
             #limited
             limited_f = card.select(".limited-number")
             if len(limited_f) > 0:
                 limited_num = int(re.findall(r"of ([0-9]+)",limited_f[0].text)[0])
             else:
                 limited_num = 0
             proj_reward_append([
                 money,
                 num_backers,
                 description,
                 delivery_estimated,
                 limited_num,
             ])
         #for
     # collect images
     frame = soup.select("img.fit")
     image_fnames_append = self.image_fnames.append
     images_append = self.images.append
     if len(frame) > 0:
         for imgf in frame:
             src = imgf['src']
             src = re.sub(r"\?.*$","",src)
             image_fnames_append(src)
             images_append("") #basically nothing is apppended
         if self.has_image:
             inque = list_to_queue(self.image_fnames)
             outque = Queue.Queue()
             tasks = []
             # parallel processing
             for i in range(inque.qsize()):
                 imageD = ImageDownloader(inque,outque,self.quietly,True)
                 imageD.setDaemon(True)
                 tasks.append(imageD)
                 imageD.start()
             inque.join() #wait till being finished
             for task in tasks:
                 task.stop()
             outlist = queue_to_list(outque)
             self.images = copy.deepcopy(outlist)
             # replace string list to binary data list
     # video (source file name, length)s
     frame = soup.select("video.has_webm")
     self.video_fname = "na"
     v_url = ""
     if len(frame) > 0:
         sources = frame[0].select("source")
         if len(sources) > 0:
             self.video_has = True
             for source in sources:
                 v_url = source['src']
                 if v_url.endswith(".mp4"):
                     if v_url.find('high') > 0:
                         self.video_has_high = 1
                         self.video_fname = v_url
                     else:
                         self.video_has_base = 1
                         self.video_fname = v_url # if base exists, replace by it.
             if self.video_has_high > 0 or self.video_has_base > 0:
                 url = self.video_fname
                 # video duration
                 r = requests.get(url,stream = True)
                 a = r.raw.read(2000) #2kb buffer
                 b = StringIO()
                 b.write(a)
                 c = mp4.File(b)
                 self.video_length = c.duration
                 b.close()
                 r.close()
     # collect full description
     frame = soup.select(".full-description")        
     rv = ""
     if len(frame) > 0:
         desc = frame[0].text
         rv = re.sub(r"\n\n\n+","\n",desc)
         try:
             rv = rv.strip()
         except:
             pass
     self.full_description = rv        
     # collect risk
     frame = soup.select("#risks")
     rv = ""
     if len(frame) > 0:
         desc = frame[0].text
         rv = re.sub(r"\n\n\n+","\n",desc)
         try:
             rv = rv.strip()
         except:
             pass
     self.risks = rv        
     # Facebook
     frame = soup.select("li.facebook.mr2 .count")
     waiting = 1
     while 1:
         if len(frame) > 0:
             try:
                 self.facebook_like = int(frame[0].text) # error prone
                 break
             except:
                 if not self.quietly:
                     sys.stdout.write("[facebook waiting...%d]\n"%waiting)
                     sys.stdout.flush()
                 time.sleep(waiting)
                 waiting += 1
                 temp_soup_facebook = BS(pb.get_page_source(),'html.parser')
                 frame = temp_soup_facebook.select("li.facebook.mr2 .count")
         else:
             self.facebook_like = 0
             break
         if waiting >= 10:
             if not self.quietly:
                 sys.stdout.write(" [facebook error] ")
                 sys.stdout.flush()
             self.facebook_like = -1 #means, error
             break
     
     # ============================
     if not self.quietly:
         sys.stdout.write("OK\n")
         sys.stdout.flush()
     #backers =====================
     #btn = pb.css_selector_element("#backers_nav")
     soup = None
     if not self.quietly:
         sys.stdout.write("..Visiting backers data..")
         sys.stdout.flush()
     try:
         headers = { 'User-Agent' : 'Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11',
                  'connection' : 'close',
                  'charset' : 'utf-8'}
         params = {'format':'json'}            
         con = requests.get(self.url,headers=headers,params=params)
         j = con.json()
         board = j['running_board']
         s = BS(board,'html.parser')
         eles = s.select('a#backers_nav data')
         if len(eles) > 0:
             t = eles[0].text.replace(",","")
             n = int(t)
             pn = n/40 + 2
         else:
             n = 0
             pn = 0            
         self.pagination = pn
         
         backer_url = self.url+"/backers"
         pb.goto(backer_url)
         page = 1
         # measure current backers
         p = pb.get_page_source()
         s = BS(p,'html.parser')
         frame = s.select("div.NS_backers__backing_row .meta a")
         prevc = len(frame)
         # init
         nowc = 0
         # pagination
         while 1:
             pb.scroll_down()
             # measure current backers
             p = pb.get_page_source()
             s = BS(p,'html.parser')
             frame = s.select("div.NS_backers__backing_row .meta a")
             nowc = len(frame)
             if not self.quietly:
                 sys.stdout.write("b")
                 sys.stdout.flush()
             if nowc > prevc:
                 prevc = nowc
             else:
                 break
         self.get_backers(pb)
         #get backers
         p = pb.get_page_source()
         s = BS(p,'html.parser')
         frame = s.select("div.NS_backers__backing_row .meta a")
         backers_append = self.backers.append
         if len(frame) > 0:
             for backer in frame:
                 profile_url = "%s"%(backer['href'])
                 backer_name = backer.text
                 backers_append((profile_url,backer_name,))
Example #38
def write_summary_report(coverage_module, project, modules):
    summary = StringIO()
    coverage_module.report(modules, file=summary)
    project.write_report("coverage", summary.getvalue())
    summary.close()
Example #39
        print('There was an error when trying the input: input' + str(_i) + '.txt')
        print('Your code outputted: ', codeOut.getvalue())
        print('Your code gave an error: ', codeErr.getvalue())
        print('This is the system error: ', sys.exc_info())
        exit()

    _code_out = codeOut.getvalue()
    if _code_out != _out:
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        sys.stdin = sys.__stdin__
        print('Wrong Answer!')
        print('For the input: input' + str(_i) + ".txt" )
        print('Your code gave output:"' + _code_out + '"')
        print('But the desired output was:"' + _out + '"')
        exit()
    else:
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        sys.stdin = sys.__stdin__
        print(_i, 'passed.')
    codeOut.close()
    codeErr.close()

sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
sys.stdin = sys.__stdin__

print('Accepted')
print('Your code passed all 299 tests')
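The restore calls above undo a capture set up earlier in the harness; a minimal sketch of that setup, assuming the same `codeOut`/`codeErr` names (the file names `input1.txt` and `solution.py` are illustrative):

import sys
from io import StringIO

codeOut, codeErr = StringIO(), StringIO()
sys.stdin = open('input1.txt')    # feed the test input
sys.stdout = codeOut              # capture the program's output
sys.stderr = codeErr              # capture its error output
try:
    exec(open('solution.py').read())
finally:
    # always restore the real streams, even if the solution raises
    sys.stdout, sys.stderr, sys.stdin = sys.__stdout__, sys.__stderr__, sys.__stdin__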
Example #40
0
def show_error():
    f = StringIO()
    traceback.print_exc(file=f)
    log.error(f.getvalue())
    f.close()
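For this logging use case, `traceback.format_exc()` returns the same text without the intermediate buffer; a one-function sketch (like the original, it must run inside an `except` block to see the active exception):

import logging
import traceback

log = logging.getLogger(__name__)

def show_error():
    # format_exc() renders the current exception's traceback as a string
    log.error(traceback.format_exc())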
Example #41
0
class Cmd(object):
    def __init__(self, cmd, chdir='', indir='', stdin=None, stdout=None, stderr=None, exitcode=0, reference=None):
        self.cmd = cmd
        self.chdir = chdir
        self.indir = indir
        self.reference = reference
        # set and rewind stdin
        self.stdin = stdin
        if stdin:
            self.stdin = StringIO()
            for f in stdin:
                self.stdin.write(f + '\n')
            self.stdin.seek(0)
        self.stdout = stdout
        if stdout:
            if chdir:
                self.stdout = []
                for o in stdout:
                    self.stdout.append(os.path.join(chdir, o))
            elif indir:
                self.stdout = []
                for o in stdout:
                    self.stdout.append(os.path.join(indir, o))
        self.stderr = stderr
        self.exitcode = exitcode

    def __str__(self):
        return self.cmd

    def argv(self):
        args = []
        if self.chdir:
            args.extend(['-d', self.chdir])
        args.extend(self.cmd.split())
        return args

    def cmdline(self):
        args = ['x2y']
        args.extend(self.argv())
        return args

    def run(self):
        rval = Cmd(self.cmd)
        argv = self.argv()
        stdout = StringIO()
        stderr = StringIO()
        rval.exitcode = x2y.x2y.main(argv, stdin=self.stdin, stdout=stdout, stderr=stderr)
        stdout_value = stdout.getvalue()
        stderr_value = stderr.getvalue()
        if stdout_value:
            rval.stdout = stdout_value.strip().split('\n')
        if stderr_value:
            rval.stderr = stderr_value.strip().split('\n')
        stdout.close()
        stderr.close()
        if self.stdin:
            self.stdin.close()
        return rval

    def run_as_process(self):
        rval = Cmd(self.cmd)
        try:
            cmd = self.cmdline()
            os.environ['COVERAGE_PROCESS_START'] = '1'
            env = os.environ.copy()
            env['COVERAGE_FILE'] = '.coverage.%s' % (self.cmd.replace('/', '-').replace(' ', '-'))
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
        except Exception as e:
            pytest.fail(msg='Cmd: run: exception running %s: %s' % (cmd, e))
        else:
            stdout, stderr = p.communicate()
            if stdout:
                rval.stdout = stdout.decode().strip().split('\n')
            if stderr:
                rval.stderr = stderr.decode().strip().split('\n')
            rval.exitcode = p.wait()
        return rval

    def outfile(self):
        argv = self.argv()
        i = 0
        while i < len(argv):
            if argv[i] == '-o':
                return argv[i + 1]
            i += 1  # advance, otherwise the loop never terminates
        return None
    
    def files_match(self):
        outfile = self.outfile()
        with open(outfile, 'rb') as fp:
            out_bytes = bytearray(fp.read())

        with open(self.reference, 'rb') as fp:
            ref_bytes = bytearray(fp.read())
        
        if out_bytes == ref_bytes:
            return True
        pytest.fail(msg='Files differ: %s %s' % (self.reference, outfile))
        return False
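A hypothetical driver for the class above; the command string, file names, and expected exit code are made up for illustration:

cmd = Cmd('in.x -o out.y', reference='expected.y', exitcode=0)
result = cmd.run()                 # drives x2y.x2y.main in-process
assert result.exitcode == cmd.exitcode
cmd.files_match()                  # byte-compares out.y against expected.y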
Example #42
0
 def _choices_as_array(self):
     from StringIO import StringIO
     valuebuffer = StringIO(self.list_values)
     choices = [[item.strip(), item.strip()] for item in valuebuffer.readlines()]
     valuebuffer.close()
     return choices
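The local `from StringIO import StringIO` is Python 2 only; under Python 3 the same helper could use `io.StringIO`, or skip the buffer entirely, as in this equivalent sketch:

def _choices_as_array(list_values):
    # str.splitlines() replaces the read-through buffer; strip() normalizes
    # each line just as in the original
    return [[item.strip(), item.strip()] for item in list_values.splitlines()]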
Example #43
0
def open_json_file():
    sio = StringIO()
    sio.write(json.dumps(TEST_DATA))
    sio.seek(0)
    yield sio
    sio.close()
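The `yield` makes this shape directly usable as a pytest fixture with teardown; a self-contained sketch, where `pytest` and the placeholder `TEST_DATA` payload are assumptions:

import json
from io import StringIO

import pytest

TEST_DATA = {'key': 'value'}       # placeholder payload

@pytest.fixture
def json_file():
    sio = StringIO(json.dumps(TEST_DATA))
    yield sio                      # the test runs here
    sio.close()                    # teardown after the test

def test_roundtrip(json_file):
    assert json.load(json_file) == TEST_DATA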
Example #44
0
    def handle(self, *args, **options):
        month = options['month']
        year = options['year']

        ###################
        ### NEW SERVERS ###
        ###################

        if month == 1:
            inidate = date(year - 1, 12, 1)
        else:
            inidate = date(year, month - 1, 1)

        # Sites that haven't been canceled (end_date is null) and do not have a billing (PO) associated
        if Site.objects.filter(start_date__month=inidate.month,
                               start_date__year=inidate.year,
                               end_date__isnull=True,
                               billing__isnull=True).exists():
            LOGGER.error(
                "Sites not cancelled were found without billing after a month")

        # Billings of sites that haven't been canceled (end_date is null) and haven't been sent to finance yet
        new_sites_billing = Billing.objects.filter(
            date_sent_to_finance__isnull=True, site__end_date__isnull=True)

        ################
        ### RENEWALS ###
        ################

        # Billings of sites that haven't been canceled (end_date is null), that haven't asked to cancel
        # their subscription, and that started in the current month of a previous year
        renewal_sites_billing = Billing.objects.filter(
            site__start_date__month=month,
            site__start_date__lt=date(year, 1, 1),
            site__end_date__isnull=True)

        if not (new_sites_billing.exists() or renewal_sites_billing.exists()):
            return  # Nothing to send

        ###################
        ### SEND REPORT ###
        ###################

        po_files = map(
            lambda x:
            ("%s%s" %
             (slugify(x.purchase_order_number), splitext(x.purchase_order.name)
              [1]), x.purchase_order.read(), 'application/other'),
            new_sites_billing | renewal_sites_billing)
        new_billing = map(
            lambda x: [
                x.site.id, x.site.name, x.group, x.purchase_order_number, x.
                site.start_date, x.site.type.price, x.site.start_date,
                calcendperiod(x.site.start_date)
            ], new_sites_billing)
        renewals_billing = map(
            lambda x: [
                x.site.id, x.site.name, x.group, x.purchase_order_number, x.
                site.start_date, x.site.type.price,
                x.site.start_date.replace(year=year),
                calcendperiod(x.site.start_date.replace(year=year)), x.
                date_modified > (date.today() - timedelta(days=100))
            ], renewal_sites_billing)
        header = [
            'id', 'Name', 'PO raised by', 'PO number', 'Created at', 'Cost',
            'Period start', 'Period end'
        ]
        new_billing = [header] + new_billing
        renewals_billing = [header + ['Have they uploaded a new PO?']
                            ] + renewals_billing

        stream_new = StringIO()
        stream_renewal = StringIO()
        writer_new = csv.writer(stream_new)
        writer_renewal = csv.writer(stream_renewal)

        for billing in new_billing:
            writer_new.writerow(billing)

        for billing in renewals_billing:
            writer_renewal.writerow(billing)

        EmailMessage(
            subject="Monthly Financial Report MWS3 - %s %i" %
            (month_name[month], year),
            body=
            "Hello,\n\nAttached you can find the monthly report spreadsheet for new servers and for renewals. "
            "You will also find all the corresponding purchase orders. The cost codes for MWS3 are:\n\n"
            "Cost centre for Managed Web Service = VCBQ\nTransaction code for Managed Web Service = LRED\n"
            "Internal code = VCBQ GAAB LRED\nExternal code = VCBQ GAAA LRED\n\nBest regards,\n\nMWS3 Team.\n",
            from_email="Managed Web Service Support <%s>" % getattr(
                settings, 'EMAIL_MWS3_SUPPORT', '*****@*****.**'),
            to=[settings.FINANCE_EMAIL],
            bcc=[settings.EMAIL_MWS3_SUPPORT],
            headers={
                'Return-Path':
                getattr(settings, 'EMAIL_MWS3_SUPPORT',
                        '*****@*****.**')
            },
            attachments=[('mws3sites_new.csv', stream_new.getvalue(),
                          'application/vnd.ms-excel'),
                         ('mws3sites_renewals.csv', stream_renewal.getvalue(),
                          'application/vnd.ms-excel')] + po_files).send()

        new_sites_billing.update(date_sent_to_finance=timezone.now().date())
        renewal_sites_billing.update(
            date_sent_to_finance=timezone.now().date())

        stream_new.close()
        stream_renewal.close()
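Stripped of the billing logic, the attachment-building pattern above reduces to a few lines; a sketch using Django's EmailMessage (addresses are placeholders, and sending requires a configured email backend):

import csv
from io import StringIO

from django.core.mail import EmailMessage

stream = StringIO()
writer = csv.writer(stream)
writer.writerow(['id', 'Name', 'Cost'])
writer.writerow([1, 'example-site', '100.00'])

msg = EmailMessage(subject='Monthly report', body='See attachment.',
                   from_email='noreply@example.com', to=['finance@example.com'])
msg.attach('report.csv', stream.getvalue(), 'text/csv')
stream.close()
msg.send()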
Example #45
0
class Form:
    """ Form class to use reportlab to generate pdf """

    def __init__(self, pdfname="ocrform.pdf", margintop=50, marginsides=50, **kw):
        """ Form initialization """

        self.pdfpath = kw.get("pdfpath", pdfname)
        self.verbose = kw.get("verbose", 0)
        self.font = kw.get("typeface", "Courier")  # reportlab expects a font-name string
        self.fontsize = kw.get("fontsize", 13)
        self.IObuffer = StringIO()
        self.canvas = Canvas(self.IObuffer, pagesize = A4)
        self.width, self.height = A4
        self.x = marginsides
        self.lastx = marginsides
        self.marginsides = marginsides
        self.margintop = margintop
        self.y = self.height - margintop
        self.lasty = self.height - margintop
        self.num = 1

    def barcode(self, uuid):
        """ Generate barcode of uuid """

        barcode = code128.Code128(str(uuid), barWidth=1, barHeight=20)
        barcode.drawOn(self.canvas, self.lastx, self.lasty)
        self.lasty = self.lasty - 20
        self.y = self.lasty

    def decorate(self):
        """ Decorates the the form with the markers needed to align the form later """

        c = self.canvas
        c.rect(20, 20, 20, 20, fill=1)
        c.rect(self.width - 40, 20, 20, 20, fill=1)
        c.rect(20, self.height - 40, 20, 20, fill=1)
        c.rect(self.width/2 - 10, 20, 20, 20, fill=1)
        c.rect(20, self.height/2 - 10, 20, 20, fill=1)
        c.rect(self.width - 40, self.height - 40, 20, 20, fill=1)
        c.rect(self.width - 40, self.height/2 - 10, 20, 20, fill=1)

    def print_text(self, lines, fontsize=12, gray=0, seek=0, continuetext=0, style="default"):
        """ Give the lines to be printed as a list, set the font and grey level """

        c = self.canvas
        self.fontsize = fontsize
        if style == "center":
            self.x = self.width / 2
        if seek > (self.width-(self.marginsides + self.fontsize)):
            seek = 0
        if seek != 0:
            self.x = self.x + seek
        if continuetext == 1:
            self.x = self.lastx + seek
            if seek == 0:
                self.y = self.y + fontsize
        for line in lines:
            if style == "center":
                self.x = self.x - (len(line)) * self.fontsize / 2
            if style == "right":
                self.x = self.width - (self.marginsides + len(line) * self.fontsize)
            if (self.width - self.marginsides - self.lastx) < 200:
                self.x = self.marginsides
                if continuetext == 1:
                    self.y = self.y - 2 * fontsize
            if (self.y - self.fontsize) < 50:
                self.set_new_page()
            t = c.beginText(self.x, self.y)
            t.setFont("Helvetica", fontsize)
            t.setFillGray(gray)
            t.textOut(line)
            c.drawText(t)
            self.y = self.y - fontsize
            self.lastx = t.getX()
            self.lasty = self.y
        self.x = self.marginsides

    def draw_check_boxes(self, boxes=1, completeline=0, lines=0, seek=0, continuetext=0, fontsize=0, gray=0, style="", isdate=0):
        """ Function to draw check boxes default no of boxes = 1 """

        c = self.canvas
        c.setLineWidth(0.90)
        c.setStrokeGray(gray)
        if style == "center":
            self.x = self.width / 2
        elif style == "right":
            self.x = self.width - self.marginsides - self.fontsize
        if seek > (self.width - (self.marginsides + self.fontsize)):
            seek = 0
        if (self.y - self.fontsize) < 40:
            self.set_new_page()
        if continuetext == 1:
            self.y = self.y + self.fontsize
            self.x = self.lastx
        else:
            self.x = self.marginsides
        if seek != 0:
            self.x = self.x + seek
        if fontsize == 0:
            fontsize = self.fontsize
        else:
            self.fontsize = fontsize
        if completeline == 1:
            boxes = int(self.width / self.fontsize)
        for i in range(boxes):
            c.rect(self.x, self.y, self.fontsize, self.fontsize)
            self.x = self.x + self.fontsize
            if self.x > (self.width - (self.marginsides + self.fontsize)):
                break
        self.lastx = self.x
        self.x = self.marginsides
        self.y = self.y - self.fontsize
        if isdate:
            t = c.beginText(self.x, self.y)
            t.setFont("Helvetica", 13)
            t.setFillGray(0)
            t.textOut("   D  D  M  M  Y  Y  Y  Y")
            c.drawText(t)
            self.y = self.y - fontsize
            self.lastx = t.getX()
            self.lasty = self.y
        self.lastx = self.x
        self.x = self.marginsides
        self.y = self.y - 13

    def draw_circle(self, boxes=1, completeline=0, lines=0, seek=0, continuetext=0, fontsize=0, gray=0, style=""):
        """ Draw circles on the form """

        c = self.canvas
        c.setLineWidth(0.90)
        c.setStrokeGray(gray)
        if style == "center":
            self.x = self.width / 2
        elif style == "right":
            self.x = self.width - self.marginsides - self.fontsize
        if seek > (self.width - (self.marginsides + self.fontsize)):
            seek = 0
        if (self.y - self.fontsize) < 40:
            self.set_new_page()
        if continuetext == 1:
            self.y = self.y + self.fontsize
            self.x = self.lastx
        else:
            self.x = self.marginsides
        if seek != 0:
            self.x = self.x + seek
        if fontsize == 0:
            fontsize = self.fontsize
        else:
            self.fontsize = fontsize
        if completeline == 1:
            boxes = int(self.width / self.fontsize)
        for i in range(boxes):
            c.circle(self.x + self.fontsize/2, self.y+self.fontsize/2, self.fontsize/2, fill = 0)
            self.x = self.x + self.fontsize
            if self.x > (self.width - (self.marginsides + self.fontsize)):
                break
        self.lastx = self.x
        self.x = self.marginsides
        self.y = self.y - self.fontsize

    def draw_line(self, gray=0):
        """ Function to draw a straight line """

        c = self.canvas
        c.setStrokeGray(gray)
        c.setLineWidth(0.40)
        self.y = self.y - (self.fontsize)
        c.line(self.x, self.y, self.width - self.x, self.y)
        self.y = self.y - (self.fontsize)

    def set_new_page(self):
        """
            All changes are forgotten when a showPage() has been executed.
            They have to be set again.
        """
        self.num += 1
        c = self.canvas
        c.showPage()
        self.decorate()
        self.x = self.marginsides
        self.lastx = self.marginsides
        self.y = self.height - self.margintop
        self.print_text([str("Page "+ str(self.num))], fontsize=8, style="right")
        self.x = self.marginsides
        self.lastx = self.x
        self.y = self.y - 32

    def set_title(self, title = "FORM"):
        """ Sets the title of the pdf. """

        self.canvas.setTitle(title)

    def save(self):
        """ Saves the form """

        self.canvas.save()
        pdf = self.IObuffer.getvalue()
        self.IObuffer.close()
        return pdf
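On Python 3, reportlab writes bytes, so the in-memory target should be io.BytesIO rather than StringIO; a minimal standalone sketch of the save-to-buffer pattern:

from io import BytesIO

from reportlab.lib.pagesizes import A4
from reportlab.pdfgen.canvas import Canvas

buf = BytesIO()
c = Canvas(buf, pagesize=A4)
c.drawString(72, 800, "Hello")     # one line of content for illustration
c.save()
pdf_bytes = buf.getvalue()         # the finished PDF as bytes
buf.close()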
Example #46
0
    def generate(self, zetacount=2, polarizationcount=1,
                 tailnorm=(0.16, 0.3, 0.6), energysplit=0.1, tolerance=1.0e-3,
                 referencefile=None, referenceindex=None, rcutpol_rel=1.0,
                 rcutmax=20.0,
                 rcharpol_rel=None,
                 vconf_args=(12.0, 0.6), txt='-',
                 include_energy_derivatives=False,
                 #lvalues=None, # XXX clean up some of these!
                 jvalues=None,
                 l_pol=None):
        """Generate an entire basis set.

        This is a high-level method which will return a basis set
        consisting of several different basis vector types.

        Parameters:

        ===================== =================================================
        ``zetacount``         Number of basis functions per occupied orbital
        ``polarizationcount`` Number of polarization functions
        ``tailnorm``          List of tail norms for split-valence scheme
        ``energysplit``       Energy increase defining confinement radius (eV)
        ``tolerance``         Tolerance of energy split (eV)
        ``referencefile``     gpw-file used to generate polarization function
        ``referenceindex``    Index in reference system of relevant atom
        ``rcutpol_rel``       Polarization rcut relative to largest other rcut
        ``rcutmax``           No cutoff will be greater than this value
        ``vconf_args``        Parameters (alpha, ri/rc) for conf. potential
        ``txt``               Log filename or '-' for stdout
        ===================== =================================================

        Returns a fully initialized Basis object.
        """

        if txt == '-':
            txt = sys.stdout
        elif txt is None:
            txt = devnull

        if isinstance(tailnorm, float):
            tailnorm = (tailnorm,)
        if 1 + len(tailnorm) < max(polarizationcount, zetacount):
            raise ValueError(
                'Needs %d tail norm values, but only %d are specified' %
                (max(polarizationcount, zetacount) - 1, len(tailnorm)))

        textbuffer = StringIO()
        
        class TeeStream:  # quick hack to both write and save output
            def __init__(self, out1, out2):
                self.out1 = out1
                self.out2 = out2
                
            def write(self, string):
                self.out1.write(string)
                self.out2.write(string)
                
        txt = TeeStream(txt, textbuffer)

        if vconf_args is not None:
            amplitude, ri_rel = vconf_args

        g = self.generator
        rgd = self.rgd

        njcore = g.njcore
        n_j = g.n_j[njcore:]
        l_j = g.l_j[njcore:]
        f_j = g.f_j[njcore:]

        if jvalues is None:
            jvalues = []
            sortkeys = []
            for j in range(len(n_j)):
                if f_j[j] == 0 and l_j[j] != 0:
                    continue
                jvalues.append(j)
                sortkeys.append(l_j[j])
            
            # Now order jvalues by l
            #
            # Use a stable sort so the energy ordering within each
            # angular momentum is guaranteed to be preserved
            args = np.argsort(sortkeys, kind='mergesort')
            jvalues = np.array(jvalues)[args]

        fulljvalues = [njcore + j for j in jvalues]
        
        if isinstance(energysplit, float):
            energysplit = [energysplit] * len(jvalues)
        
        title = '%s Basis functions for %s' % (g.xcname, g.symbol)
        print(title, file=txt)
        print('=' * len(title), file=txt)
        
        singlezetas = []
        energy_derivative_functions = []
        multizetas = [[] for i in range(zetacount - 1)]
        polarization_functions = []

        splitvalencedescr = 'split-valence wave, fixed tail norm'
        derivativedescr = 'derivative of sz wrt. (ri/rc) of potential'

        for vj, fullj, esplit in zip(jvalues, fulljvalues, energysplit):
            l = l_j[vj]
            n = n_j[vj]
            assert n > 0
            orbitaltype = str(n) + 'spdf'[l]
            msg = 'Basis functions for l=%d, n=%d' % (l, n)
            print(file=txt)
            print(msg + '\n', '-' * len(msg), file=txt)
            print(file=txt)
            if vconf_args is None:
                adverb = 'sharply'
            else:
                adverb = 'softly'
            print('Zeta 1: %s confined pseudo wave,' % adverb, end=' ', file=txt)

            u, e, de, vconf, rc = self.rcut_by_energy(fullj, esplit,
                                                      tolerance,
                                                      vconf_args=vconf_args)
            if rc > rcutmax:
                rc = rcutmax  # scale things down
                if vconf is not None:
                    vconf = g.get_confinement_potential(amplitude, ri_rel * rc,
                                                        rc)
                u, e = g.solve_confined(fullj, rc, vconf)
                print('using maximum cutoff', file=txt)
                print('rc=%.02f Bohr' % rc, file=txt)
            else:
                print('fixed energy shift', file=txt)
                print('DE=%.03f eV :: rc=%.02f Bohr' % (de * Hartree,
                                                                rc), file=txt)
            if vconf is not None:
                print(('Potential amp=%.02f :: ri/rc=%.02f' %
                               (amplitude, ri_rel)), file=txt)
            phit_g = self.smoothify(u, l)
            bf = BasisFunction(l, rc, phit_g,
                               '%s-sz confined orbital' % orbitaltype)
            norm = np.dot(g.dr, phit_g * phit_g)**.5
            print('Norm=%.03f' % norm, file=txt)
            singlezetas.append(bf)

            zetacounter = iter(xrange(2, zetacount + 1))

            if include_energy_derivatives:
                assert zetacount > 1
                zeta = zetacounter.next()
                print('\nZeta %d: %s' % (zeta, derivativedescr), file=txt)
                vconf2 = g.get_confinement_potential(amplitude,
                                                     ri_rel * rc * .99, rc)
                u2, e2 = g.solve_confined(fullj, rc, vconf2)
                
                phit2_g = self.smoothify(u2, l)
                dphit_g = phit2_g - phit_g
                dphit_norm = np.dot(rgd.dr_g, dphit_g * dphit_g) ** .5
                dphit_g /= dphit_norm
                descr = '%s-dz E-derivative of sz' % orbitaltype
                bf = BasisFunction(l, rc, dphit_g, descr)
                energy_derivative_functions.append(bf)

            for i, zeta in enumerate(zetacounter):
                print('\nZeta %d: %s' % (zeta, splitvalencedescr), file=txt)
                # Unresolved issue:  how does the lack of normalization
                # of the first function impact the tail norm scheme?
                # Presumably not much, since most interesting stuff happens
                # close to the core.
                rsplit, norm, splitwave = rsplit_by_norm(rgd, l, phit_g,
                                                         tailnorm[i]**2.0,
                                                         txt)
                descr = '%s-%sz split-valence wave' % (orbitaltype,
                                                       '0sdtq56789'[zeta])
                bf = BasisFunction(l, rsplit, phit_g - splitwave, descr)
                multizetas[i].append(bf)
            
        if polarizationcount > 0 or l_pol is not None:
            if l_pol is None:
                # Now make up some properties for the polarization orbital
                # We just use the cutoffs from the previous one times a factor
                # Find 'missing' values in lvalues
                lvalues = [l_j[vj] for vj in jvalues]
                for i in range(max(lvalues) + 1):
                    if list(lvalues).count(i) == 0:
                        l_pol = i
                        break
                else:
                    l_pol = max(lvalues) + 1

            # Find the last state with l=l_pol - 1, which will be the state we
            # base the polarization function on
            for vj, fullj, bf in zip(jvalues[::-1], fulljvalues[::-1],
                                     singlezetas[::-1]):
                if bf.l == l_pol - 1:
                    fullj_pol = fullj
                    rcut = bf.rc * rcutpol_rel
                    break
            else:
                raise ValueError('The requested value l_pol=%d requires l=%d '
                                 'among valence states' % (l_pol, l_pol - 1))
            rcut = min(rcut, rcutmax)
            msg = 'Polarization function: l=%d, rc=%.02f' % (l_pol, rcut)
            print('\n' + msg, file=txt)
            print('-' * len(msg), file=txt)
            # Make a single Gaussian for polarization function.
            #
            # It is known that for given l, the sz cutoff defined
            # by some fixed energy is strongly correlated to the
            # value of the characteristic radius which best reproduces
            # the wave function found by interpolation.
            #
            # We know that for e.g. d orbitals:
            #   rchar ~= .37 rcut[sz](.3eV)
            # Since we don't want to spend a lot of time finding
            # these value for other energies, we just find the energy
            # shift at .3 eV now

            u, e, de, vconf, rc_fixed = self.rcut_by_energy(fullj_pol,
                                                            .3, 1e-2,
                                                            6., (12., .6))

            default_rchar_rel = .25
            # Defaults for each l.  Actually we don't care right now
            rchar_rels = {}

            if rcharpol_rel is None:
                rcharpol_rel = rchar_rels.get(l_pol, default_rchar_rel)
            rchar = rcharpol_rel * rc_fixed
            gaussian = QuasiGaussian(1.0 / rchar**2, rcut)
            psi_pol = gaussian(rgd.r_g) * rgd.r_g**(l_pol + 1)
            norm = np.dot(rgd.dr_g, psi_pol * psi_pol) ** .5
            psi_pol /= norm
            print('Single quasi Gaussian', file=txt)
            msg = 'Rchar = %.03f*rcut = %.03f Bohr' % (rcharpol_rel, rchar)
            adjective = 'Gaussian'
            print(msg, file=txt)
            type = '%s-type %s polarization' % ('spdfg'[l_pol], adjective)
            bf_pol = BasisFunction(l_pol, rcut, psi_pol, type)
                                   
            polarization_functions.append(bf_pol)
            for i in range(polarizationcount - 1):
                npol = i + 2
                msg = '\n%s: %s' % (['Secondary', 'Tertiary', 'Quaternary',
                                     'Quintary', 'Sextary', 'Septenary'][i],
                                    splitvalencedescr)
                print(msg, file=txt)
                rsplit, norm, splitwave = rsplit_by_norm(rgd, l_pol, psi_pol,
                                                         tailnorm[i],
                                                         txt)
                descr = ('%s-type split-valence polarization %d'
                         % ('spdfg'[l_pol], npol))
                bf_pol = BasisFunction(l_pol, rsplit, psi_pol - splitwave,
                                       descr)
                polarization_functions.append(bf_pol)
        
        bf_j = []
        bf_j.extend(singlezetas)
        bf_j.extend(energy_derivative_functions)
        for multizeta_list in multizetas:
            bf_j.extend(multizeta_list)
        bf_j.extend(polarization_functions)
        
        rcmax = max([bf.rc for bf in bf_j])

        # The non-equidistant grids are really only suited for AE WFs
        d = 1.0 / 64
        equidistant_grid = np.arange(0.0, rcmax + d, d)
        ng = len(equidistant_grid)

        for bf in bf_j:
            # We have been storing phit_g * r, but we just want phit_g
            bf.phit_g = divrl(bf.phit_g, 1, rgd.r_g)
            
            gcut = min(int(1 + bf.rc / d), ng - 1)
            
            assert equidistant_grid[gcut] >= bf.rc
            assert equidistant_grid[gcut - 1] <= bf.rc
            
            bf.rc = equidistant_grid[gcut]
            # Note: bf.rc *must* correspond to a grid point (spline issues)
            bf.ng = gcut + 1
            # XXX all this should be done while building the basis vectors,
            # not here
            
            # Quick hack to change to equidistant coordinates
            spline = rgd.spline(bf.phit_g, rgd.r_g[rgd.floor(bf.rc)], bf.l,
                                points=100)
            bf.phit_g = np.array([spline(r) * r**bf.l
                                  for r in equidistant_grid[:bf.ng]])
            bf.phit_g[-1] = 0.

        basis = Basis(g.symbol, self.name, False)
        basis.ng = ng
        basis.d = d
        basis.bf_j = bf_j
        basis.generatordata = textbuffer.getvalue().strip()
        basis.generatorattrs = {'version': version}
        textbuffer.close()

        return basis
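The inline TeeStream above is a general duplicate-writes pattern (show output live while also keeping a copy); a self-contained sketch of the same idea:

import sys
from io import StringIO

class TeeStream:
    """Write-through stream that also keeps a copy of everything written."""
    def __init__(self, out1, out2):
        self.out1 = out1
        self.out2 = out2

    def write(self, string):
        self.out1.write(string)
        self.out2.write(string)

capture = StringIO()
tee = TeeStream(sys.stdout, capture)
print('logged and shown', file=tee)    # appears on stdout...
saved = capture.getvalue()             # ...and in the buffer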
Example #47
0
 def close(self):
     buf = self.getvalue()
     store, key = self._target
     store[key] = buf
     StringIO.close(self)
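A Python 3 sketch of the same store-backed buffer idea; the dict store and key are made up for illustration:

from io import StringIO

class StoreBackedStringIO(StringIO):
    """Buffer whose contents are persisted to a mapping on close."""
    def __init__(self, store, key):
        super().__init__()
        self._target = (store, key)

    def close(self):
        store, key = self._target
        store[key] = self.getvalue()   # persist before the buffer is freed
        super().close()

store = {}
buf = StoreBackedStringIO(store, 'report')
buf.write('hello')
buf.close()
assert store['report'] == 'hello'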
Example #48
0
def test_mlp(n_nodes=[74484,100,100,100,4],  # input-hidden-output nodes
             datasets='lhrhadvs_sample_data.mat',  # load data
             
             # activation:  # sigmoid function: T.nnet.sigmoid, hyperbolic tangent function: T.tanh, Rectified Linear Unit: relu1
             batch_size = 40, n_epochs = 300, learning_rate=0.001,activation = T.tanh,
             beginAnneal= 50, min_annel_lrate = 1e-4, decay_rate = 0.0005, momentum_val=0.01,
             
             # Select optimizer 'Grad' for GradientDescentOptimizer, 'Adam' for AdamOptimizer, 'Rmsp' for RMSPropOptimizer
             optimizer_algorithm='Grad',
                       
             # Parameters for the node-wise control of weight sparsity
             # if you have three hidden layer, the number of target Hoyer's sparseness should be same 
             tg_hspset=[0.7, 0.7, 0.5], # Target sparsity
             max_beta=[0.05, 0.95, 0.7], # Maximum beta changes
             
             # Parameters for the layer-wise control of weight sparsity 
             # tg_hspset=[0.7, 0.7, 0.5], # Target sparsity 
             # max_beta=[0.05, 0.95, 0.8], # Maximum beta changes
             beta_lrates = 1e-2, L2_reg = 1e-4,
             
             # flag_nodewise =1 is the node-wise control of weight sparsity 
             # flag_nodewise =0 is the layer-wise control of weight sparsity
            
             flag_nodewise = 0,
             # Save path  
             sav_path = '/Users/bspl/Downloads/dnnwsp-master/Theano_code', # a directory to save dnnwsp result  
              ):
               
    ########################################## Input data  #################################################

        
    datasets=sio.loadmat(datasets) # load datasets
    
    ############# lhrhadvs_sample_data.mat #############
    # train_x = 240 volumes x 74484 voxels
    # train_y = 240 volumes x 1 [0:left-hand clenching task, 1:right-hand clenching task, 2:auditory task, 3:visual task]
    # test_x  = 120 volumes x 74484 voxels
    # test_y  = 120 volumes x 1 [0:left-hand clenching task, 1:right-hand clenching task, 2:auditory task, 3:visual task]
    ############################################################

    train_x = datasets['train_x'];     train_y = datasets['train_y'];
    test_x  = datasets['test_x'];    test_y  = datasets['test_y'];
    
    train_set_x = theano.shared(numpy.asarray(train_x, dtype=theano.config.floatX))
    train_set_y = T.cast(theano.shared(train_y.flatten(),borrow=True),'int32')
    
    test_set_x = theano.shared(numpy.asarray(test_x, dtype=theano.config.floatX))
    test_set_y = T.cast(theano.shared(test_y.flatten(),borrow=True),'int32')

    # compute number of minibatches for training, validation and testing
    n_train_batches = int(train_set_x.get_value(borrow=True).shape[0] / batch_size)
    n_test_batches = int(test_set_x.get_value(borrow=True).shape[0] / batch_size)

                   
    ########################################## Build model #################################################

    print('... building the model')

    # allocate symbolic variables for the data
    index = T.lscalar()  # index to a [mini]batch
    x = T.matrix('x')  
    y = T.ivector('y')  # the labels are presented as 1D vector of [int] labels

    l1_penalty_layer = T.fvector() #  L1-norm regularization parameter
    ln_rate = T.scalar(name='learning_rate') # learning rate
    momentum = T.scalar(name='momentum')
                 
    rng = numpy.random.RandomState(1234)

    # construct the MLP class
    classifier = MLP(
        rng=rng,
        input=x,
        n_nodes = n_nodes,
        activation = activation,
    )

    # cost function
    cost = (classifier.negative_log_likelihood(y))
    
    # L1 regularization term for either node-wise or layer-wise control 
    if flag_nodewise==1:
        for i in range(len(n_nodes)-2):
            node_size = n_nodes[i+1]; tg_index = np.arange((i * node_size),((i + 1) * node_size));
            cost += (T.dot(abs(classifier.hiddenLayer[i].W),l1_penalty_layer[tg_index])).sum();
    else:
        for i in range(len(n_nodes)-2):
            cost += l1_penalty_layer[i] * classifier.L1[i]

    # L2 regularization 
    cost += L2_reg * classifier.L2_sqr    

    # compiling a Theano function `train_model` that returns the cost, but
    # in the same time updates the parameter of the model based on the rules
    # defined in `updates`
    
    updates =[];
    # Select optimizer 'Grad' for GradientDescentOptimizer, 'Adam' for AdamOptimizer, 'Rmsp' for RMSPropOptimizer
    if optimizer_algorithm=='Grad':
        gparams = [T.grad(cost, param) for param in classifier.params]
        
        for param, gparam, oldparam in zip(classifier.params, gparams, classifier.oldparams):
            delta = ln_rate * gparam + momentum * oldparam
            updates.append((param, param - delta))
            updates.append((oldparam, delta))

    elif optimizer_algorithm=='Adam':
        updates = adam(cost, classifier.params, learning_rate)
        
    elif optimizer_algorithm=='Rmsp' :
        updates = RMSprop(cost, classifier.params, learning_rate)
  
    train_model = theano.function(
        inputs=[index, l1_penalty_layer,ln_rate,momentum],
        outputs=[cost,classifier.errors(y),classifier.mse(batch_size,n_nodes[-1],y)],
        updates=updates,
        givens={
            x: train_set_x[index * batch_size: (index + 1) * batch_size],
            y: train_set_y[index * batch_size: (index + 1) * batch_size]
        },
        allow_input_downcast = True,
        on_unused_input = 'ignore'
    )
    
    updates_test = []
    for hiddenlayer in classifier.hiddenLayer:
        for i in range(1):
            updates_test.append( hiddenlayer.updates[i] )
           
    test_model = theano.function(
        inputs=[index],
        outputs=[classifier.errors(y),classifier.mse(batch_size,n_nodes[-1],y)],
        updates=updates_test,
        givens={
            x: test_set_x[index * batch_size:(index + 1) * batch_size],
            y: test_set_y[index * batch_size:(index + 1) * batch_size]
        }
    )

    ########################################## Learning model #################################################

    print('... training')

    test_score = 0. 
    start_time = timeit.default_timer()

    epoch = 0;    done_looping = False
    
    # Define variables to save/check training model 
    train_errors = np.zeros(n_epochs);    test_errors = np.zeros(n_epochs);
    train_mse = np.zeros(n_epochs);    test_mse = np.zeros(n_epochs);
    lrs = np.zeros(n_epochs); lrate_list = np.zeros(n_epochs);
    
    if flag_nodewise==1:
        hsp_avg_vals =[]; L1_beta_avg_vals=[];  all_hsp_vals =[]; all_L1_beta_vals=[];
        L1_beta_vals = np.zeros(np.sum(n_nodes[1:(len(n_nodes)-1)]));
        cnt_hsp_val = np.zeros(len(n_nodes)-2); 
        cnt_beta_val = np.zeros(len(n_nodes)-2);
        
        for i in range(len(n_nodes)-2):
            hsp_avg_vals.append(np.zeros((n_epochs,n_nodes[i+1])));
            L1_beta_avg_vals.append(np.zeros((n_epochs,n_nodes[i+1])));
    
            all_hsp_vals.append(np.zeros((n_epochs,n_nodes[i+1])));
            all_L1_beta_vals.append(np.zeros((n_epochs,n_nodes[i+1])));
    else:
        all_hsp_vals = np.zeros((n_epochs,len(n_nodes)-2));  
        all_L1_beta_vals = np.zeros((n_epochs,len(n_nodes)-2));
        
        L1_beta_vals= np.zeros(len(n_nodes)-2)
        cnt_hsp_val = np.zeros(len(n_nodes)-2);    

    ###################
    #  start training 
    ###################
    while (epoch < n_epochs) and (not done_looping):
        epoch = epoch + 1
        minibatch_all_avg_error = []; minibatch_all_avg_mse = []
        
        # minibatch based training
        for minibatch_index in range(n_train_batches):
            disply_text = StringIO();
            minibatch_avg_cost, minibatch_avg_error, minibatch_avg_mse = train_model(minibatch_index, L1_beta_vals,learning_rate,momentum_val)
            minibatch_all_avg_error.append(minibatch_avg_error)
            minibatch_all_avg_mse.append(minibatch_avg_mse)
             
            # Node-wise or layer-wise control of weight sparsity 
            if flag_nodewise==1:
                for i in range(len(n_nodes)-2):
                    node_size = n_nodes[i+1]; tg_index = np.arange((i * node_size),((i + 1) * node_size));
                    [all_hsp_vals[i][epoch-1], L1_beta_vals[tg_index]] = hsp_fnc(L1_beta_vals[tg_index],classifier.hiddenLayer[i].W,max_beta[i],tg_hspset[i],beta_lrates,flag_nodewise);
                    all_L1_beta_vals[i][epoch-1]= L1_beta_vals[tg_index];
            else:
                for i in range(len(n_nodes)-2):
                    [cnt_hsp_val[i], L1_beta_vals[i]] = hsp_fnc(L1_beta_vals[i],classifier.hiddenLayer[i].W,max_beta[i],tg_hspset[i],beta_lrates,flag_nodewise);
                
            # iteration number
            iter = (epoch - 1) * n_train_batches + minibatch_index
            # test it on the test set
            test_losses = []; test_mses = []
            for i in range(n_test_batches):
                test_losses.append(test_model(i)[0])
                test_mses.append(test_model(i)[1])
            test_score = numpy.mean(test_losses);
             
        # Begin Annealing
        if beginAnneal == 0:
            learning_rate = learning_rate * 1.0
        elif epoch > beginAnneal:
            learning_rate = max(min_annel_lrate, (-decay_rate*epoch + (1+decay_rate*beginAnneal)) * learning_rate )
            
        # Save variables to check training
        train_errors[epoch-1] = np.mean(minibatch_all_avg_error)*100
        test_errors[epoch-1] = test_score*100
        train_mse[epoch-1] = np.mean(minibatch_all_avg_mse)
        test_mse[epoch-1] = np.mean(test_mses)
        
        # Node-wise or layer-wise control of weight sparsity to display the current state of training 
        if flag_nodewise ==1:
            disply_text.write("Node-wise control, epoch %i/%d, Tr.err= %.2f, Ts.err= %.2f, lr = %.6f, " % (epoch,n_epochs,train_errors[epoch-1],test_errors[epoch-1],learning_rate))

            for layer_idx in range(len(n_nodes)-2):
                cnt_hsp_val[layer_idx] = np.mean(all_hsp_vals[layer_idx][epoch-1])
                cnt_beta_val[layer_idx] = np.mean(all_L1_beta_vals[layer_idx][epoch-1])
        else:  
            disply_text.write("Layer-wise control, epoch %i/%d, Tr.err= %.2f, Ts.err= %.2f, lr = %.6f, " % (epoch,n_epochs,train_errors[epoch-1],test_errors[epoch-1],learning_rate))
            
            all_hsp_vals[epoch-1,:] = cnt_hsp_val;
            all_L1_beta_vals[epoch-1,:] = L1_beta_vals;
            cnt_beta_val = L1_beta_vals;
            
        for layer_idx in range(len(n_nodes)-2):
            if (layer_idx==len(n_nodes)-3):
                disply_text.write("hsp_l%d = %.2f/%.2f, beta_l%d = %.2f" % (layer_idx+1,cnt_hsp_val[layer_idx],tg_hspset[layer_idx],layer_idx+1,cnt_beta_val[layer_idx]))
            else:
                disply_text.write("hsp_l%d = %.2f/%.2f, beta_l%d = %.2f, " % (layer_idx+1,cnt_hsp_val[layer_idx],tg_hspset[layer_idx],layer_idx+1,cnt_beta_val[layer_idx]))
                    
        # Display variables                 
        print(disply_text.getvalue())
        disply_text.close()
        
        lrs[epoch-1] = learning_rate

    ########################################## Save variables #################################################

    # make a new directory to save data
    if not os.path.exists(sav_path):
        os.makedirs(sav_path)
        
    end_time = timeit.default_timer()
    cst_time = (end_time - start_time) / 60.
    print(('\n The code for file ' + os.path.split(__file__)[1] +
           ' ran for %.2fm' % cst_time), file=sys.stderr)
     
    sav_text = StringIO();
    for layer_idx in range(len(n_nodes)-2):
        if layer_idx==len(n_nodes)-3:
            sav_text.write("%d" % (n_nodes[layer_idx+1]))
        else:
            sav_text.write("%d-" % (n_nodes[layer_idx+1]))

    if flag_nodewise==1:
       sav_name = '%s/mlp_rst_node_%s.mat' % (sav_path,sav_text.getvalue())
    else: 
       sav_name = '%s/mlp_rst_layer_%s.mat' % (sav_path,sav_text.getvalue())

    sav_text.close()
        
    data_variable = {}; 

    for i in range(len(n_nodes)-1):
        if (i==len(n_nodes)-2): 
            W_name = "w%d" %(i+1); b_name = "b%d" % (i+1); 
            data_variable[W_name] = classifier.logRegressionLayer.W.get_value(borrow=True)
            data_variable[b_name] = classifier.logRegressionLayer.b.get_value(borrow=True)
        else:
            W_name = "w%d" %(i+1); b_name = "b%d" % (i+1)
            data_variable[W_name] = classifier.hiddenLayer[i].W.get_value(borrow=True)
            data_variable[b_name] = classifier.hiddenLayer[i].b.get_value(borrow=True)
            
    data_variable['hsp_vals'] = all_hsp_vals;  
    data_variable['L1_vals'] =  all_L1_beta_vals;
    data_variable['train_errors'] = train_errors;    data_variable['test_errors'] = test_errors;
    data_variable['l_rate'] = lrs;
    
    data_variable['momentum'] = momentum_val;    data_variable['beginAnneal'] = beginAnneal;    data_variable['decay_lr'] = decay_rate;
    data_variable['beta_lrates'] = beta_lrates;    data_variable['max_beta'] = max_beta;    data_variable['tg_hspset'] = tg_hspset;
    data_variable['batch_size'] = batch_size;    data_variable['n_epochs'] = n_epochs;    data_variable['min_annel_lrate'] = min_annel_lrate;
    data_variable['n_nodes'] = n_nodes; data_variable['lrate_list'] = lrate_list;
    
    sio.savemat(sav_name,data_variable)

    print('...done!')
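The per-epoch status line above is accumulated in a StringIO before a single print; a distilled sketch of that pattern with placeholder numbers:

from io import StringIO

status = StringIO()
status.write("epoch %i/%d, " % (3, 300))
status.write("Tr.err = %.2f, " % 12.50)
status.write("Ts.err = %.2f" % 14.00)
print(status.getvalue())    # one consolidated line
status.close()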
Example #49
0
    session = SessionManager.Session()
    username = args.user_promote[0]
    level = int(args.user_promote[1])
    user = session.query(User).filter(User.name == username).one()
    user.level = level
    session.commit()
    print('Update successful')
    SessionManager.Session.remove()

elif args.db_init:
    Base.metadata.create_all(SessionManager.engine)
    fp = StringIO()
    alembic_config = Config('./alembic.ini', stdout=fp)
    command.heads(alembic_config)
    content = fp.getvalue()
    fp.close()
    revision_hash = re.search(r'^([0-9a-f]+)\s\(head\)', content, re.U).group(1)
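    # Note (assumption): alembic's ScriptDirectory API can return the head
    # revision directly, e.g.
    #     from alembic.script import ScriptDirectory
    #     ScriptDirectory.from_config(alembic_config).get_current_head()
    # which avoids parsing the captured stdout above.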
    print('set current revision {0}'.format(revision_hash))
    new_alembic_config = Config('./alembic.ini')
    command.stamp(new_alembic_config, revision=revision_hash)
    print('Database initialized')

elif args.cover:
    fr = open('./config/config.yml', 'r')
    config = yaml.load(fr)
    download_location = config['download']['location']
    session = SessionManager.Session()
    cur = session.query(Bangumi)
    resp_cookies = None
    file_downloader = FileDownloader()
    for bangumi in cur:
Example #50
0
    def check_gradient(self,
                       inputs=None,
                       outputs=None,
                       stream=sys.stdout,
                       mode='auto'):
        """Compare the OpenMDAO-calculated gradient with one calculated
        by straight finite-difference. This provides the user with a way
        to validate his derivative functions (apply_deriv and provideJ.)

        inputs: (optional) iter of str or None
            Names of input variables. The calculated gradient will be
            the matrix of values of the output variables with respect
            to these input variables. If no value is provided for inputs,
            they will be determined based on the parameters of
            the Driver corresponding to this workflow.

        outputs: (optional) iter of str or None
            Names of output variables. The calculated gradient will be
            the matrix of values of these output variables with respect
            to the input variables. If no value is provided for outputs,
            they will be determined based on the objectives and constraints
            of the Driver corresponding to this workflow.

        stream: (optional) file-like object or str
            Where to write to, default stdout. If a string is supplied,
            that is used as a filename. If None, no output is written.

        mode: (optional) str
            Set to 'forward' for forward mode, 'adjoint' for adjoint mode,
            or 'auto' to let OpenMDAO determine the correct mode.
            Defaults to 'auto'.

        Returns the finite difference gradient, the OpenMDAO-calculated
        gradient, and a list of suspect inputs/outputs.
        """
        parent = self.parent

        # tuples cause problems
        if inputs:
            inputs = list(inputs)
        if outputs:
            outputs = list(outputs)

        if isinstance(stream, basestring):
            stream = open(stream, 'w')
            close_stream = True
        else:
            close_stream = False
            if stream is None:
                stream = StringIO()

        J = self.calc_gradient(inputs, outputs, mode=mode, force_regen=True)
        Jbase = self.calc_gradient(inputs,
                                   outputs,
                                   mode='fd',
                                   force_regen=True)

        print >> stream, 24 * '-'
        print >> stream, 'Calculated Gradient'
        print >> stream, 24 * '-'
        print >> stream, J
        print >> stream, 24 * '-'
        print >> stream, 'Finite Difference Comparison'
        print >> stream, 24 * '-'
        print >> stream, Jbase

        # This code duplication is needed so that we print readable names for
        # the constraints and objectives.

        if inputs is None:
            if hasattr(parent, 'list_param_group_targets'):
                inputs = parent.list_param_group_targets()
                input_refs = []
                for item in inputs:
                    if len(item) < 2:
                        input_refs.append(item[0])
                    else:
                        input_refs.append(item)
            # Should be caught in calc_gradient()
            else:  # pragma no cover
                msg = "No inputs given for derivatives."
                self.scope.raise_exception(msg, RuntimeError)
        else:
            input_refs = inputs

        if outputs is None:
            outputs = []
            output_refs = []
            if hasattr(parent, 'get_objectives'):
                obj = [
                    "%s.out0" % item.pcomp_name
                    for item in parent.get_objectives().values()
                ]
                outputs.extend(obj)
                output_refs.extend(parent.get_objectives().keys())
            if hasattr(parent, 'get_constraints'):
                con = [
                    "%s.out0" % item.pcomp_name
                    for item in parent.get_constraints().values()
                ]
                outputs.extend(con)
                output_refs.extend(parent.get_constraints().keys())

            if len(outputs) == 0:  # pragma no cover
                msg = "No outputs given for derivatives."
                self.scope.raise_exception(msg, RuntimeError)
        else:
            output_refs = outputs

        out_width = 0

        for output, oref in zip(outputs, output_refs):
            out_val = self.scope.get(output)
            out_names = _flattened_names(oref, out_val)
            out_width = max(out_width, max([len(out) for out in out_names]))

        inp_width = 0
        for input_tup, iref in zip(inputs, input_refs):
            if isinstance(input_tup, str):
                input_tup = [input_tup]
            inp_val = self.scope.get(input_tup[0])
            inp_names = _flattened_names(str(iref), inp_val)
            inp_width = max(inp_width, max([len(inp) for inp in inp_names]))

        label_width = out_width + inp_width + 4

        print >> stream
        print >> stream, label_width*' ', \
              '%-18s %-18s %-18s' % ('Calculated', 'FiniteDiff', 'RelError')
        print >> stream, (label_width + (3 * 18) + 3) * '-'

        suspect_limit = 1e-5
        error_n = error_sum = 0
        error_max = error_loc = None
        suspects = []
        i = -1

        io_pairs = []

        for output, oref in zip(outputs, output_refs):
            out_val = self.scope.get(output)
            for out_name in _flattened_names(oref, out_val):
                i += 1
                j = -1
                for input_tup, iref in zip(inputs, input_refs):
                    if isinstance(input_tup, basestring):
                        input_tup = (input_tup, )

                    inp_val = self.scope.get(input_tup[0])
                    for inp_name in _flattened_names(iref, inp_val):
                        j += 1
                        calc = J[i, j]
                        finite = Jbase[i, j]
                        if finite and calc:
                            error = (calc - finite) / finite
                        else:
                            error = calc - finite
                        error_n += 1
                        error_sum += abs(error)
                        if error_max is None or abs(error) > abs(error_max):
                            error_max = error
                            error_loc = (out_name, inp_name)
                        if abs(error) > suspect_limit or isnan(error):
                            suspects.append((out_name, inp_name))
                        print >> stream, '%*s / %*s: %-18s %-18s %-18s' \
                              % (out_width, out_name, inp_width, inp_name,
                                 calc, finite, error)
                        io_pairs.append(
                            "%*s / %*s" %
                            (out_width, out_name, inp_width, inp_name))
        print >> stream
        if error_n:
            print >> stream, 'Average RelError:', error_sum / error_n
            print >> stream, 'Max RelError:', error_max, 'for %s / %s' % error_loc
        if suspects:
            print >> stream, 'Suspect gradients (RelError > %s):' % suspect_limit
            for out_name, inp_name in suspects:
                print >> stream, '%*s / %*s' \
                      % (out_width, out_name, inp_width, inp_name)
        print >> stream

        if close_stream:
            stream.close()

        # return arrays and suspects to make it easier to check from a test
        return Jbase.flatten(), J.flatten(), io_pairs, suspects
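A hypothetical call matching the four-value return above; the `workflow` object and the variable names are illustrative only:

Jfd, J, io_pairs, suspects = workflow.check_gradient(
    inputs=['comp.x'], outputs=['comp.y'],
    stream='gradient_check.txt', mode='auto')
assert not suspects, 'suspect derivatives: %s' % (suspects,)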
Example #51
0
print >> deadXMLBuf, '    </action>'

print >> deadXMLBuf, "  </actions>"


def termChar(s):
    if len(compose.char(s)) > 1:
        return xmlChar(compose.terminators.get(compose.char(s), "?"))
    return xmlChar(compose.terminators.get(compose.char(s), compose.char(s)))


print >> deadXMLBuf
print >> deadXMLBuf
print >> deadXMLBuf, '  <terminators>'
for m in sorted(dmm, mod_order):
    if m != tuple():
        print >> deadXMLBuf, '    <when state="%s" output="%s"/>' % (
            '_'.join(m), ''.join([xmlChar(terminators.get(n, "?"))
                                  for n in m]))
for ss in sorted(compose.states):
    C = ''.join([termChar(s) for s in ss])
    s = '_'.join(list(ss))
    print >> deadXMLBuf, '    <when state="%s" output="%s"/>' % (s, C)
print >> deadXMLBuf, '  </terminators>'

deadXMLCode = deadXMLBuf.getvalue()
deadXMLBuf.close()

if __name__ == "__main__":
    print deadXMLCode
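The `print >> buf` redirection above is Python 2 syntax; under Python 3 the same buffered-XML pattern reads as in this minimal sketch (the state/output values are placeholders):

from io import StringIO

buf = StringIO()
print('  <terminators>', file=buf)
print('    <when state="acute" output="\N{ACUTE ACCENT}"/>', file=buf)
print('  </terminators>', file=buf)
xml_code = buf.getvalue()
buf.close()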
Example #52
0
class TestOSTConnector(unittest.TestCase):
    """
    Class to test the IM connectors
    """
    def setUp(self):
        self.log = StringIO()
        self.handler = logging.StreamHandler(self.log)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        self.handler.setFormatter(formatter)

        logging.RootLogger.propagate = 0
        logging.root.setLevel(logging.ERROR)

        logger = logging.getLogger('CloudConnector')
        logger.setLevel(logging.DEBUG)
        logger.propagate = 0
        for handler in logger.handlers:
            logger.removeHandler(handler)
        logger.addHandler(self.handler)

    def tearDown(self):
        self.handler.flush()
        self.log.close()
        self.log = StringIO()
        self.handler.close()

    @staticmethod
    def get_ost_cloud():
        cloud_info = CloudInfo()
        cloud_info.type = "OpenStack"
        cloud_info.protocol = "https"
        cloud_info.server = "server.com"
        cloud_info.port = 5000
        inf = MagicMock()
        inf.id = "1"
        one_cloud = OpenStackCloudConnector(cloud_info, inf)
        return one_cloud

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_10_concrete(self, get_driver):
        radl_data = """
            network net ()
            system test (
            cpu.arch='x86_64' and
            cpu.count>=1 and
            memory.size>=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'ost://server.com/ami-id' and
            disk.0.os.credentials.username = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl_system = radl.systems[0]

        auth = Authentication([{
            'id': 'ost',
            'type': 'OpenStack',
            'username': '******',
            'password': '******',
            'tenant': 'tenant',
            'host': 'https://server.com:5000'
        }])
        ost_cloud = self.get_ost_cloud()

        driver = MagicMock()
        get_driver.return_value = driver

        node_size = MagicMock()
        node_size.ram = 512
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 1
        node_size.name = "small"
        driver.list_sizes.return_value = [node_size]

        concrete = ost_cloud.concreteSystem(radl_system, auth)
        self.assertEqual(len(concrete), 1)
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_20_launch(self, get_driver):
        radl_data = """
            network net1 (outbound = 'yes' and provider_id = 'public' and outports = '8080')
            network net2 ()
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net1' and
            net_interface.0.dns_name = 'test' and
            net_interface.1.connection = 'net2' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'ost://server.com/ami-id' and
            disk.0.os.credentials.username = '******' and
            disk.1.size=1GB and
            disk.1.device='hdb' and
            disk.1.mount_path='/mnt/path'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl.check()

        auth = Authentication([{
            'id': 'ost',
            'type': 'OpenStack',
            'username': '******',
            'password': '******',
            'tenant': 'tenant',
            'host': 'https://server.com:5000'
        }])
        ost_cloud = self.get_ost_cloud()

        driver = MagicMock()
        get_driver.return_value = driver

        node_size = MagicMock()
        node_size.ram = 512
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 1
        node_size.name = "small"
        driver.list_sizes.return_value = [node_size]

        net = MagicMock()
        net.name = "public"
        driver.ex_list_networks.return_value = [net]

        sg = MagicMock()
        sg.name = "sg"
        driver.ex_create_security_group.return_value = sg
        driver.ex_list_security_groups.return_value = []
        driver.ex_create_security_group_rule.return_value = True

        keypair = MagicMock()
        keypair.public_key = "public"
        driver.create_key_pair.return_value = keypair
        driver.features = {'create_node': ['ssh_key']}

        node = MagicMock()
        node.id = "ost1"
        node.name = "ost1name"
        driver.create_node.return_value = node

        res = ost_cloud.launch(InfrastructureInfo(), radl, radl, 1, auth)
        success, _ = res[0]
        self.assertTrue(success, msg="ERROR: launching a VM.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_30_updateVMInfo(self, get_driver):
        radl_data = """
            network net (outbound = 'yes')
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'one://server.com/1' and
            disk.0.os.credentials.username = '******' and
            disk.0.os.credentials.password = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl.check()

        auth = Authentication([{
            'id': 'ost',
            'type': 'OpenStack',
            'username': '******',
            'password': '******',
            'tenant': 'tenant',
            'host': 'https://server.com:5000'
        }])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        inf.get_next_vm_id.return_value = 1
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, radl, radl, ost_cloud)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {
            'flavorId': 'small',
            'addresses': {
                'os-lan': [{
                    'addr': '10.0.0.1',
                    'OS-EXT-IPS:type': 'fixed'
                }]
            }
        }
        node.public_ips = []
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.list_nodes.return_value = [node]

        node_size = MagicMock()
        node_size.ram = 512
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 1
        node_size.name = "small"
        driver.ex_get_size.return_value = node_size

        volume = MagicMock()
        volume.id = "vol1"
        volume.attach.return_value = True
        driver.create_volume.return_value = volume

        pool = MagicMock()
        pool.name = "pool1"
        pool.list_floating_ips.return_value = []
        pool.create_floating_ip.return_value = True
        driver.ex_list_floating_ip_pools.return_value = [pool]

        success, vm = ost_cloud.updateVMInfo(vm, auth)

        self.assertTrue(success, msg="ERROR: updating VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_40_stop(self, get_driver):
        auth = Authentication([{
            'id': 'ost',
            'type': 'OpenStack',
            'username': '******',
            'password': '******',
            'tenant': 'tenant',
            'host': 'https://server.com:5000'
        }])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        inf.get_next_vm_id.return_value = 1
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, "", "", ost_cloud)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.list_nodes.return_value = [node]

        driver.ex_stop_node.return_value = True

        success, _ = ost_cloud.stop(vm, auth)

        self.assertTrue(success, msg="ERROR: stopping VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_50_start(self, get_driver):
        auth = Authentication([{
            'id': 'ost',
            'type': 'OpenStack',
            'username': '******',
            'password': '******',
            'tenant': 'tenant',
            'host': 'https://server.com:5000'
        }])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        inf.get_next_vm_id.return_value = 1
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, "", "", ost_cloud)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.list_nodes.return_value = [node]

        driver.ex_start_node.return_value = True

        success, _ = ost_cloud.start(vm, auth)

        self.assertTrue(success, msg="ERROR: stopping VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_55_alter(self, get_driver):
        radl_data = """
            network net ()
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'one://server.com/1' and
            disk.0.os.credentials.username = '******' and
            disk.0.os.credentials.password = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)

        new_radl_data = """
            system test (
            cpu.count>=2 and
            memory.size>=2048m
            )"""
        new_radl = radl_parse.parse_radl(new_radl_data)

        auth = Authentication([{
            'id': 'ost',
            'type': 'OpenStack',
            'username': '******',
            'password': '******',
            'tenant': 'tenant',
            'host': 'https://server.com:5000'
        }])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        inf.get_next_vm_id.return_value = 1
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, radl, radl, ost_cloud)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.list_nodes.return_value = [node]

        node_size = MagicMock()
        node_size.ram = 2048
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 2
        node_size.name = "small"
        driver.list_sizes.return_value = [node_size]

        driver.ex_resize.return_value = True

        success, _ = ost_cloud.alterVM(vm, new_radl, auth)

        self.assertTrue(success, msg="ERROR: modifying VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    @patch('time.sleep')
    def test_60_finalize(self, sleep, get_driver):
        auth = Authentication([{
            'id': 'ost',
            'type': 'OpenStack',
            'username': '******',
            'password': '******',
            'tenant': 'tenant',
            'host': 'https://server.com:5000'
        }])
        ost_cloud = self.get_ost_cloud()

        radl_data = """
            system test (
            cpu.count>=2 and
            memory.size>=2048m
            )"""
        radl = radl_parse.parse_radl(radl_data)

        inf = MagicMock()
        inf.get_next_vm_id.return_value = 1
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, radl, radl, ost_cloud)

        driver = MagicMock()
        driver.name = "OpenStack"
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        node.destroy.return_value = True
        driver.list_nodes.return_value = [node]

        sg = MagicMock()
        sg.id = sg.name = "sg1"
        driver.ex_get_node_security_groups.return_value = [sg]

        keypair = MagicMock()
        driver.get_key_pair.return_value = keypair
        vm.keypair = keypair

        driver.delete_key_pair.return_value = True

        driver.delete_security_group.return_value = True

        driver.ex_list_floating_ips.return_value = []

        success, _ = ost_cloud.finalize(vm, auth)

        self.assertTrue(success, msg="ERROR: finalizing VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())
Example #53
0
def run_rounds(opts,args):
    def get_cmd_wd(cmd, exec_rel_cwd=False):
        ''' get the proper working directory from a command line '''
        new_cmd = []
        wd = None
        for i, part in reversed(list(enumerate(cmd.split()))):
            if wd == None and os.path.exists(part):
                wd = os.path.dirname(os.path.realpath(part))
                basename = os.path.basename(part)
                if i == 0:
                    if exec_rel_cwd:
                        new_cmd.insert(0, os.path.join(".", basename))
                    else:
                        new_cmd.insert(0, part)
                else:
                    new_cmd.insert(0, basename)
            else:
                new_cmd.insert(0, part)
        return wd, ' '.join(new_cmd)
    def get_cmd_name(cmd):
        ''' get the name of a bot from the command line '''
        for i, part in enumerate(reversed(cmd.split())):
            if os.path.exists(part):
                return part.split("/")[-2]
# this split of options is not needed, but left for documentation
    game_options = {
        "map": opts.map,
        "attack": opts.attack,
        "kill_points": opts.kill_points,
        "food": opts.food,
        "viewradius2": opts.viewradius2,
        "attackradius2": opts.attackradius2,
        "spawnradius2": opts.spawnradius2,
        "loadtime": opts.loadtime,
        "turntime": opts.turntime,
        "turns": opts.turns,
        "food_rate": opts.food_rate,
        "food_turn": opts.food_turn,
        "food_start": opts.food_start,
        "food_visible": opts.food_visible,
        "cutoff_turn": opts.cutoff_turn,
        "cutoff_percent": opts.cutoff_percent,
        "scenario": opts.scenario }
    if opts.player_seed != None:
        game_options['player_seed'] = opts.player_seed
    if opts.engine_seed != None:
        game_options['engine_seed'] = opts.engine_seed
    engine_options = {
        "loadtime": opts.loadtime,
        "turntime": opts.turntime,
        "map_file": opts.map,
        "turns": opts.turns,
        "log_replay": opts.log_replay,
        "log_stream": opts.log_stream,
        "log_input": opts.log_input,
        "log_output": opts.log_output,
        "log_error": opts.log_error,
        "serial": opts.serial,
        "strict": opts.strict,
        "capture_errors": opts.capture_errors,
        "secure_jail": opts.secure_jail,
        "end_wait": opts.end_wait }
    for round in range(opts.rounds):
        # initialize game
        game_id = round + opts.game_id
        with open(opts.map, 'r') as map_file:
            game_options['map'] = map_file.read()
        if opts.engine_seed:
            game_options['engine_seed'] = opts.engine_seed + round
        game = Ants(game_options)
        # initialize bots
        bots = [get_cmd_wd(arg, exec_rel_cwd=opts.secure_jail) for arg in args]
        bot_count = len(bots)
        # ensure correct number of bots, or fill in remaining positions
        if game.num_players != len(bots):
            if game.num_players > len(bots) and opts.fill:
                extra = game.num_players - len(bots)
                for _ in range(extra):
                    bots.append(bots[-1])
            else:
                print("Incorrect number of bots for map.  Need {0}, got {1}"
                      .format(game.num_players, len(bots)), file=stderr)
                for arg in args:
                    print("Bot Cmd: {0}".format(arg), file=stderr)
                break
        bot_count = len(bots)
        # move position of first bot specified
        if opts.position > 0 and opts.position <= len(bots):
            first_bot = bots[0]
            bots = bots[1:]
            bots.insert(opts.position, first_bot)

        # initialize file descriptors
        if opts.log_dir and not os.path.exists(opts.log_dir):
            os.mkdir(opts.log_dir)
        if not opts.log_replay and not opts.log_stream and (opts.log_dir or opts.log_stdout):
            opts.log_replay = True
        replay_path = None # used for visualizer launch
        
        if opts.log_replay:
            if opts.log_dir:
                replay_path = os.path.join(opts.log_dir, '{0}.replay'.format(game_id))
                engine_options['replay_log'] = open(replay_path, 'w')
            if opts.log_stdout:
                if 'replay_log' in engine_options and engine_options['replay_log']:
                    engine_options['replay_log'] = Tee(sys.stdout, engine_options['replay_log'])
                else:
                    engine_options['replay_log'] = sys.stdout
        else:
            engine_options['replay_log'] = None

        if opts.log_stream:
            if opts.log_dir:
                engine_options['stream_log'] = open(os.path.join(opts.log_dir, '{0}.stream'.format(game_id)), 'w')
            if opts.log_stdout:
                if engine_options.get('stream_log'):
                    engine_options['stream_log'] = Tee(sys.stdout, engine_options['stream_log'])
                else:
                    engine_options['stream_log'] = sys.stdout
        else:
            engine_options['stream_log'] = None
        
        if opts.log_input and opts.log_dir:
            engine_options['input_logs'] = [open(os.path.join(opts.log_dir, '{0}.bot{1}.input'.format(game_id, i)), 'w')
                             for i in range(bot_count)]
        else:
            engine_options['input_logs'] = None
        if opts.log_output and opts.log_dir:
            engine_options['output_logs'] = [open(os.path.join(opts.log_dir, '{0}.bot{1}.output'.format(game_id, i)), 'w')
                              for i in range(bot_count)]
        else:
            engine_options['output_logs'] = None
        if opts.log_error and opts.log_dir:
            if opts.log_stderr:
                if opts.log_stdout:
                    engine_options['error_logs'] = [Tee(Comment(stderr), open(os.path.join(opts.log_dir, '{0}.bot{1}.error'.format(game_id, i)), 'w'))
                                      for i in range(bot_count)]
                else:
                    engine_options['error_logs'] = [Tee(stderr, open(os.path.join(opts.log_dir, '{0}.bot{1}.error'.format(game_id, i)), 'w'))
                                      for i in range(bot_count)]
            else:
                engine_options['error_logs'] = [open(os.path.join(opts.log_dir, '{0}.bot{1}.error'.format(game_id, i)), 'w')
                                  for i in range(bot_count)]
        elif opts.log_stderr:
            if opts.log_stdout:
                engine_options['error_logs'] = [Comment(stderr)] * bot_count
            else:
                engine_options['error_logs'] = [stderr] * bot_count
        else:
            engine_options['error_logs'] = None
        
        if opts.verbose:
            if opts.log_stdout:
                engine_options['verbose_log'] = Comment(sys.stdout)
            else:
                engine_options['verbose_log'] = sys.stdout
            
        engine_options['game_id'] = game_id 
        if opts.rounds > 1:
            print('# playgame round {0}, game id {1}'.format(round, game_id))

        # intercept replay log so we can add player names
        if opts.log_replay:
            intcpt_replay_io = StringIO()
            real_replay_io = engine_options['replay_log']
            engine_options['replay_log'] = intcpt_replay_io

        result = run_game(game, bots, engine_options)

        # add player names, write to proper io, reset back to normal
        if opts.log_replay:
            replay_json = json.loads(intcpt_replay_io.getvalue())
            replay_json['playernames'] = [get_cmd_name(arg) for arg in args]
            real_replay_io.write(json.dumps(replay_json))
            intcpt_replay_io.close()
            engine_options['replay_log'] = real_replay_io

        # close file descriptors
        if engine_options['stream_log']:
            engine_options['stream_log'].close()
        if engine_options['replay_log']:
            engine_options['replay_log'].close()
        if engine_options['input_logs']:
            for input_log in engine_options['input_logs']:
                input_log.close()
        if engine_options['output_logs']:
            for output_log in engine_options['output_logs']:
                output_log.close()
        if engine_options['error_logs']:
            for error_log in engine_options['error_logs']:
                error_log.close()
        if replay_path:
            if opts.nolaunch:
                if opts.html_file:
                    visualizer.visualize_locally.launch(replay_path, True, opts.html_file)
            else:
                if opts.html_file == None:
                    visualizer.visualize_locally.launch(replay_path,
                            generated_path="replay.{0}.html".format(game_id))
                else:
                    visualizer.visualize_locally.launch(replay_path,
                            generated_path=opts.html_file)
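
The replay interception above is the StringIO-relevant part of Example #53: hand the engine a StringIO instead of the real log, post-process the JSON it captured, then forward it. A minimal sketch of that swap, assuming only that the engine writes one JSON document to whatever file-like object it is given (run_with_names and the lambda standing in for run_game are hypothetical):

import json
from io import StringIO

def run_with_names(run, real_log, names):
    buf = StringIO()                      # intercept: the engine writes here
    run(buf)
    replay = json.loads(buf.getvalue())   # post-process the captured JSON
    replay['playernames'] = names
    real_log.write(json.dumps(replay))    # forward to the real destination
    buf.close()

out = StringIO()
run_with_names(lambda f: f.write('{"turns": 3}'), out, ['bot_a', 'bot_b'])
print(out.getvalue())   # {"turns": 3, "playernames": ["bot_a", "bot_b"]}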
Example #54
0
class TestKubernetesConnector(unittest.TestCase):
    """
    Class to test the IM connectors
    """
    def setUp(self):
        self.log = StringIO()
        self.handler = logging.StreamHandler(self.log)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        self.handler.setFormatter(formatter)

        logging.RootLogger.propagate = 0
        logging.root.setLevel(logging.ERROR)

        logger = logging.getLogger('CloudConnector')
        logger.setLevel(logging.DEBUG)
        logger.propagate = 0
        for handler in logger.handlers:
            logger.removeHandler(handler)
        logger.addHandler(self.handler)

    def tearDown(self):
        self.handler.flush()
        self.log.close()
        self.log = StringIO()
        self.handler.close()

    @staticmethod
    def get_kube_cloud():
        cloud_info = CloudInfo()
        cloud_info.type = "Kubernetes"
        cloud_info.protocol = "http"
        cloud_info.server = "server.com"
        cloud_info.port = 8080
        inf = MagicMock()
        inf.id = "1"
        cloud = KubernetesCloudConnector(cloud_info, inf)
        return cloud

    def test_10_concrete(self):
        radl_data = """
            network net ()
            system test (
            cpu.arch='x86_64' and
            cpu.count>=1 and
            memory.size>=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'docker://someimage' and
            disk.0.os.credentials.username = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl_system = radl.systems[0]

        auth = Authentication([{
            'id': 'fogbow',
            'type': 'Kubernetes',
            'host': 'http://server.com:8080'
        }])
        kube_cloud = self.get_kube_cloud()

        concrete = kube_cloud.concreteSystem(radl_system, auth)
        self.assertEqual(len(concrete), 1)
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    def get_response(self, method, url, verify, headers, data):
        resp = MagicMock()
        parts = uriparse(url)
        url = parts[2]

        if method == "GET":
            if url == "/api/":
                resp.status_code = 200
                resp.text = '{"versions": "v1"}'
            elif url.endswith("/pods/1"):
                resp.status_code = 200
                resp.text = (
                    '{"metadata": {"namespace":"namespace", "name": "name"}, "status": '
                    '{"phase":"Running", "hostIP": "158.42.1.1", "podIP": "10.0.0.1"}, '
                    '"spec": {"volumes": [{"persistentVolumeClaim": {"claimName" : "cname"}}]}}'
                )
        elif method == "POST":
            if url.endswith("/pods"):
                resp.status_code = 201
                resp.text = '{"metadata": {"namespace":"namespace", "name": "name"}}'
            if url.endswith("/namespaces"):
                resp.status_code = 201
        elif method == "DELETE":
            if url.endswith("/pods/1"):
                resp.status_code = 200
            if url.endswith("/namespaces/namespace"):
                resp.status_code = 200
            elif "persistentvolumeclaims" in url:
                resp.status_code = 200
        elif method == "PATCH":
            if url.endswith("/pods/1"):
                resp.status_code = 201

        return resp

    @patch('requests.request')
    @patch('IM.InfrastructureList.InfrastructureList.save_data')
    def test_20_launch(self, save_data, requests):
        radl_data = """
            network net1 (outbound = 'yes' and outports = '8080')
            network net2 ()
            system test (
            cpu.arch='x86_64' and
            cpu.count>=1 and
            memory.size>=512m and
            net_interface.0.connection = 'net1' and
            net_interface.0.dns_name = 'test' and
            net_interface.1.connection = 'net2' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'docker://someimage' and
            disk.0.os.credentials.username = '******' and
            disk.1.size=1GB and
            disk.1.device='hdb' and
            disk.1.mount_path='/mnt/path'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl.check()

        auth = Authentication([{
            'id': 'fogbow',
            'type': 'Kubernetes',
            'host': 'http://server.com:8080'
        }])
        kube_cloud = self.get_kube_cloud()

        requests.side_effect = self.get_response

        res = kube_cloud.launch(InfrastructureInfo(), radl, radl, 1, auth)
        success, _ = res[0]
        self.assertTrue(success, msg="ERROR: launching a VM.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('requests.request')
    def test_30_updateVMInfo(self, requests):
        radl_data = """
            network net (outbound = 'yes')
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'docker://someimage' and
            disk.0.os.credentials.username = '******' and
            disk.0.os.credentials.password = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl.check()

        auth = Authentication([{
            'id': 'fogbow',
            'type': 'Kubernetes',
            'host': 'http://server.com:8080'
        }])
        kube_cloud = self.get_kube_cloud()

        inf = MagicMock()
        inf.id = "namespace"
        vm = VirtualMachine(inf, "1", kube_cloud.cloud, radl, radl, kube_cloud,
                            1)

        requests.side_effect = self.get_response

        success, vm = kube_cloud.updateVMInfo(vm, auth)

        self.assertTrue(success, msg="ERROR: updating VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('requests.request')
    def test_55_alter(self, requests):
        radl_data = """
            network net ()
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'one://server.com/1' and
            disk.0.os.credentials.username = '******' and
            disk.0.os.credentials.password = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)

        new_radl_data = """
            system test (
            cpu.count>=2 and
            memory.size>=2048m
            )"""
        new_radl = radl_parse.parse_radl(new_radl_data)

        auth = Authentication([{
            'id': 'fogbow',
            'type': 'Kubernetes',
            'host': 'http://server.com:8080'
        }])
        kube_cloud = self.get_kube_cloud()

        inf = MagicMock()
        inf.id = "namespace"
        vm = VirtualMachine(inf, "1", kube_cloud.cloud, radl, radl, kube_cloud,
                            1)

        requests.side_effect = self.get_response

        success, _ = kube_cloud.alterVM(vm, new_radl, auth)

        self.assertTrue(success, msg="ERROR: modifying VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('requests.request')
    def test_60_finalize(self, requests):
        auth = Authentication([{
            'id': 'fogbow',
            'type': 'Kubernetes',
            'host': 'http://server.com:8080'
        }])
        kube_cloud = self.get_kube_cloud()

        inf = MagicMock()
        inf.id = "namespace"
        vm = VirtualMachine(inf, "1", kube_cloud.cloud, "", "", kube_cloud, 1)

        requests.side_effect = self.get_response

        success, _ = kube_cloud.finalize(vm, True, auth)

        self.assertTrue(success, msg="ERROR: finalizing VM info.")
        self.assertNotIn("ERROR",
                         self.log.getvalue(),
                         msg="ERROR found in log: %s" % self.log.getvalue())
Example #55
0
class MavensMateTest(unittest.TestCase):

    # redirects standard out to a new target
    def redirectStdOut(self):
        new_target = StringIO()
        sys.stdout = new_target
        return new_target

    # runs an mm command, prints to command-specific stdout
    def runCommand(self,
                   command_name_or_argv,
                   stdin,
                   as_json=True,
                   print_before_deserialization=True):
        commandOut = self.redirectStdOut()
        request.get_request_payload = mock.Mock(return_value=stdin)
        if type(command_name_or_argv) is list:
            sys.argv = command_name_or_argv
        else:
            sys.argv = ['mm.py', '-o', command_name_or_argv]
        MavensMateRequestHandler().execute()
        mm_response = commandOut.getvalue()
        sys.stdout = self.saved_stdout
        if print_before_deserialization:
            if type(command_name_or_argv) is list:
                print '[' + str(command_name_or_argv[2]) + '] ------->'
            else:
                print '[' + str(command_name_or_argv) + '] ------->'
            print mm_response
        if as_json:
            mm_response = util.parse_mm_response(mm_response)
        return mm_response

    # runs before every test method
    def setUp(self):
        # self.commandStdOut = StringIO();

        self.output = StringIO()
        self.saved_stdout = sys.stdout
        sys.stdout = self.output

        # get settings from test client settings (modeled after ST3 settings)
        # found in default_client_settings.json and user_client_settings.json
        self.settings = util.get_plugin_client_settings()

        # test suite runs for each supported version of the api
        api_version = os.environ.get('SFDC_API_VERSION', 30.0)
        self.settings['user']['mm_api_version'] = api_version

        # set up CI-specific settings
        is_ci = os.environ.get('CI') == 'true' or os.environ.get('CI') == True
        if is_ci:
            self.settings['user']['mm_workspace'] = os.path.join(
                os.path.dirname(__file__), 'test_workspace')
            self.settings['user']['mm_use_keyring'] = False
        PluginConnection.get_plugin_client_settings = mock.Mock(
            return_value=self.settings)

    def tearDown(self):
        self.output.close()
        sys.stdout = self.saved_stdout

    def getTestApiVersion(self):
        test_api_version = os.environ.get('SFDC_API_VERSION', '30.0')
        return int(float(test_api_version))
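
Example #55's stdout capture is the classic swap-and-restore: replace sys.stdout with a StringIO, run the command, read getvalue(), then restore the saved stream in tearDown. Reduced to its core (Python 3 shown; contextlib.redirect_stdout packages the same pattern):

import sys
from io import StringIO

saved = sys.stdout
sys.stdout = buf = StringIO()
try:
    print("hello from the command")   # goes into the buffer
finally:
    sys.stdout = saved                # always restore, even if the command raises

print("captured:", buf.getvalue().strip())   # captured: hello from the command
buf.close()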
Example #56
0
class TestOSTConnector(unittest.TestCase):
    """
    Class to test the IM connectors
    """

    def setUp(self):
        self.error_in_create = True
        self.log = StringIO()
        self.handler = logging.StreamHandler(self.log)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        self.handler.setFormatter(formatter)

        logging.RootLogger.propagate = 0
        logging.root.setLevel(logging.ERROR)

        logger = logging.getLogger('CloudConnector')
        logger.setLevel(logging.DEBUG)
        logger.propagate = 0
        for handler in logger.handlers:
            logger.removeHandler(handler)
        logger.addHandler(self.handler)

    def tearDown(self):
        self.handler.flush()
        self.log.close()
        self.log = StringIO()
        self.handler.close()

    @staticmethod
    def get_ost_cloud():
        cloud_info = CloudInfo()
        cloud_info.type = "OpenStack"
        cloud_info.protocol = "https"
        cloud_info.server = "server.com"
        cloud_info.port = 5000
        inf = MagicMock()
        inf.id = "1"
        one_cloud = OpenStackCloudConnector(cloud_info, inf)
        return one_cloud

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_10_concrete(self, get_driver):
        radl_data = """
            network net ()
            system test (
            cpu.arch='x86_64' and
            cpu.count>=1 and
            memory.size>=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'ost://server.com/ami-id' and
            disk.0.os.credentials.username = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl_system = radl.systems[0]

        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        driver = MagicMock()
        get_driver.return_value = driver

        node_size = MagicMock()
        node_size.ram = 512
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 1
        node_size.name = "small"
        driver.list_sizes.return_value = [node_size]

        concrete = ost_cloud.concreteSystem(radl_system, auth)
        self.assertEqual(len(concrete), 1)
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

    def create_node(self, **kwargs):
        """
        Create VMs, returning an error only the first time
        """
        if self.error_in_create:
            self.error_in_create = False
            raise Exception("Error creating VM")
        else:
            node = MagicMock()
            node.id = "ost1"
            node.name = "ost1name"
            return node

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    @patch('IM.InfrastructureList.InfrastructureList.save_data')
    def test_20_launch(self, save_data, get_driver):
        radl_data = """
            network net1 (outbound = 'yes' and provider_id = 'public' and
                          outports = '8080,9000:9100' and sg_name= 'test')
            network net2 ()
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net1' and
            net_interface.0.dns_name = 'test' and
            net_interface.1.connection = 'net2' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'ost://server.com/ami-id' and
            disk.0.os.credentials.username = '******' and
            disk.1.size=1GB and
            disk.1.device='hdb' and
            disk.1.mount_path='/mnt/path'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl.check()

        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        driver = MagicMock()
        get_driver.return_value = driver

        node_size = MagicMock()
        node_size.ram = 512
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 1
        node_size.name = "small"
        driver.list_sizes.return_value = [node_size]

        net = MagicMock()
        net.name = "public"
        driver.ex_list_networks.return_value = [net]

        sg = MagicMock()
        sg.name = "sg"
        driver.ex_create_security_group.return_value = sg
        driver.ex_list_security_groups.return_value = []
        driver.ex_create_security_group_rule.return_value = True

        keypair = MagicMock()
        keypair.public_key = "public"
        keypair.private_key = "private"
        driver.create_key_pair.return_value = keypair
        driver.features = {'create_node': ['ssh_key']}

        driver.create_node.side_effect = self.create_node

        res = ost_cloud.launch_with_retry(InfrastructureInfo(), radl, radl, 1, auth, 2, 1)
        success, _ = res[0]
        self.assertTrue(success, msg="ERROR: launching a VM.")
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

        # test with proxy auth data
        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'proxy': 'proxy',
                                'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        res = ost_cloud.launch(InfrastructureInfo(), radl, radl, 1, auth)
        success, _ = res[0]
        self.assertTrue(success, msg="ERROR: launching a VM.")

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_30_updateVMInfo(self, get_driver):
        radl_data = """
            network net (outbound = 'yes')
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'one://server.com/1' and
            disk.0.os.credentials.username = '******' and
            disk.0.os.credentials.password = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)
        radl.check()

        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, radl, radl, ost_cloud, 1)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small',
                      'addresses': {'os-lan': [{'addr': '10.0.0.1', 'OS-EXT-IPS:type': 'fixed'}]}}
        node.public_ips = []
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.ex_get_node_details.return_value = node

        node_size = MagicMock()
        node_size.ram = 512
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 1
        node_size.name = "small"
        driver.ex_get_size.return_value = node_size

        volume = MagicMock()
        volume.id = "vol1"
        volume.attach.return_value = True
        driver.create_volume.return_value = volume

        pool = MagicMock()
        pool.name = "pool1"
        floating_ip = MagicMock()
        floating_ip.ip_address = "8.8.8.8"
        pool.list_floating_ips.return_value = []
        pool.create_floating_ip.return_value = floating_ip
        driver.ex_list_floating_ip_pools.return_value = [pool]

        success, vm = ost_cloud.updateVMInfo(vm, auth)

        self.assertTrue(success, msg="ERROR: updating VM info.")
        self.assertEquals(vm.info.systems[0].getValue("net_interface.1.ip"), "10.0.0.1")

        # In this case the Node has the floating IP assigned
        # node.public_ips = ['8.8.8.8']
        floating_ip.node_id = node.id
        pool.list_floating_ips.return_value = [floating_ip]
        driver.ex_list_floating_ip_pools.return_value = [pool]

        success, vm = ost_cloud.updateVMInfo(vm, auth)

        self.assertTrue(success, msg="ERROR: updating VM info.")
        self.assertEquals(vm.info.systems[0].getValue("net_interface.1.ip"), "10.0.0.1")
        self.assertEquals(vm.info.systems[0].getValue("net_interface.0.ip"), "8.8.8.8")

        # In this case the Node addresses are not available and it uses the old method
        node.extra = {'flavorId': 'small'}
        success, vm = ost_cloud.updateVMInfo(vm, auth)
        self.assertEquals(vm.info.systems[0].getValue("net_interface.1.ip"), "10.0.0.1")
        self.assertEquals(vm.info.systems[0].getValue("net_interface.0.ip"), "8.8.8.8")

        self.assertTrue(success, msg="ERROR: updating VM info.")

        # the node has a IPv6 IP
        node = MagicMock()
        node.id = "2"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['8.8.8.8', '2001:630:12:581:f816:3eff:fe92:2146']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.ex_get_node_details.return_value = node

        success, vm = ost_cloud.updateVMInfo(vm, auth)
        self.assertTrue(success, msg="ERROR: updating VM info.")
        self.assertEquals(vm.info.systems[0].getValue("net_interface.0.ip"), "8.8.8.8")
        self.assertEquals(vm.info.systems[0].getValue("net_interface.0.ipv6"), "2001:630:12:581:f816:3eff:fe92:2146")
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_40_stop(self, get_driver):
        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, "", "", ost_cloud, 1)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.ex_get_node_details.return_value = node

        driver.ex_stop_node.return_value = True

        success, _ = ost_cloud.stop(vm, auth)

        self.assertTrue(success, msg="ERROR: stopping VM info.")
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_50_start(self, get_driver):
        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, "", "", ost_cloud, 1)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.ex_get_node_details.return_value = node

        driver.ex_start_node.return_value = True

        success, _ = ost_cloud.start(vm, auth)

        self.assertTrue(success, msg="ERROR: stopping VM info.")
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_55_alter(self, get_driver):
        radl_data = """
            network net ()
            system test (
            cpu.arch='x86_64' and
            cpu.count=1 and
            memory.size=512m and
            net_interface.0.connection = 'net' and
            net_interface.0.dns_name = 'test' and
            disk.0.os.name = 'linux' and
            disk.0.image.url = 'one://server.com/1' and
            disk.0.os.credentials.username = '******' and
            disk.0.os.credentials.password = '******'
            )"""
        radl = radl_parse.parse_radl(radl_data)

        new_radl_data = """
            system test (
            cpu.count>=2 and
            memory.size>=2048m
            )"""
        new_radl = radl_parse.parse_radl(new_radl_data)

        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, radl, radl, ost_cloud, 1)

        driver = MagicMock()
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        driver.ex_get_node_details.return_value = node

        node_size = MagicMock()
        node_size.ram = 2048
        node_size.price = 1
        node_size.disk = 1
        node_size.vcpus = 2
        node_size.name = "small"
        driver.list_sizes.return_value = [node_size]

        driver.ex_resize.return_value = True

        success, _ = ost_cloud.alterVM(vm, new_radl, auth)

        self.assertTrue(success, msg="ERROR: modifying VM info.")
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    @patch('time.sleep')
    def test_60_finalize(self, sleep, get_driver):
        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        radl_data = """
            network public (outbound = 'yes')
            system test (
            cpu.count>=2 and
            memory.size>=2048m
            )"""
        radl = radl_parse.parse_radl(radl_data)

        inf = MagicMock()
        inf.id = "infid"
        inf.radl = radl
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, radl, radl, ost_cloud, 1)

        driver = MagicMock()
        driver.name = "OpenStack"
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.state = "running"
        node.extra = {'flavorId': 'small'}
        node.public_ips = ['158.42.1.1']
        node.private_ips = ['10.0.0.1']
        node.driver = driver
        node.destroy.return_value = True
        driver.ex_get_node_details.return_value = node

        keypair = MagicMock()
        driver.get_key_pair.return_value = keypair
        vm.keypair = keypair

        driver.delete_key_pair.return_value = True

        driver.delete_security_group.return_value = True

        driver.ex_list_floating_ips.return_value = []

        success, _ = ost_cloud.finalize(vm, True, auth)

        self.assertTrue(success, msg="ERROR: finalizing VM info.")
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_70_create_snapshot(self, get_driver):
        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        inf = MagicMock()
        vm = VirtualMachine(inf, "1", ost_cloud.cloud, "", "", ost_cloud, 1)

        driver = MagicMock()
        driver.name = "OpenStack"
        get_driver.return_value = driver

        node = MagicMock()
        node.id = "1"
        node.driver = driver
        driver.ex_get_node_details.return_value = node
        image = MagicMock()
        image.id = "newimage"
        driver.create_image.return_value = image

        success, new_image = ost_cloud.create_snapshot(vm, 0, "image_name", True, auth)

        self.assertTrue(success, msg="ERROR: creating snapshot: %s" % new_image)
        self.assertEqual(new_image, "ost://server.com/newimage")
        self.assertEqual(driver.create_image.call_args_list, [call(node, "image_name")])
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())

    @patch('libcloud.compute.drivers.openstack.OpenStackNodeDriver')
    def test_80_delete_image(self, get_driver):
        auth = Authentication([{'id': 'ost', 'type': 'OpenStack', 'username': '******',
                                'password': '******', 'tenant': 'tenant', 'host': 'https://server.com:5000'}])
        ost_cloud = self.get_ost_cloud()

        driver = MagicMock()
        driver.name = "OpenStack"
        get_driver.return_value = driver

        image = MagicMock()
        image.id = "image"
        driver.get_image.return_value = image

        success, msg = ost_cloud.delete_image('ost://server.com/image', auth)

        self.assertTrue(success, msg="ERROR: deleting image. %s" % msg)
        self.assertEqual(driver.delete_image.call_args_list, [call(image)])
        self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
Example #57
0
>>> #     f.write  f.read  f.seek  f.close
>>> 
>>> # We want it to store a string
>>> #     f.getvalue()
>>> 
>>> 
>>> from StringIO import StringIO
>>> f = StringIO()
>>> dir(f)
['__doc__', '__init__', '__iter__', '__module__', 'buf', 'buflist', 'close', 'closed', 'flush', 'getvalue', 'isatty', 'len', 'next', 'pos', 'read', 'readline', 'readlines', 'seek', 'softspace', 'tell', 'truncate', 'write', 'writelines']
>>> 
>>> f.write('Line one\n')
>>> f.write('Line two\n')
>>> f.getvalue()
'Line one\nLine two\n'
>>> f.close()
>>> 
Example #58
0
    def test_import_export(self):
        filehandle = StringIO()
        self.create_test_workbook(filehandle)
        xls_file = filehandle.getvalue()
        filehandle.close()

        # import
        browser = self.new_admin_browser()
        browser.open('/import-agencies')
        widget = browser.getControl(name='form.widgets.xls_file')
        widget.add_file(xls_file, 'application/xls', 'import.xls')
        browser.getControl('Import').click()

        # export
        browser.open('/export-agencies')
        self.assertEquals(browser.headers['Content-disposition'], 'export.xls')
        self.assertEquals(browser.headers['Content-Type'], 'application/xls')
        self.assertTrue(int(browser.headers['Content-Length']) > 0)

        # compare
        workbook = xlrd.open_workbook(file_contents=browser.contents)
        sheets = workbook.sheets()
        self.assertEquals(len(sheets), 2)
        self.assertEquals(sheets[0].name, u'Organizations')
        self.assertEquals(sheets[0].ncols, len(TITLES_ORGANIZATION))
        self.assertEquals(sheets[0].nrows, 5)

        for row, content in enumerate(self.organizations):
            self.assertEquals([cell.value for cell in sheets[0].row(row + 1)],
                              content)

        self.assertEquals(sheets[1].name, u'People')
        self.assertEquals(sheets[1].ncols, len(TITLES_REGISTER))
        self.assertEquals(sheets[1].nrows, 4)
        for row, content in enumerate(self.people):
            self.assertEquals(
                [cell.value for cell in sheets[1].row(row + 1)[:-1]],
                content[:-1])

        # roles are slightly different: position starts at zero, order is
        # random
        self.assertEquals(u'(0)(role)(2006)(*)(1)', sheets[1].cell(1,
                                                                   -1).value)
        self.assertIn(u'(1)()()()(0)', sheets[1].cell(2, -1).value)
        self.assertIn(u'(3)()()()(0)', sheets[1].cell(2, -1).value)
        self.assertIn(u'(0)()()()(0)', sheets[1].cell(2, -1).value)
        self.assertIn(u'(2)(role)()()(0)', sheets[1].cell(3, -1).value)
        self.assertIn(u'(3)()()(*)(1)', sheets[1].cell(3, -1).value)

        # browse imported items
        browser.open('/organizations')
        self.assertIn('des_0', browser.contents)
        self.assertIn('port_0', browser.contents)
        self.assertIn('org_1', browser.contents)
        self.assertIn('org_2', browser.contents)
        self.assertNotIn('org_3', browser.contents)

        browser.open('/organizations/org_1')
        self.assertIn('des_1', browser.contents)
        self.assertIn('port_1', browser.contents)
        self.assertNotIn('org_2', browser.contents)
        self.assertNotIn('org_3', browser.contents)

        browser.open('/organizations/org_2')
        self.assertIn('des_2', browser.contents)
        self.assertIn('port_2', browser.contents)
        self.assertNotIn('org_1', browser.contents)
        self.assertIn('org_3', browser.contents)

        browser.open('/organizations/org_2/org_3')
        self.assertIn('des_3', browser.contents)
        self.assertIn('port_3', browser.contents)
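
Example #58 builds its upload payload entirely in memory: create_test_workbook writes an .xls workbook into the StringIO and getvalue() hands the result to the form widget. The write-to-memory step on its own (xlwt is an assumption here, since only the xlrd read side is shown above; on Python 3 the buffer must be io.BytesIO, because .xls content is binary):

import xlwt
from io import BytesIO

buf = BytesIO()
workbook = xlwt.Workbook()
sheet = workbook.add_sheet('Organizations')
sheet.write(0, 0, 'name')      # header cell in row 0, column 0
workbook.save(buf)             # serialize the workbook into the buffer
payload = buf.getvalue()       # bytes ready to upload or attach
buf.close()
print(len(payload), "bytes")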
Example #59
0
"""
Python 2 provides two implementations: StringIO (pure Python) and cStringIO
(written in C), which is faster.

In Python 3, both of these modules are replaced by the io module
"""
try:
    from StringIO import StringIO  # # for Python 2
except ImportError:
    from io import StringIO  # # for Python 3

# Arbitrary string
message = "This is just a normal string\n"

# Use StringIO method to set as file object
f = StringIO(message)

data = f.read()
print("Data:", data)

f.write("Second line written to file like object")

print(f"\n current cursor postion is {f.tell()}")
print("Again", f.read())

f.seek(0)
print(f"\n current cursor postion is {f.tell()}")
print("Again", f.read())

f.close()
# f.read()
# ValueError: I/O operation on closed file
Example #60
0
def _html2text(html):
    sio = StringIO()
    html2text.html2text_file(html, sio.write)
    text = sio.getvalue()
    sio.close()
    return text
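
A hypothetical call to the helper above (html2text_file is taken from the snippet itself and exists in older html2text releases; treat the exact output formatting as approximate):

html = "<h1>Title</h1><p>Some <b>bold</b> text.</p>"
print(_html2text(html))   # roughly: "# Title\n\nSome **bold** text."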