Example #1
    def load_bios(self, from_where=None):
        "If the desired BIOS file doesn't exist, try to load it from memory"

        if from_where is None:
            from_where = SAVE_PATH

        if not exists(from_where):
            bios_data = self.retrieve_from_memory()

            print("Saving BIOS to '%s' md5:%s" % (SAVE_PATH, md5sum(bios_data)))
            with open(SAVE_PATH, "wb") as f_bios:
                f_bios.write(bios_data)

        else:
            with open(from_where, "rb") as f_bios:
                bios_data = f_bios.read()

            print("Opened BIOS '%s' with md5:%s" % (from_where, md5sum(bios_data)))

        return bios_data
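This example calls md5sum directly on the BIOS bytes, but the helper itself isn't shown. A minimal sketch, assuming it simply hashes a bytes object with hashlib (hypothetical, not the project's actual helper):

import hashlib

# Sketch of the md5sum helper this example assumes:
# hash a bytes object and return the hex digest.
def md5sum(data):
    return hashlib.md5(data).hexdigest()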
Example #2
    def __init__(self, from_where=None):
        "Create a BIOS object"

        bios_data = self.load_bios(from_where)

        print "Operating on BIOS %s size = 0x%x" % (SAVE_PATH, len(bios_data))

        print "Loading compressed sections"
        compressed_chunks = get_lzma_chunks(bios_data)
        print " .. found %i compressed sections" % len(compressed_chunks)

        print "Locating Firmware Volumes"
        volumes = self.locate_firmware_volumes(bios_data)

        for position, data in compressed_chunks:
            # if False:
            #     with open("data/fv-compr-0x%08x" % position, "wb") as f:
            #         # Dump the executable with the PE header in the right place
            #         f.write(data[data.index(b"MZ"):])

            where = "[compr at 0x%x]" % position
            volumes.extend(self.locate_firmware_volumes(data, where))

        # Only good volumes
        volumes = [fv for fv in volumes if fv.good]
        vol_compr = [fv for fv in volumes if fv.where and "compr" in fv.where]

        print("  .. found %i FirmwareVolumes (%i compressed)" %
              (len(volumes), len(vol_compr)))

        setup_utility = self.locate_setup_utility(vol_compr)

        TYPE_PE = 0x10
        setup_utility_pe = self.get_section_type(setup_utility[1], TYPE_PE)

        dump_filename = "data/SetupUtility-%s.pe" % WHAT
        if not exists(dump_filename):
            pe = setup_utility_pe

            with open(dump_filename, "wb") as fd:
                fd.write(pe.data)

            print "Wrote SetupUtility to %s" % dump_filename
            print "  Size = 0x%x MD5: %s" % (len(pe.data), md5sum(pe.data))

        self.locate_packs(setup_utility_pe.data)

        self.locate_vss(volumes)
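get_lzma_chunks is referenced above but not defined here. A rough sketch, assuming it scans the image for LZMA "alone"-format headers and returns (offset, decompressed bytes) pairs; the magic-byte heuristic and brute-force scan are assumptions, not the project's actual implementation:

import lzma

def get_lzma_chunks(blob):
    # Scan a firmware image for candidate LZMA "alone" streams and
    # return (offset, decompressed_bytes) pairs for the ones that decode.
    chunks = []
    pos = 0
    while True:
        # 0x5D 00 00 is a common prefix of .lzma stream headers
        # (properties byte plus dictionary-size bytes); heuristic only.
        pos = blob.find(b"\x5d\x00\x00", pos)
        if pos == -1:
            break
        try:
            dec = lzma.LZMADecompressor(format=lzma.FORMAT_ALONE)
            chunks.append((pos, dec.decompress(blob[pos:])))
        except lzma.LZMAError:
            pass  # false positive, keep scanning
        pos += 1
    return chunks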
Example #3
    def doImport(self, filename):
        md5 = md5sum(filename)
        if self.alreadyImported(md5, filename):
            return
        bid = self.registerBench(md5, filename)
        db = self.db
        logging.info("Importing {0}: {1} into bid: {2}".format(
            self._name, filename, bid))
        if filename.endswith(('xml', 'xml.gz', 'jtl', 'jtl.gz')):
            self.importXmlFile(bid, filename)
        else:
            self.importOtherFormat(bid, filename)
        self.finalizeImport(bid, db)
        db.commit()
        return bid
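Here md5sum takes a filename rather than raw bytes. A plausible file-based variant (again a sketch, reading in fixed-size chunks so large files are not loaded into memory at once):

import hashlib

def md5sum(filename, blocksize=65536):
    # Hash a file on disk chunk by chunk and return the hex digest.
    digest = hashlib.md5()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digest.update(block)
    return digest.hexdigest()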
Example #4
    def store_data(self, name, mime, data):
        filename = os.path.join(self.path, name)
        with open(filename, 'w') as f:
            f.write(data)

        # calculate md5sum for the file
        # this is intentionally done to verify that the file is not corrupted on disk
        # TODO this should be async
        md5sum = util.md5sum(filename)

        # write metadata file (avoid using ConfigParser, it's a simple file)
        with open(filename + '.properties', 'w') as f:
            f.write("mime=%s\n" % (mime,))
            f.write("md5sum=%s\n" % (md5sum,))

        return filename
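The .properties sidecar is a plain key=value file, so reading it back needs no ConfigParser either. A hypothetical verification helper (not part of the original example) could re-check the stored checksum like this:

def verify_stored_data(filename):
    # Parse the key=value sidecar written by store_data and compare
    # the recorded md5sum with the file's current digest.
    props = {}
    with open(filename + '.properties') as f:
        for line in f:
            key, _, value = line.strip().partition('=')
            props[key] = value
    return props.get('md5sum') == util.md5sum(filename)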
Example #5
    def __update(self, url):
        logging.info("开始更新 {}".format(url))
        try:
            chats = self.database.get_chats_by_url(url)
            mark = self.database.get_mark(url)
            _rssitems = self.fether.update_rss(url)
            self.database.set_mark(url, _rssitems[0].mark)
            rssitems = []
            normal = False
            for rssitem in _rssitems:
                iid = util.md5sum(rssitem.url + rssitem.mark)
                if rssitem.mark == mark:
                    normal = True
                    logging.info("所有新文章处理完毕 {}".format(rssitem.title))
                    break
                elif self.recently_used_elements.has_element(iid):
                    logging.info("此文章最近推送过 {}".format(rssitem.name))
                    continue
                else:
                    rssitems.append(rssitem)
                    logging.info("添加新文章 {}".format(rssitem.name))
            if not normal:
                rssitems.clear()
                logging.info("出现异常,清空所有文章 {}".format(rssitem.title))
            self.et[url] = 0
            if len(rssitems) > 0:
                logging.info("准备发送更新 {}".format(rssitem.title))
                self.__send(rssitems, chats)

        except (ParseError, IndexError):
            self.et[url] = self.et.setdefault(url, 0) + 1
            if self.et[url] >= int(self.el):
                self.database.set_active(url, False)
                title = self.database.get_rss_by_url(url).title
                text = '<a href="{}">{} </a>'.format(url, title)
                text += 'encountered an error while updating; pushing has stopped, please check the feed and re-subscribe'
                for chat_id in chats:
                    self.__send_html(chat_id, text)
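recently_used_elements, which deduplicates articles by the md5-based iid, isn't shown. A bounded sketch of such a cache (class and method names assumed from the call site) might be:

from collections import OrderedDict

class RecentlyUsedElements:
    # Fixed-size record of recently seen ids; the oldest id is
    # evicted once the limit is reached. (Sketch only.)
    def __init__(self, maxsize=1024):
        self.maxsize = maxsize
        self._items = OrderedDict()

    def has_element(self, iid):
        return iid in self._items

    def add_element(self, iid):
        self._items[iid] = True
        if len(self._items) > self.maxsize:
            self._items.popitem(last=False)  # drop the oldest id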
Example #6
    def get_old_files(selection, paths, dest):
        curtotalrev = util.getrevcount(r)
        oldrevhash = revhash(r, selection, paths)
        cmdargs = [
            "gin", "version", "--max-count", "0", "--copy-to", dest, *paths
        ]
        print(f"Running gin version command: {cmdargs} with input {selection}")
        r.runcommand(*cmdargs, inp=str(selection))
        # no new commits
        newn = util.getrevcount(r)
        assert newn == curtotalrev,\
            "New commit was created when it shouldn't have been"

        out, err = r.runcommand("git", "ls-files", dest, exit=False)
        assert not len(out)  # none of the new files should be in git

        # hash checked out file(s) and check against original
        # assumes all files in dest are from oldrevhash
        for fn in util.lsfiles(dest):
            assert not os.path.islink(fn)
            cohash = util.md5sum(fn)
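            # recover the original name: strip the dest prefix and the
            # 18-character suffix appended to each copied file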
            origname = fn[len(dest) + 1:-18]
            assert cohash == r.hashes[oldrevhash][origname],\
                "Checked out file hash verification failed"
Example #7
	'db': {
		'driver': 'mysql',
		'dbname': '',
		'user': '',
		'password': '',
		'host': 'localhost',
		'port': 1433,  # note: 1433 is the SQL Server default; MySQL normally listens on 3306
		'charset': 'utf8',
		'tablePrefix': '',
	},
	'session': {
		'store_type': 'file',
		'file_directory': 'data/session',
		'cookie_name': 'drape_session_id',
		'timeout': 24*3600,
		'secret_key': util.md5sum('drape_web_framework'),
	},
	'view': {
		'template_type': 'jinja2',
	},
	'sae_storage': dict(
		domain_name='storage'
	)
}

def update(newconfig):
	global config
	util.deepmerge(config, newconfig)
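
update relies on util.deepmerge to fold newconfig into the defaults above. A minimal sketch of such a helper, assuming nested dicts are merged recursively and everything else is overwritten:

def deepmerge(base, extra):
	# Recursively merge dict 'extra' into dict 'base' in place;
	# non-dict values in 'extra' overwrite those in 'base'.
	for key, value in extra.items():
		if isinstance(value, dict) and isinstance(base.get(key), dict):
			deepmerge(base[key], value)
		else:
			base[key] = value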

try:
	import app.config.config as appconfig