def ncd_probe(xbytes, cx, ybytes):
    # One-sided normalized-compression-distance probe: the caller precomputes
    # cx = bz2.compress(xbytes); ybytes is the candidate data (e.g. read from a file).
    xybytes = xbytes + ybytes
    cy = bz2.compress(ybytes)
    cxy = bz2.compress(xybytes)
    n = (len(cxy) - len(cy)) / float(len(cx))
    return n
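A minimal usage sketch (file names are hypothetical, not from the source): the point of passing cx in is that the reference side is compressed once and reused across many probes.

import bz2

# Hypothetical driver: compress the reference once, probe many candidates.
xbytes = open('reference.bin', 'rb').read()
cx = bz2.compress(xbytes)
for name in ('candidate_a.bin', 'candidate_b.bin'):
    ybytes = open(name, 'rb').read()
    print(name, ncd_probe(xbytes, cx, ybytes))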
Example #2
    def dump_policy(self):
        """
            Main window calls this method to get policy generated by UI.
            Not required for global widgets.
        """

        """
        firewallState = "off"
        if self.radioEnable.isChecked():
            firewallState = "on"
        """

        rules_xml = bz2.compress(self.rules_xml)
        rules_xml = base64.encodestring(rules_xml)

        rules_compiled = bz2.compress(self.rules_compiled)
        rules_compiled = base64.encodestring(rules_compiled)

        firewallRules = rules_xml + ":" + rules_compiled

        # Disable firewall state for now
        policy = {
            #"firewallState": [firewallState],
            "firewallRules": [firewallRules],
        }
        return policy
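Whatever consumes this policy presumably splits the value on the colon and reverses both encodings; a sketch of that read path (assumed, not shown in the source):

# Assumed inverse: base64.encodestring never emits ':', so one split is safe.
rules_xml_b64, rules_compiled_b64 = firewallRules.split(":", 1)
rules_xml = bz2.decompress(base64.decodestring(rules_xml_b64))
rules_compiled = bz2.decompress(base64.decodestring(rules_compiled_b64))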
Example #3
def test_bz2(guys):
    """ Look at the bz2 compression ratio """
    tot = len(guys)
    # Baseline: compressed size of a maximally-compressible string of equal length.
    short = len(bz2.compress("0"*tot))
    c_length = len(bz2.compress(to_string(guys)))
    
    return (c_length-short)/float(tot-short)
Example #4
File: patch.py Project: alarmz/esky
 def diff(source,target):
     (tcontrol,bdiff,bextra) = _cx_bsdiff.Diff(source,target)
     #  Write control tuples as series of offts
     bcontrol = BytesIO()
     for c in tcontrol:
         for x in c:
             bcontrol.write(_encode_offt(x))
     del tcontrol
     bcontrol = bcontrol.getvalue()
     #  Compress each block
     bcontrol = bz2.compress(bcontrol)
     bdiff = bz2.compress(bdiff)
     bextra = bz2.compress(bextra)
     #  Final structure is:
     #  (header)(len bcontrol)(len bdiff)(len target)(bcontrol)\
     #  (bdiff)(bextra)
     return "".join((
         "BSDIFF40",
         _encode_offt(len(bcontrol)),
         _encode_offt(len(bdiff)),
         _encode_offt(len(target)),
         bcontrol,
         bdiff,
         bextra,
     ))
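For reference, a sketch of parsing that layout back apart; it assumes a hypothetical _decode_offt that inverts the 8-byte _encode_offt, and is not part of the original project:

def split_patch(patch):
    # Layout: (header)(len bcontrol)(len bdiff)(len target)(bcontrol)(bdiff)(bextra)
    assert patch[:8] == "BSDIFF40"
    len_bcontrol = _decode_offt(patch[8:16])   # hypothetical inverse helper
    len_bdiff = _decode_offt(patch[16:24])
    len_target = _decode_offt(patch[24:32])
    bcontrol = bz2.decompress(patch[32:32 + len_bcontrol])
    bdiff = bz2.decompress(patch[32 + len_bcontrol:32 + len_bcontrol + len_bdiff])
    bextra = bz2.decompress(patch[32 + len_bcontrol + len_bdiff:])
    return bcontrol, bdiff, bextra, len_target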
Example #5
def compressConfigFiles(config):
	files = {}
	files["detectorsDat"] = config.detectorsDat
	files["comgeantOpts"] = config.comgeantOptions
	files["comgeantGeom22"] = config.comgeantGeom22
	files["comgeantGeom23"] = config.comgeantGeom23
	files["comgeantGeom24"] = config.comgeantGeom24
	files["coralOpts"] = config.coralOptions
	for inFile in files.keys():
		if files[inFile] == '':
			files[inFile] = buffer('')
		else:
			with open(files[inFile], 'r') as inputFile:
				files[inFile] = buffer(_bz2.compress(inputFile.read(), 9))
	memoryFile = _StringIO.StringIO()
	config._configFile.write(memoryFile)
	files["scriptConf"] = buffer(_bz2.compress(memoryFile.getvalue(), 9))
	generatorConf = ""
	for inputFileName in config.generatorFilesToWatch:
		header = "#          START FILE '" + inputFileName + "'          #"
		separator = len(header) * '#'
		generatorConf += separator + "\n" + header + "\n" + separator + "\n\n\n"
		with open(inputFileName, 'r') as inputFile:
			generatorConf += inputFile.read()
		generatorConf += "\n\n" + separator + "\n"
		generatorConf += "#          END FILE '" + inputFileName + "'            #\n"
		generatorConf += separator + "\n"
	files["generatorConf"] = buffer(_bz2.compress(generatorConf, 9))
	return files
Example #6
    def bsdiff4_diff(source,target):
        """Generate a BSDIFF4-format patch from 'source' to 'target'.

        You must have cx-bsdiff installed for this to work; if I get really
        bored I might do a pure-python version but it would probably be too
        slow and ugly to be worthwhile.
        """
        (tcontrol,bdiff,bextra) = cx_bsdiff.Diff(source,target)
        #  Write control tuples as series of offts
        bcontrol = BytesIO()
        for c in tcontrol:
            for x in c:
                bcontrol.write(_encode_offt(x))
        del tcontrol
        bcontrol = bcontrol.getvalue()
        #  Compress each block
        bcontrol = bz2.compress(bcontrol)
        bdiff = bz2.compress(bdiff)
        bextra = bz2.compress(bextra)
        #  Final structure is:
        #  (head)(len bcontrol)(len bdiff)(len target)(bcontrol)(bdiff)(bextra)
        return "".join((
            "BSDIFF40",
            _encode_offt(len(bcontrol)),
            _encode_offt(len(bdiff)),
            _encode_offt(len(target)),
            bcontrol,
            bdiff,
            bextra,
        ))
Example #7
def bundleProxy( executableFile, proxy ):
  """ Create a self extracting archive bundling together an executable script and a proxy
  """
  
  compressedAndEncodedProxy = base64.encodestring( bz2.compress( proxy.dumpAllToString()['Value'] ) ).replace( '\n', '' )
  compressedAndEncodedExecutable = base64.encodestring( bz2.compress( open( executableFile, "rb" ).read(), 9 ) ).replace( '\n', '' )

  bundle = """#!/usr/bin/env python
# Wrapper script for executable and proxy
import os, tempfile, sys, stat, base64, bz2, shutil
try:
  workingDirectory = tempfile.mkdtemp( suffix = '_wrapper', prefix= 'TORQUE_' )
  os.chdir( workingDirectory )
  open( 'proxy', "w" ).write(bz2.decompress( base64.decodestring( "%(compressedAndEncodedProxy)s" ) ) )
  open( '%(executable)s', "w" ).write(bz2.decompress( base64.decodestring( "%(compressedAndEncodedExecutable)s" ) ) )
  os.chmod('proxy', stat.S_IRUSR | stat.S_IWUSR)
  os.chmod('%(executable)s', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
  os.environ["X509_USER_PROXY"]=os.path.join(workingDirectory, 'proxy')
except Exception as x:
  print >> sys.stderr, x
  sys.exit(-1)
cmd = "./%(executable)s"
print 'Executing: ', cmd
sys.stdout.flush()
os.system( cmd )

shutil.rmtree( workingDirectory )

""" % { 'compressedAndEncodedProxy': compressedAndEncodedProxy, \
        'compressedAndEncodedExecutable': compressedAndEncodedExecutable, \
        'executable': os.path.basename( executableFile ) }

  return bundle
Example #8
	def _child_process(self, processing_object, shipment):
		process_list = []

		def __lookup_address(route):
			print nodes.keys()
			print route
			return nodes[route]

		def __connect(self, container):
			client_socket = socket.socket(
				socket.AF_INET,
				socket.SOCK_STREAM
				)
			tls_sock = ssl.wrap_socket(
				client_socket,
				cert_reqs=ssl.CERT_NONE,
				do_handshake_on_connect=False,
				ssl_version=ssl.PROTOCOL_TLSv1
				)
			tls_sock.settimeout(1)
			#print 'sending'
			try:
				tls_sock.connect((container.address,9999))
				#print 'connected'
			except Exception, e:
				pass
			try:
				# First pass measures the compressed size, which is stored on the
				# container; second pass re-pickles so csize travels with the payload.
				compressed = bz2.compress(pickle.dumps(container))
				container.csize = len(compressed)
				compressed = bz2.compress(pickle.dumps(container))
				tls_sock.send(compressed)
				#print 'sent'
			except Exception, e:
				print e
Example #9
def main(filename):
    f = open(filename)
    f.readline() # Discard the comment line
    f.readline()
    width, height, ncolors, bpc = f.readline()[1:-3].split()
    ncolors = int(ncolors)
    width = int(width)
    height = int(height)
    colors = {}
    for n in range(ncolors):
        line = f.readline()
        parts = line[1:-3].split()
        if len(parts) == 2:
            continue
        char, _, color = parts
        color = int(color[1:3], 16)
        colors[char] = color
    pixels = []
    y = height - 1
    for n in range(height):
        line = f.readline()
        cols = line[1:width + 1]
        for x, char in enumerate(cols):
            if char != TRANSPARENT:
                pixels.append((x, y, colors[char]))
        y -= 1
    data = cPickle.dumps(pixels)
    print bz2.compress(data).encode('base64')
Example #10
def save_build(project, branch, system, data):
    """save build to disk"""
    # pylint: disable=too-many-locals
    validate_build(project, branch, system)
    if not data:
        raise ValueError("build should have data")

    metadata = metadata_for_build(project, branch, system, "current")
    if "commit" in metadata and metadata["commit"] == data["commit"]:
        if not data["force"]:
            print("This commit is already built")
            return
        delete_build(project, branch, system, "current")

    date = datetime.utcnow()
    fsdate = date.strftime("%Y%m%d%H%M%S")
    buildpath = config.build_directory(project, branch, system)
    if not os.path.isdir(buildpath):
        os.makedirs(buildpath)
    currentpath = os.path.join(buildpath, "current")
    buildpath = os.path.join(buildpath, fsdate)
    if not os.path.isdir(buildpath):
        os.makedirs(buildpath)

    metadata["date"] = date.isoformat()
    metadata["client"] = data["client"]
    metadata["commit"] = data["commit"]
    metadata["description"] = data["description"]
    metadata["upstream"] = data["upstream"]

    posthook = {}
    posthook["github"] = check_github_posthook(data, metadata)

    for key, value in data.items():
        if key not in STUSKEYS:
            continue

        metadata[key] = {"status": value["status"]}

        if "log" in value and value["log"]:
            text = remove_control_characters(b64decode(value["log"].encode("UTF-8")).decode("UTF-8"))
            buildlog = bz2.compress(text.encode("UTF-8"))
            with open(os.path.join(buildpath, "{}-log.bz2".format(key)), "wb") as fle:
                fle.write(buildlog)

        if "zip" in value and value["zip"]:
            buildzip = b64decode(value["zip"].encode("UTF-8"))
            with open(os.path.join(buildpath, "{}.zip".format(key)), "wb") as fle:
                fle.write(buildzip)

    with open(os.path.join(buildpath, "metadata.bz2"), "wb") as fle:
        if config.config["github"] and posthook["github"]:
            handle_github(project, branch, system, fsdate, metadata)
        fle.write(bz2.compress(json.dumps(metadata).encode("UTF-8")))
        if os.path.lexists(currentpath):
            os.unlink(currentpath)
        os.symlink(fsdate, currentpath)
        print("[SAVED] {}".format(project))
Example #11
def comptest(s):
    print 'original length:', len(s),' ', s
    print 'zlib compressed length:', len(zlib.compress(s)),' ', zlib.compress(s)
    print 'bz2 compressed length:', len(bz2.compress(s)),' ', bz2.compress(s)
    
    out = StringIO.StringIO()
    infile = StringIO.StringIO(s)    
    uu.encode(infile, out)
    print 'uu:', len(out.getvalue()), out.getvalue()  
Example #12
def kolmogorov(source0, source1):
    """approximate Kolmogorov distance via compression"""
    source0, source1 = source0.encode('utf-8'), source1.encode('utf-8')

    comp01 = len(bz2.compress(source0))
    comp10 = len(bz2.compress(source1))
    comp11 = len(bz2.compress(source0 + source1))

    return float(comp11 - min(comp01, comp10)) / max(comp01, comp10)
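This is the usual normalized compression distance. An illustrative check (exact values depend on the bz2 build, so treat the numbers as approximate):

print(kolmogorov('abcabc' * 100, 'abcabc' * 100))  # near 0.0: identical inputs
print(kolmogorov('abcabc' * 100, 'xyzqrs' * 100))  # noticeably larger: unrelated inputs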
Example #13
 def _expected_files(self):
     return {
         'asciicast[meta]':
             ('meta.json', json.dumps({ 'shell': '/bin/sh',
                                        'user_token': 'a1b2c3' })),
         'asciicast[stdout]':
             ('stdout', bz2.compress(b'data123')),
         'asciicast[stdout_timing]':
             ('stdout.time', bz2.compress(b'timing456'))
     }
Example #14
    def diff(self, oldFiles):
        # Create a patch in the target directory, if need be, and then return our entry in the manifest
        # First of all, is there a current version of this file?
        if not os.path.exists(self.loc):
            print "Current version of file %s does not exist, aborting! You should've told me this file isn't managed any more :(" % self.name
            exit(1)
 
        currentHash = self.currentHash()
        # bz2 myself and toss it on disk
        me = self.getContents()
        me = bz2.compress(me)
        compHash = self.__hash(me)
        compressedSelf = open(self.loc + '.bz2', 'wb')
        compressedSelf.write(me)
        compressedSelf.close()
 
        # if this is a first-time manifest
        if not oldFiles:
            # New file, don't have patches or anything
            return {'hash': currentHash, 'dl': self.name + '.bz2', 'compHash': compHash, 'patches': {}}
        fileEntry = {'hash': currentHash, 'dl': self.name + '.bz2', 'compHash': compHash, 'patches': {}}
 
        # iterate through all the old versions we'll be diffing against
        for oldFile in oldFiles:
            oldFileHandle = oldFile._getFile('rb')
            if oldFileHandle is None:
                # Old file doesn't exist, w/e
                continue
            oldFileHandle.close()
            oldHash = oldFile.currentHash()
            if oldHash == currentHash:
                # easy
                continue
            # Does a patch already exist?
            if oldHash in fileEntry['patches']:
                # Yep, it does
                continue
            # ooooooh, we have to make a patch. start by setting up where the patch will go
            patchName = '%s_%s_to_%s.patch.bin' % (os.path.basename(self.name), oldHash[:5], currentHash[:5])
            print 'Diffing file %s: %s/%s -> %s' % (self.name, oldFile.installBase, oldHash[:5], currentHash[:5])
            patchPath = os.path.join(os.path.join(self.installBase, os.path.split(self.name)[0]), patchName)
            # Then, do the diff in-memory
            patchContents = bsdiff4.diff(oldFile.getContents(), self.getContents())
            # Figure out the hash of the patch
            patchHash = self.__hash(patchContents)
            # Then compress it!
            patchContents = bz2.compress(patchContents)
            # Then hash it again! Isn't this fun?
            compPatchHash = self.__hash(patchContents)
            # Then finally write it to disk
            patchHandle = open(patchPath, 'wb')
            patchHandle.write(patchContents)
            patchHandle.close()
            fileEntry['patches'][oldHash] = {'filename': os.path.join(os.path.dirname(self.name), patchName), 'patchHash': patchHash, 'compPatchHash': compPatchHash}
        return fileEntry
Example #15
File: login.py Project: bmelton/CLAIM
def ask_credentials():
	global settings_file
	USER = raw_input("username: ")
	PASS = raw_input("password: ")  # these two reads were masked (******) in the scrape; reconstructed
	user_dict = {"user": USER, "pass": PASS}
	f = open(settings_file, "w")
	pickle.dump(user_dict, f)
	f.close()
Example #16
def compile_string(source, filename, data = "\x00"):
    path = os.path.abspath(os.path.join(*__file__.split("/")[:-1] + ["parser.py"]))
    
    if type(source) is dict:
        bytes = bz2.compress(marshal.dumps( (source, data) ))
    else:
        code = _compile_string(source, filename)
        bytes = bz2.compress(marshal.dumps( ({"__main__" : code}, data) ))

    bytecode = "\x00LOVE" + bytes

    return bytecode
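A hypothetical loader for this container (assumed, not part of the source): strip the five-byte "\x00LOVE" magic, then undo the bz2 and marshal layers.

def load_string(bytecode):
    # Assumed inverse of compile_string: magic prefix, then bz2, then marshal.
    assert bytecode[:5] == "\x00LOVE"
    modules, data = marshal.loads(bz2.decompress(bytecode[5:]))
    return modules, data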
Example #17
def ncd(filex, filey):
    xbytes = open(filex, 'r').read()
    ybytes = open(filey, 'r').read()
    xybytes = xbytes + ybytes
    cx = bz2.compress(xbytes)
    cy = bz2.compress(ybytes)
    cxy = bz2.compress(xybytes)
    if len(cy) > len(cx):
        n = (len(cxy) - len(cx)) / float(len(cy))
    else:
        n = (len(cxy) - len(cy)) / float(len(cx))
    return n
Example #18
def stats(n=8*1048576*10):
    t0 = time.time()
    bufb = sieveBit(n//8)
    print('bufb done ({:.2f}s)'.format(time.time()-t0))
    t0 = time.time()
    bufB = sieveByte(n)
    print('bufB done ({:.2f}s)'.format(time.time()-t0))
    t0 = time.time()
    print('deflate: b {: 6} B {: 6} ({:.2f}s)'.format(len(zlib.compress(bufb,9)), len(zlib.compress(bufB,9)), time.time()-t0))
    t0 = time.time()
    print('bz2    : b {: 6} B {: 6} ({:.2f}s)'.format(len(bz2.compress(bufb,9)), len(bz2.compress(bufB,9)), time.time()-t0))
    t0 = time.time()
    print('lzma   : b {: 6} B {: 6} ({:.2f}s)'.format(len(lzma.compress(bufb)), len(lzma.compress(bufB)), time.time()-t0))
Example #19
 def filtercall(self, data, inout):
     if inout == COMPRESSFOR : 
         if not data :
             print "something is f****d up  %s" % data
             return data
         if DEBUG :
             print "Before compression for %s " % COMPRESSFOR
             print data
             bzdata = bz2.compress(data, COMPRESS_LEVEL)
             print "After compresson for %s " % COMPRESSFOR
             print bzdata
             return bzdata
         return bz2.compress(data, COMPRESS_LEVEL)
     else : return data
Example #20
    def on_ok(self, widget, callback_data=None):
        logging.debug("Saving configuration")
        message = None
        for element in self.elements:
            if (type(self.options[element[0]]) == int):
                try:
                    self.options[element[0]] = int(element[2].get_text())
                except:
                    message = self.lang["config_error_value"] % element[3].get_text()
                    break
            else:
                self.options[element[0]] = element[2].get_text()
        if message:
            logging.info(message)
            dialog = gtk.MessageDialog(buttons=gtk.BUTTONS_OK, type=gtk.MESSAGE_ERROR)
            dialog.set_position(gtk.WIN_POS_CENTER)
            dialog.set_title(self.lang["program"])
            dialog.set_markup(message)
            dialog.run()
            dialog.destroy()
            return

        active_iter = self.cbo_langs.get_active_iter()
        self.options["lang"] = self.cbo_langs.get_model().get_value(active_iter, 0)

        for key in self.options.keys():
            self.config.set("options", key, self.options[key])

        if not self.save.get_active():
            self.config.remove_option("options", "gmailusername")
            self.config.remove_option("options", "gmailpassword")
        else:
            if self.options["gmailusername"]:
                self.config.set("options", "gmailusername", bz2.compress(self.mix(self.options["gmailusername"])).encode('base64')[:-1])
            if self.options["gmailpassword"]:
                self.config.set("options", "gmailpassword", bz2.compress(self.mix(self.options["gmailpassword"])).encode('base64')[:-1])
        try:
            self.ensure_dir(self.loaded_config)
            self.config.write(open(self.loaded_config, 'w'))
        except:
            logging.info("Can't save settings to file!")
            dialog = gtk.MessageDialog(buttons=gtk.BUTTONS_OK, type=gtk.MESSAGE_WARNING)
            dialog.set_position(gtk.WIN_POS_CENTER)
            dialog.set_title(self.lang["program"])
            dialog.set_markup(self.lang["config_error_save"])
            dialog.run()
            dialog.destroy()
            return
        gtk.main_quit()
        self.hide()
Example #21
 def testMain(self):
     # test encoder/decoder:
     def test(codec, f):
         for s in ["Hello World!"]:
             assert event.decode(codec, f(s)) == s
             assert event.decode(codec, event.encode(codec, s)) == s
     test('', lambda x: x)
     test(None, lambda x: x)
     test('|||', lambda x: x)
     test('base64', lambda x: base64.encodestring(x))
     #test('base64', lambda x: base64.b64encode(x))
     test('gz', lambda x: zlib.compress(x))
     test('bz2', lambda x: bz2.compress(x))
     test('bz2|base64', lambda x: base64.encodestring(bz2.compress(x)))
     test('|bz2||base64|', lambda x: base64.encodestring(bz2.compress(x)))
Example #22
    def dist(self, src, tar):
        """Return the NCD between two strings using bzip2 compression.

        Parameters
        ----------
        src : str
            Source string for comparison
        tar : str
            Target string for comparison

        Returns
        -------
        float
            Compression distance

        Examples
        --------
        >>> cmp = NCDbz2()
        >>> cmp.dist('cat', 'hat')
        0.06666666666666667
        >>> cmp.dist('Niall', 'Neil')
        0.03125
        >>> cmp.dist('aluminum', 'Catalan')
        0.17647058823529413
        >>> cmp.dist('ATCG', 'TAGC')
        0.03125


        .. versionadded:: 0.3.5
        .. versionchanged:: 0.3.6
            Encapsulated in class

        """
        if src == tar:
            return 0.0

        src = src.encode('utf-8')
        tar = tar.encode('utf-8')

        # bz2 output begins with a constant 10-byte stream header; slicing it
        # off keeps that fixed overhead from skewing the distance.
        src_comp = bz2.compress(src, self._level)[10:]
        tar_comp = bz2.compress(tar, self._level)[10:]
        concat_comp = bz2.compress(src + tar, self._level)[10:]
        concat_comp2 = bz2.compress(tar + src, self._level)[10:]

        return (
            min(len(concat_comp), len(concat_comp2))
            - min(len(src_comp), len(tar_comp))
        ) / max(len(src_comp), len(tar_comp))
Example #23
    def test_cat_bz2(self):
        self.add_mock_s3_data(
            {'walrus': {'data/foo.bz2': bz2.compress(b'foo\n' * 1000)}})

        self.assertEqual(
            b''.join(self.fs._cat_file('s3://walrus/data/foo.bz2')),
            b'foo\n' * 1000)
Example #24
 def _transformb(self, bdata, pwd='', arch='zlib'):
     '''
     Transforms any binary data into ready-to-write SQL information. \n\
     zlib is faster, bz2 is stronger. \n\
     '''
     if arch=='bz2':
         vCompressed = bz2.compress(bdata,6)
     else:
         vCompressed = zlib.compress(bdata,9)
     # If password is null in some way, do not encrypt.
     if not pwd:
         return buffer(vCompressed)
     # If using global password.
     elif pwd == 1:
         # If global password is null, do not encrypt.
         if not self.glob_key:
             return buffer(vCompressed)
         # If global password exists, use it.
         else:
             pwd = self.glob_key
     # If password is provided, generate key derivation.
     else:
         pwd = PBKDF2(password=pwd, salt=self.glob_salt, dkLen=32, count=1000)
     # Encrypt and return.
     crypt = AES.new(pwd)
     padding = 'X' * ( (((len(vCompressed)/16)+1)*16) - len(vCompressed) )
     vCrypt = crypt.encrypt(vCompressed + padding)
     return buffer(vCrypt)
Example #25
def fetch_plugins(old_index):
    ans = {}
    pool = ThreadPool(processes=10)
    entries = tuple(parse_index())
    result = pool.map(partial(parallel_fetch, old_index), entries)
    for entry, plugin in zip(entries, result):
        if isinstance(plugin, dict):
            ans[entry.name] = plugin
        else:
            if entry.name in old_index:
                ans[entry.name] = old_index[entry.name]
            log('Failed to get plugin', entry.name, 'at', datetime.utcnow().isoformat(), 'with error:')
            log(plugin)
    # Move staged files
    for plugin in ans.itervalues():
        if plugin['file'].startswith('staging_'):
            src = plugin['file']
            plugin['file'] = src.partition('_')[-1]
            os.rename(src, plugin['file'])
    raw = bz2.compress(json.dumps(ans, sort_keys=True, indent=4, separators=(',', ': ')))
    atomic_write(raw, PLUGINS)
    # Cleanup any extra .zip files
    all_plugin_files = {p['file'] for p in ans.itervalues()}
    extra = set(glob.glob('*.zip')) - all_plugin_files
    for x in extra:
        os.unlink(x)
    return ans
Example #26
    def saveToDisk(cls, data, logFile, token, compress=False):
        if data is None or logFile is None or token is None:
            raise InvalidLogFileException()

        salt = get_random_bytes(cls._salt_size)
        if len(salt) != cls._salt_size:
            raise EncryptionException()

        (enc_key, hmac_key) = cls._getKeys(token, salt)

        nonce = get_random_bytes(cls._nonce_size)
        if len(nonce) != cls._nonce_size:
            raise EncryptionException()

        ctr = Crypto.Util.Counter.new(cls._counter_size, prefix=nonce)
        cipher = Crypto.Cipher.AES.new(enc_key, Crypto.Cipher.AES.MODE_CTR, counter=ctr)

        if compress:
            data = bz2.compress(bytes(data, 'utf-8'), cls._compression_level)

        ciphertext = cipher.encrypt(data)
        ciphertext_len = len(ciphertext)
        new_hmac = hmac.new(hmac_key, salt + nonce + ciphertext + struct.pack(">I", ciphertext_len), cls._hmac_hash_algo)

        (fd, temporary_file_name) = tempfile.mkstemp()
        os.write(fd, salt)
        os.write(fd, nonce)
        os.write(fd, new_hmac.digest())
        os.write(fd, ciphertext)
        os.fsync(fd)
        os.rename(temporary_file_name, logFile)
        os.close(fd)
Example #27
    def upload_tarball(self):
        if self.skip_uploading:
            print(f'Skipping uploading for package {self.name}')
            return

        bintray_api_key = os.getenv('BINTRAY_API_KEY')
        if not bintray_api_key:
            print(f'No Bintray API key found. Skipping uploading {self.tarball_name}...')
            return

        print(f'Uploading {self.tarball_name} to Bintray...')

        with open(self.tarball_path, 'rb') as pkg:
            import requests
            r = requests.put(
                f'https://bintray.com/api/v1/content/{self.bintray_path}',
                data=bz2.compress(pkg.read()),
                auth=(env.bintray_username, bintray_api_key),
                headers={
                    'X-Bintray-Package': self.name,
                    'X-Bintray-Version': self.bintray_version,
                    'X-Bintray-Publish': '1',
                    'X-Bintray-Override': '1',
                })

            print(f'Uploading result: {r.text}')
Example #28
def reportMemoryLeaks():
    if printUnreachableNum() == 0:
        return
    import bz2, gc
    gc.set_debug(gc.DEBUG_SAVEALL)
    gc.collect()
    uncompressedReport = ''
    for s in gc.garbage:
        try:
            uncompressedReport += str(s) + '&'
        except TypeError:
            pass

    reportdata = bz2.compress(uncompressedReport, 9)
    headers = {'Content-type': 'application/x-bzip2',
     'Accept': 'text/plain'}
    try:
        baseURL = patcherVer()[0].split('/lo')[0]
    except IndexError:
        print 'Base URL not available for leak submit'
        return

    basePort = 80
    if baseURL.count(':') == 2:
        basePort = int(baseURL[-4:])
        baseURL = baseURL[:-5]
    baseURL = baseURL[7:]
    if basePort != 80:
        finalURL = 'http://' + baseURL + ':' + str(basePort) + '/logging/memory_leak.php?leakcount=' + str(printUnreachableNum())
    else:
        finalURL = 'http://' + baseURL + '/logging/memory_leak.php?leakcount=' + str(printUnreachableNum())
    reporthttp = HTTPClient()
    reporthttp.postForm(URLSpec(finalURL), reportdata)
Example #29
    def interaction(self, message, frame, info):
        if _VERBOSE:
            print 'hit debug side interaction'
        self._adb._userBreak = False

        self._currentFrame = frame
        done = False
        while not done:
            try:
                xml = self.getFrameXML(frame)
                arg = xmlrpclib.Binary(bz2.compress(xml))
                if _VERBOSE:
                    print '============== calling gui side interaction============'
                self._guiServer.interaction(xmlrpclib.Binary(message), arg, info)
                if _VERBOSE:
                    print 'after interaction'
                done = True
            except:
                tp, val, tb = sys.exc_info()
                if True or _VERBOSE:
                    print 'Error contacting GUI server!: '
                    try:
                        traceback.print_exception(tp, val, tb)
                    except:
                        print "Exception printing traceback", 
                        tp, val, tb = sys.exc_info()
                        traceback.print_exception(tp, val, tb)
                done = False
        # Block while waiting to be called back from the GUI. Eventually, self._wait will
        # be set false by a function on this side. Seems pretty lame--I'm surprised it works.
        self.waitForRPC()
Example #30
File: c2r.py Project: ilanschnell/misc
def write_index(index):
    repodata = {"packages": index}
    data = json.dumps(repodata, indent=2, sort_keys=True)
    with open(join(repo_path, "repodata.json"), "w") as fo:
        fo.write(data)
    with open(join(repo_path, "repodata.json.bz2"), "wb") as fo:
        fo.write(bz2.compress(data.encode("utf-8")))
Example #31
def BZIP2(content):
    return len(bz2.compress(content))
Example #32
def encode(data):
    return sqlite3.Binary(bz2.compress(json.dumps(data).encode()))
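The matching read path would undo both layers (an assumed inverse, not shown in the source):

def decode(blob):
    # Assumed inverse of encode above: undo bz2, then JSON.
    return json.loads(bz2.decompress(bytes(blob)).decode())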
Example #33
File: ips.py Project: bopopescu/icsw
 def process(self, ccs):
     _com_line, com_type, ctrl_num = ccs.run_info["command"]
     if com_type == "config":
         ctrl_config = {
             "logical": {},
             "array": {},
             "channel": {},
             "physical": [],
             "controller": {}
         }
         act_part, prev_line = ("", "")
         for line in ccs.read().split("\n"):
             ls = line.strip()
             lsl = ls.lower()
             # get key and value, space is important here
             if lsl.count(" :"):
                 key, value = [entry.strip() for entry in lsl.split(" :", 1)]
             else:
                 key, value = (None, None)
             if prev_line.startswith("-" * 10) and line.endswith("information"):
                 act_part = " ".join(line.split()[0:2]).lower().replace(" ", "_").replace("drive", "device")
             elif line.lower().startswith("command complet") or line.startswith("-" * 10):
                 pass
             else:
                 if act_part == "logical_device":
                     if line.lower().count("logical device number") or line.lower().count("logical drive number"):
                         act_log_drv_num = int(line.split()[-1])
                         ctrl_config["logical"][act_log_drv_num] = {}
                     elif line.lower().strip().startswith("logical device name"):
                         array_name = ls.split()[1]
                         ctrl_config["array"][array_name] = " ".join(line.lower().strip().split()[2:])
                     elif line.count(":"):
                         key, val = _split_config_line(line)
                         ctrl_config["logical"][act_log_drv_num][key] = val
                 elif act_part == "physical_device":
                     if lsl.startswith("channel #"):
                         act_channel_num = int(lsl[-2])
                         ctrl_config["channel"][act_channel_num] = {}
                         act_scsi_stuff = None
                     elif lsl.startswith("device #"):
                         act_scsi_id = int(lsl[-1])
                         act_channel_num = -1
                         act_scsi_stuff = {}
                     elif lsl.startswith("reported channel,device"):
                         # key should be set here
                         if key.endswith(")"):
                             key, value = (key.split("(", 1)[0],
                                           value.split("(", 1)[0])
                         act_scsi_id = int(value.split(",")[-1])
                         if act_channel_num == -1:
                             act_channel_num = int(value.split(",")[-2].split()[-1])
                             ctrl_config["channel"][act_channel_num] = {}
                         ctrl_config["channel"][act_channel_num][act_scsi_id] = key
                         act_scsi_stuff["channel"] = act_channel_num
                         act_scsi_stuff["scsi_id"] = act_scsi_id
                         ctrl_config["channel"][act_channel_num][act_scsi_id] = act_scsi_stuff
                         ctrl_config["physical"].append(act_scsi_stuff)
                     elif line.count(":"):
                         if act_scsi_stuff is not None:
                             key, val = _split_config_line(line)
                             act_scsi_stuff[key] = val
                 elif act_part == "controller_information":
                     if key:
                         ctrl_config["controller"][key] = value
                 # print act_part, linea
             prev_line = line
         self._dict[ctrl_num].update(ctrl_config)
     elif com_type == "status":
         task_list = []
         act_task = None
         for line in ccs.read().split("\n"):
             lline = line.lower()
             if lline.startswith("logical device task"):
                 act_task = {"header": lline}
             elif act_task:
                 if lline.count(":"):
                     key, value = [part.strip().lower() for part in lline.split(":", 1)]
                     act_task[key] = value
             if not lline.strip():
                 if act_task:
                     task_list.append(act_task)
                     act_task = None
         self._dict[ctrl_num]["config"]["task_list"] = task_list
     if ctrl_num == max(self._dict.keys()) and com_type == "status":
         ccs.srv_com["ips_dict_base64"] = base64.b64encode(bz2.compress(marshal.dumps(self._dict)))
Example #34
 def compress(s):
     bz2.compress(s)
Example #35
def dumps(obj):
    obj = pickle.dumps(obj)
    obj = zlib.compress(obj)
    obj = bz2.compress(obj)
    return base64.b64encode(obj)
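An assumed inverse (not in the source) simply peels the layers in reverse order:

def loads(blob):
    # Assumed inverse of dumps above.
    obj = base64.b64decode(blob)
    obj = bz2.decompress(obj)
    obj = zlib.decompress(obj)
    return pickle.loads(obj)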
Example #36
 def test_bz2_round_trip(self, payload, compresslevel):
     result = bz2.decompress(
         bz2.compress(payload, compresslevel=compresslevel))
     self.assertEqual(payload, result)
Example #37
def dumps_bz2(data):
    import bz2
    return bz2.compress(data, 9)
Example #38
    def _sync_data(self, host):
        '''
            Periodically, the county-level server calls the resource-platform
            API to sync class-session data.
        '''
        objs = models.CountryToResourcePlatformSyncLog.objects.filter(
            used=False).order_by('created_at')

        post_data = []

        for obj in objs:
            data = {}
            one = serializers.deserialize('json', obj.operation_content)
            pk = obj.pk
            for i in one:
                # teacherloginlog: the class-session record
                teacherloginlog = i.object.teacherloginlog
                model_name = i.object.__class__.__name__
                data.update({
                    'pk': pk,
                    'model_name': model_name,
                    'term_school_year': teacherloginlog.term_school_year,
                    'term_type': teacherloginlog.term_type,
                    'province_name': teacherloginlog.province_name,
                    'city_name': teacherloginlog.city_name,
                    'country_name': teacherloginlog.country_name,
                    'town_name': teacherloginlog.town_name,
                    'school_name': teacherloginlog.school_name,
                    'grade_name': teacherloginlog.grade_name,
                    'class_name': teacherloginlog.class_name,
                    'lesson_name': teacherloginlog.lesson_name,
                    'teacher_name': teacherloginlog.teacher_name,
                    'lesson_period_start_time':
                        teacherloginlog.lesson_period_start_time.strftime('%H:%M:%S'),
                    'lesson_period_end_time':
                        teacherloginlog.lesson_period_end_time.strftime('%H:%M:%S'),
                    'weekday': teacherloginlog.weekday,
                    'log_create_time':
                        teacherloginlog.created_at.strftime('%Y-%m-%d %H:%M:%S')
                })
                # Syllabus info linked to this class session
                if model_name == 'TeacherLoginLogLessonContent':
                    lessoncontent = i.object.lessoncontent

                    data.update({
                        'publish': lessoncontent.syllabus_grade_lesson.publish,
                        'bookversion': lessoncontent.syllabus_grade_lesson.bookversion,
                        'seq': lessoncontent.seq,
                        'subseq': lessoncontent.subseq,
                        'title': lessoncontent.title
                    })
                    if lessoncontent.parent:
                        parent_title = lessoncontent.parent.title
                    else:
                        parent_title = u''
                    data.update({'parent_title': parent_title})

                # Courseware info linked to this class session
                elif model_name == 'TeacherLoginLogCourseWare':
                    courseware = i.object.courseware

                    data.update({
                        'md5': courseware.md5,
                        'title': courseware.title,
                        'size': courseware.size,
                        'use_times': courseware.use_times,
                        'download_times': courseware.download_times,
                        'qiniu_url': courseware.qiniu_url,
                        'log_create_time':
                            courseware.create_time.strftime('%Y-%m-%d %H:%M:%S')
                    })

            post_data.append(data)

        url = "%s/view/api/sync/country-to-resourceplatform/" % (host)

        try:
            ret = requests.post(
                url,
                data={'data': base64.b64encode(bz2.compress(json.dumps(post_data)))},
                timeout=120)
            ret = ret.json()

            if ret['status'] == 'success':
                for obj in objs.filter(pk__in=ret['success_pk']):
                    obj.used = True
                    obj.save()

        except:
            self.logger.exception('')
Example #39
def bz2_len_json(metadata):
    return len(bz2.compress(jsonify(metadata)))
Example #40
 def update_event(self, inp=-1):
     self.set_output_val(0, bz2.compress(self.input(0), self.input(1)))
Example #41
File: beef.py Project: gabrielat/noc
    def compress_bz2(data):
        import bz2

        return bz2.compress(data)
Example #42
File: conf.py Project: Spencerx/osc
def passx_encode(passwd):
    """encode plain text password to obfuscated form"""
    return base64.b64encode(bz2.compress(
        passwd.encode('ascii'))).decode("ascii")
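The corresponding decoder reverses both layers; a sketch of the assumed inverse (the real project may differ in detail):

def passx_decode(passx):
    """decode an obfuscated password (assumed inverse of passx_encode)"""
    return bz2.decompress(base64.b64decode(passx.encode("ascii"))).decode("ascii")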
Example #43
 def write(self, fobj):
     _io = io.BytesIO()
     self.serialize(_io)
     fobj.write(bz2.compress(_io.getvalue()))
Example #44
 def testCompress(self):
     # "Test compress() function"
     data = bz2.compress(self.TEXT)
     self.assertEqual(self.decompress(data), self.TEXT)
Example #45
def _send_to_scribe_via_boto3(logs: str) -> str:
    sprint("Scribe access token not provided, sending report via boto3...")
    event = {"base64_bz2_logs": base64.b64encode(bz2.compress(logs.encode())).decode()}
    return str(invoke_lambda("gh-ci-scribe-proxy", event))
Example #46
 def compress(self, stream):
     return BytesIO(bz2.compress(b''.join(stream)))
Example #47
    def _encode(self, f, value, ext_id):
        """ Main encoder function.
        """

        x = encode_type_id

        if value is None:
            f.write(x(b'v', ext_id))  # V for void
        elif value is True:
            f.write(x(b'y', ext_id))  # Y for yes
        elif value is False:
            f.write(x(b'n', ext_id))  # N for no
        elif isinstance(value, int):
            if -32768 <= value <= 32767:
                f.write(x(b'h', ext_id) + spack('h', value))  # H for ...
            else:
                f.write(x(b'i', ext_id) + spack('<q', value))  # I for int
        elif isinstance(value, float):
            if self._float64:
                f.write(x(b'd', ext_id) + spack('<d', value))  # D for double
            else:
                f.write(x(b'f', ext_id) + spack('<f', value))  # f for float
        elif isinstance(value, str):
            bb = value.encode('UTF-8')
            f.write(x(b's', ext_id) + lencode(len(bb)))  # S for str
            f.write(bb)
        elif isinstance(value, (list, tuple)):
            f.write(x(b'l', ext_id) + lencode(len(value)))  # L for list
            for v in value:
                self._encode(f, v, None)
        elif isinstance(value, dict):
            f.write(x(b'm', ext_id) + lencode(len(value)))  # M for mapping
            for key, v in value.items():
                assert isinstance(key, str)
                name_b = key.encode('UTF-8')
                f.write(lencode(len(name_b)))
                f.write(name_b)
                self._encode(f, v, None)
        elif isinstance(value, bytes):
            f.write(x(b'b', ext_id))  # B for blob
            # Compress
            compression = self._compression
            if compression == 0:
                compressed = value
            elif compression == 1:
                compressed = zlib.compress(value, 9)
            elif compression == 2:
                compressed = bz2.compress(value, 9)
            else:
                assert False, 'Unknown compression identifier'
            # Get sizes
            data_size = len(value)
            used_size = len(compressed)
            extra_size = 0
            allocated_size = used_size + extra_size
            # Write sizes - write at least in a size that allows resizing
            if allocated_size <= 250 and compression == 0:
                f.write(spack('<B', allocated_size))
                f.write(spack('<B', used_size))
                f.write(lencode(data_size))
            else:
                f.write(spack('<BQ', 253, allocated_size))
                f.write(spack('<BQ', 253, used_size))
                f.write(spack('<BQ', 253, data_size))
            # Compression and checksum
            f.write(spack('B', compression))
            if self._use_checksum:
                f.write(b'\xff' + hashlib.md5(compressed).digest())
            else:
                f.write(b'\x00')
            # Byte alignment (only necessary for uncompressed data)
            if compression == 0:
                alignment = 8 - (f.tell() + 1) % 8  # +1 for the byte to write
                f.write(spack('<B', alignment))  # padding for byte alignment
                f.write(b'\x00' * alignment)
            else:
                f.write(spack('<B', 0))
            # The actual data and extra space
            f.write(compressed)
            f.write(b'\x00' * (allocated_size - used_size))
        elif getattr(value, "shape", None) == () and str(
                getattr(value, "dtype", "")).startswith(
                    ("uint", "int", "float")):
            # Implicit conversion of numpy scalars
            if 'int' in str(value.dtype):
                value = int(value)
                if -32768 <= value <= 32767:
                    f.write(x(b'h', ext_id) + spack('h', value))
                else:
                    f.write(x(b'i', ext_id) + spack('<q', value))
            else:
                value = float(value)
                if self._float64:
                    f.write(x(b'd', ext_id) + spack('<d', value))
                else:
                    f.write(x(b'f', ext_id) + spack('<f', value))
        else:
            if ext_id is not None:
                raise ValueError(
                    'Extension %s wrongfully encodes object to another '
                    'extension object (though it may encode to a list/dict '
                    'that contains other extension objects).' % ext_id)
            # Try if the value is of a type we know
            ex = self._extensions_by_cls.get(value.__class__, None)
            # Maybe its a subclass of a type we know
            if ex is None:
                for name, c in self._extensions.items():
                    if c.match(self, value):
                        ex = name, c.encode
                        break
                else:
                    ex = None
            # Success or fail
            if ex is not None:
                ext_id2, extension_encode = ex
                self._encode(f, extension_encode(self, value), ext_id2)
            else:
                t = ('Class %r is not a valid base BSDF type, nor is it '
                     'handled by an extension.')
                raise TypeError(t % value.__class__.__name__)
Example #48
def JS2C(source, target, env):
  ids = []
  debugger_ids = []
  modules = []
  # Locate the macros file name.
  consts = []
  macros = []
  for s in source:
    if 'macros.py' == (os.path.split(str(s))[1]):
      (consts, macros) = ReadMacros(ReadLines(str(s)))
    else:
      modules.append(s)

  minifier = jsmin.JavaScriptMinifier()

  module_offset = 0
  all_sources = []
  for module in modules:
    filename = str(module)
    debugger = filename.endswith('-debugger.js')
    lines = ReadFile(filename)
    lines = ExpandConstants(lines, consts)
    lines = ExpandMacros(lines, macros)
    Validate(lines, filename)
    if not env['DEBUG']:
      lines = minifier.JSMinify(lines)
    id = (os.path.split(filename)[1])[:-3]
    if debugger: id = id[:-9]
    raw_length = len(lines)
    if debugger:
      debugger_ids.append((id, raw_length, module_offset))
    else:
      ids.append((id, raw_length, module_offset))
    all_sources.append(lines)
    module_offset += raw_length
  total_length = raw_total_length = module_offset

  if env['COMPRESSION'] == 'off':
    raw_sources_declaration = RAW_SOURCES_DECLARATION
    sources_data = ToCAsciiArray("".join(all_sources))
  else:
    raw_sources_declaration = RAW_SOURCES_COMPRESSION_DECLARATION
    if env['COMPRESSION'] == 'bz2':
      all_sources = bz2.compress("".join(all_sources))
    total_length = len(all_sources)
    sources_data = ToCArray(all_sources)

  # Build debugger support functions
  get_index_cases = [ ]
  get_raw_script_source_cases = [ ]
  get_script_name_cases = [ ]

  i = 0
  for (id, raw_length, module_offset) in debugger_ids + ids:
    native_name = "native %s.js" % id
    get_index_cases.append(GET_INDEX_CASE % { 'id': id, 'i': i })
    get_raw_script_source_cases.append(GET_RAW_SCRIPT_SOURCE_CASE % {
        'offset': module_offset,
        'raw_length': raw_length,
        'i': i
        })
    get_script_name_cases.append(GET_SCRIPT_NAME_CASE % {
        'name': native_name,
        'length': len(native_name),
        'i': i
        })
    i = i + 1

  # Emit result
  output = open(str(target[0]), "w")
  output.write(HEADER_TEMPLATE % {
    'builtin_count': len(ids) + len(debugger_ids),
    'debugger_count': len(debugger_ids),
    'sources_data': sources_data,
    'raw_sources_declaration': raw_sources_declaration,
    'raw_total_length': raw_total_length,
    'total_length': total_length,
    'get_index_cases': "".join(get_index_cases),
    'get_raw_script_source_cases': "".join(get_raw_script_source_cases),
    'get_script_name_cases': "".join(get_script_name_cases),
    'type': env['TYPE']
  })
  output.close()
  return "".join(all_sources)
Example #49
 def object_to_param_str(change):
     """Convert a change object into a format suitable for passing in job
     parameters
     """
     return b64encode(compress(cPickle.dumps(change))).decode('utf8')
Example #50
def check_if_feed_changed(self, court_pk, feed_status_pk, date_last_built):
    """Check if the feed changed

    For now, we do this in a very simple way, by using the lastBuildDate field
    and checking if it differs from the last time we checked. One thing that
    makes this approach suboptimal is that we know the `lastBuildDate` field
    varies around the time that the feeds are actually...um, built. For
    example, we've seen the same feed with two different values for this field
    around the time that it is built. When this happens, the two values tend to
    be off by about a minute or so.

    If we were being very careful and really optimizing when we crawled these
    feeds, this would cause us trouble because we'd detect a change in this
    field when the actual data hadn't changed. But because we only crawl the
    feeds at most once every five minutes, and because the gaps we've observed
    in this field tend to only be about one minute, we can get away with this.

    Other solutions/thoughts we can consider later:

     - If the difference between two lastBuildDate values is less than two
       minutes assume it's the same feed.
     - Use hashing of the feed to determine if it has changed.

    One other oddity here is that we use regex parsing to grab the
    lastBuildDate value. This is because parsing the feed properly can take
    several seconds for a big feed.

    :param court_pk: The CL ID for the court object.
    :param feed_status_pk: The CL ID for the status object.
    :param date_last_built: The last time the court was scraped.
    """
    feed_status = RssFeedStatus.objects.get(pk=feed_status_pk)
    rss_feed = PacerRssFeed(map_cl_to_pacer_id(court_pk))
    try:
        rss_feed.query()
    except requests.RequestException as exc:
        logger.warning("Network error trying to get RSS feed at %s" %
                       rss_feed.url)
        abort_or_retry(self, feed_status, exc)
        return

    content = rss_feed.response.content
    if not content:
        try:
            raise Exception("Empty RSS document returned by PACER: %s" %
                            feed_status.court_id)
        except Exception as exc:
            logger.warning(str(exc))
            abort_or_retry(self, feed_status, exc)
            return

    current_build_date = get_last_build_date(content)
    if current_build_date:
        alert_on_staleness(current_build_date, feed_status.court_id,
                           rss_feed.url)
        feed_status.date_last_build = current_build_date
        feed_status.save()
    else:
        try:
            raise Exception("No last build date in RSS document returned by "
                            "PACER: %s" % feed_status.court_id)
        except Exception as exc:
            logger.warning(str(exc))
            abort_or_retry(self, feed_status, exc)
            return

    # Only check for early abortion during partial crawls.
    if date_last_built == current_build_date and not feed_status.is_sweep:
        logger.info(
            "%s: Feed has not changed since %s. Aborting.",
            feed_status.court_id,
            date_last_built,
        )
        # Abort. Nothing has changed here.
        self.request.chain = None
        mark_status(feed_status, RssFeedStatus.UNCHANGED)
        return

    logger.info("%s: Feed changed or doing a sweep. Moving on to the merge." %
                feed_status.court_id)
    rss_feed.parse()
    logger.info("%s: Got %s results to merge." %
                (feed_status.court_id, len(rss_feed.data)))

    # Update RSS entry types in Court table
    update_entry_types(court_pk, rss_feed.feed.feed.description)

    # Save the feed to the DB
    feed_data = RssFeedData(court_id=court_pk)
    try:
        feed_data.filepath.save("rss.xml.bz2",
                                ContentFile(bz2.compress(content)))
    except OSError as exc:
        if exc.errno == errno.EIO:
            abort_or_retry(self, feed_status, exc)
        else:
            raise exc

    return rss_feed.data
Example #51
def SendRemote(remote,data,timeout=0,bind_port=0):
    client = UDPService(remote,servicePort,timeout)
    if not bind_port:
        client.setOption(listen_addr=("",bind_port))
    client.set(bz2.compress(data))
Example #52
    def _data(self):
        '''
        Take and collect data, send and clean if needed
        '''
        logging.info('%s', threading.currentThread())
        api_host = self.config.get('data', 'api_host')
        api_path = self.config.get('data', 'api_path')
        max_age = self.config.getint('agent', 'max_data_age')
        max_span = self.config.getint('agent', 'max_data_span')
        server = self.config.get('agent', 'server')
        user = self.config.get('agent', 'user')
        interval = self.config.getint('data', 'interval')
        max_cached_collections = self.config.get('agent',
                                                 'max_cached_collections')
        cached_collections = []
        collection = []
        while True:
            loop_ts = time.time()
            if self.shutdown:
                logging.info('%s:shutdown', threading.currentThread())
                break
            logging.debug('%s:data_queue:%i:collection:%i',
                          threading.currentThread(), self.data.qsize(),
                          len(collection))
            while self.data.qsize():
                try:
                    collection.append(self.data.get_nowait())
                except Exception as e:
                    logging.error('Data queue error: %s' % e)
            if collection:
                first_ts = min((e['ts'] for e in collection))
                last_ts = max((e['ts'] for e in collection))
                now = time.time()
                send = False
                if last_ts - first_ts >= max_span:
                    logging.debug('Max data span')
                    send = True
                    clean = False
                elif now - first_ts >= max_age:
                    logging.warning('Max data age')
                    send = True
                    clean = True
                if send:
                    headers = {
                        "Content-type": "application/json",
                        "Authorization": "ApiKey %s:%s" % (user, server),
                    }
                    logging.debug(
                        'collection: %s',
                        json.dumps(collection, indent=2, sort_keys=True))
                    if not (server and user):
                        logging.warning('Empty server or user, nowhere to send.')
                        clean = True
                    else:
                        try:
                            if sys.version_info >= (3,):
                                connection = http.client.HTTPSConnection(api_host, timeout=15)
                            else:
                                connection = httplib.HTTPSConnection(api_host, timeout=15)

                            # Try to send cached collections first, if any
                            if cached_collections:
                                logging.info('Sending cached collections: %i',
                                             len(cached_collections))
                                while cached_collections:
                                    connection.request('PUT',
                                                       '%s?version=%s' % (api_path, __version__),
                                                       cached_collections[0],
                                                       headers=headers)
                                    response = connection.getresponse()
                                    response.read()
                                    if response.status == 200:
                                        # Remove the collection that was just sent
                                        del cached_collections[0]
                                        logging.debug('Successful response: %s',
                                                      response.status)
                                    else:
                                        raise ValueError('Unsuccessful response: %s' %
                                                         response.status)
                                logging.info('All cached collections sent')

                            # Send the recent collection (reuse the existing connection)
                            connection.request('PUT',
                                               '%s?version=%s' % (api_path, __version__),
                                               bz2.compress((json.dumps(collection) + "\n").encode()),
                                               headers=headers)
                            response = connection.getresponse()
                            response.read()

                            if response.status == 200:
                                logging.debug('Successful response: %s',
                                              response.status)
                                clean = True
                            else:
                                raise ValueError('Unsuccessful response: %s' %
                                                 response.status)
                        except Exception as e:
                            logging.error('Failed to submit collection: %s', e)

                            # Cache the recent collection for resending if the send failed
                            if max_cached_collections > 0:
                                if len(cached_collections) >= max_cached_collections:
                                    # Drop the oldest cached collection to stay under the cap
                                    del cached_collections[0]
                                    logging.info('Reached max_cached_collections (%s): '
                                                 'oldest cached collection dropped',
                                                 max_cached_collections)
                                logging.info('Caching current collection to resend next time')
                                cached_collections.append(
                                    bz2.compress((json.dumps(collection) + "\n").encode()))
                                collection = []
                        finally:
                            connection.close()
                    if clean:
                        collection = []
            sleep_interval = interval - (time.time() - loop_ts)
            if sleep_interval > 0:
                time.sleep(sleep_interval)
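The snippet above couples three ideas: bz2-compress each newline-terminated JSON payload, flush the retry cache before sending fresh data, and bound that cache FIFO-style. A minimal standalone sketch of the same pattern (MAX_CACHED, submit and send are illustrative names, not from the snippet):

import bz2
import json

MAX_CACHED = 10   # illustrative cap, standing in for max_cached_collections
cached = []       # FIFO of compressed payloads awaiting resend

def encode_payload(collection):
    # Same encoding as above: newline-terminated JSON, bz2-compressed
    return bz2.compress((json.dumps(collection) + "\n").encode())

def submit(collection, send):
    # send is any callable that raises on failure (hypothetical stand-in
    # for the HTTPS PUT above)
    payload = encode_payload(collection)
    try:
        while cached:
            send(cached[0])
            del cached[0]     # drop a cached payload only once it is sent
        send(payload)
    except Exception:
        if len(cached) >= MAX_CACHED:
            del cached[0]     # evict the oldest payload to respect the cap
        cached.append(payload)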
示例#53
0
def compress(data, compressed, compress_level):
    return bz2.compress(data, compress_level) if compressed else data
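A quick round-trip check of this helper; a minimal sketch, assuming bz2 is imported alongside it:

data = b"hello world"
payload = compress(data, compressed=True, compress_level=9)
assert bz2.decompress(payload) == data
# With compressed=False the data passes through untouched
assert compress(data, compressed=False, compress_level=9) == data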
示例#54
0
def compress_bz2(s):
    return bz2.compress(s.encode("ascii"))
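compress_bz2 only handles ASCII-encodable text; a hedged sketch of the matching inverse (decompress_bz2 is an illustrative name, not from the source):

def decompress_bz2(b):
    # Undo compress_bz2: decompress, then decode back to str
    return bz2.decompress(b).decode("ascii")

assert decompress_bz2(compress_bz2("plain text")) == "plain text"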
示例#55
0
    else:
        subs = [0]
    print("> Converting %s, subsongs: %s " %
          (name, ", ".join([str(s) for s in subs])))

    for s in subs:
        dump_path = os.path.join("dump", name + "_(subsong %d).txt.bz2" % s)
        midi_path = os.path.join("midi", name + "_(subsong %d).mid" % s)

        if not os.path.isfile(dump_path):
            print("Dumping subsong %d" % s)
            data = subprocess.run(
                [asapscan_path, '-s', "%d" % s, '-d', sap],
                stdout=subprocess.PIPE).stdout
            with open(dump_path, "wb") as zdump:
                zdump.write(bz2.compress(data))
        else:
            print("Subsong %d already dumped" % s)

        print("Converting subsong %d... " % s)

        opts = pokey2midi_options
        if name in tempos and tempos[name][s] is not None:
            opts += ['--bpm', str(tempos[name][s])]

        log = subprocess.run([python3, pokey2midi_path] + opts +
                             [dump_path, midi_path],
                             stdout=subprocess.PIPE)

        print("OK")
示例#56
0
def compression_score(alpha_vector):
    bz2_score = len(bz2.compress(alpha_vector)) / float(len(alpha_vector))
    lzma_score = len(lzma.compress(alpha_vector)) / float(len(alpha_vector))
    return (bz2_score, lzma_score)
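Each score is compressed size over original size, so values below 1.0 indicate compressible input, while near-random bytes score above 1.0 due to header overhead. A small usage sketch (the sample vector is illustrative):

import bz2
import lzma

repetitive = b"abc" * 1000
bz2_score, lzma_score = compression_score(repetitive)
print(bz2_score, lzma_score)  # both well below 1.0 for this repetitive input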
示例#57
0
def compress(data):
    if len(data) > DECOMPRESSED_LIMIT:
        print('ERROR: File size limit exceeded!')
        exit(0)
    return bz2.compress(data, compresslevel=9)
示例#58
0
def bz2compressor(values):
    return list(bz2.compress(bytes(values), 9))
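In Python 3, bz2compressor expects an iterable of ints in range(256) (that is what bytes(values) accepts) and returns the compressed stream as a plain list of ints. A short usage sketch:

compressed = bz2compressor([0] * 100)
print(len(compressed))  # typically far fewer than 100 entries for repetitive input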
示例#59
0
  def _writePilotScript( self, workingDirectory, pilotOptions, proxy = None,
                         httpProxy = '', pilotExecDir = '' ):
    """ Bundle together and write out the pilot executable script, admix the proxy if given
    """

    try:
      compressedAndEncodedProxy = ''
      proxyFlag = 'False'
      if proxy is not None:
        compressedAndEncodedProxy = base64.encodestring( bz2.compress( proxy.dumpAllToString()['Value'] ) )
        proxyFlag = 'True'
      compressedAndEncodedPilot = base64.encodestring( bz2.compress( open( self.pilot, "rb" ).read(), 9 ) )
      compressedAndEncodedInstall = base64.encodestring( bz2.compress( open( self.install, "rb" ).read(), 9 ) )
      compressedAndEncodedExtra = {}
      for module in self.extraModules:
        moduleName = os.path.basename( module )
        compressedAndEncodedExtra[moduleName] = base64.encodestring( bz2.compress( open( module, "rb" ).read(), 9 ) )
    except:
      self.log.exception( 'Exception during file compression of proxy, dirac-pilot or dirac-install' )
      return S_ERROR( 'Exception during file compression of proxy, dirac-pilot or dirac-install' )

    # Extra modules
    mStringList = []
    for moduleName in compressedAndEncodedExtra:
      mString = """open( '%s', "w" ).write(bz2.decompress( base64.decodestring( \"\"\"%s\"\"\" ) ) )""" % \
                ( moduleName, compressedAndEncodedExtra[moduleName] )
      mStringList.append( mString )
    extraModuleString = '\n  '.join( mStringList )

    localPilot = """#!/bin/bash
/usr/bin/env python << EOF
#
import os, stat, tempfile, sys, shutil, base64, bz2
try:
  pilotExecDir = '%(pilotExecDir)s'
  if not pilotExecDir:
    pilotExecDir = os.getcwd()
  pilotWorkingDirectory = tempfile.mkdtemp( suffix = 'pilot', prefix = 'DIRAC_', dir = pilotExecDir )
  pilotWorkingDirectory = os.path.realpath( pilotWorkingDirectory )
  os.chdir( pilotWorkingDirectory )
  if %(proxyFlag)s:
    open( 'proxy', "w" ).write(bz2.decompress( base64.decodestring( \"\"\"%(compressedAndEncodedProxy)s\"\"\" ) ) )
    os.chmod("proxy", stat.S_IRUSR | stat.S_IWUSR)
    os.environ["X509_USER_PROXY"]=os.path.join(pilotWorkingDirectory, 'proxy')
  open( '%(pilotScript)s', "w" ).write(bz2.decompress( base64.decodestring( \"\"\"%(compressedAndEncodedPilot)s\"\"\" ) ) )
  open( '%(installScript)s', "w" ).write(bz2.decompress( base64.decodestring( \"\"\"%(compressedAndEncodedInstall)s\"\"\" ) ) )
  os.chmod("%(pilotScript)s", stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR )
  os.chmod("%(installScript)s", stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR )
  %(extraModuleString)s
  if "LD_LIBRARY_PATH" not in os.environ:
    os.environ["LD_LIBRARY_PATH"]=""
  if "%(httpProxy)s":
    os.environ["HTTP_PROXY"]="%(httpProxy)s"
  os.environ["X509_CERT_DIR"]=os.path.join(pilotWorkingDirectory, 'etc/grid-security/certificates')
  # TODO: structure the output
  print '==========================================================='
  print 'Environment of execution host'
  for key in os.environ.keys():
    print key + '=' + os.environ[key]
  print '==========================================================='
except Exception as x:
  print >> sys.stderr, x
  shutil.rmtree( pilotWorkingDirectory )
  sys.exit(-1)
cmd = "python %(pilotScript)s %(pilotOptions)s"
print 'Executing: ', cmd
sys.stdout.flush()
os.system( cmd )

shutil.rmtree( pilotWorkingDirectory )

EOF
""" % { 'compressedAndEncodedProxy': compressedAndEncodedProxy,
        'compressedAndEncodedPilot': compressedAndEncodedPilot,
        'compressedAndEncodedInstall': compressedAndEncodedInstall,
        'extraModuleString': extraModuleString,
        'httpProxy': httpProxy,
        'pilotExecDir': pilotExecDir,
        'pilotScript': os.path.basename( self.pilot ),
        'installScript': os.path.basename( self.install ),
        'pilotOptions': ' '.join( pilotOptions ),
        'proxyFlag': proxyFlag }

    fd, name = tempfile.mkstemp( suffix = '_pilotwrapper.py', prefix = 'DIRAC_', dir = workingDirectory )
    pilotWrapper = os.fdopen( fd, 'w' )
    pilotWrapper.write( localPilot )
    pilotWrapper.close()
    return name
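This example is Python 2 (base64.encodestring and the print statements in the generated script). Under Python 3 the same bundle/unbundle round trip would use base64.encodebytes and decodebytes; a minimal sketch, with the file paths illustrative:

import base64
import bz2

def bundle(path):
    # Compress a file and encode it for embedding in a generated script
    with open(path, "rb") as f:
        return base64.encodebytes(bz2.compress(f.read(), 9))

def unbundle(blob, path):
    # Inverse: decode, decompress and write the file back out
    with open(path, "wb") as f:
        f.write(bz2.decompress(base64.decodebytes(blob)))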
示例#60
0
    def save_player( self, _id, _pickled_player ):
        """Saves/Updates the pickled player in the database at the given ID.

        Args:
            _id (int): ID at which the pickled player is to be saved.
            _pickled_player (bytes): Pickled player.
        """
        c = self.conn.cursor()
        try:
            # Save pickledplayer at new playername
            c.execute( '''INSERT INTO player (playername, pickledplayer) VALUES (?,?)''', (_id, bz2.compress( _pickled_player )) )
            self.conn.commit()
        except Exception as e:
            # Update pickledplayer at existing playername
            consts.dbg( 1, "EXCEPTION IN SERIALIZER SAVE_PLAYER:", e )
            c.execute( '''UPDATE player SET pickledplayer = ? WHERE playername = ?''', (bz2.compress( _pickled_player ), _id) )
            self.conn.commit()
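A hedged sketch of the matching loader for this table; load_player is a hypothetical counterpart not shown in the source:

    def load_player( self, _id ):
        """Returns the decompressed pickled player stored at the given ID, or None."""
        c = self.conn.cursor()
        c.execute( '''SELECT pickledplayer FROM player WHERE playername = ?''', (_id,) )
        row = c.fetchone()
        return bz2.decompress( row[0] ) if row else None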