Example #1
	def add(self, document, defer_recalculate = True):
		# Retrieve (if known URI) or assign document ID
		known_ID = self.db.get('U_'+ document.uri)
		if known_ID:
			document.id = int(known_ID)
		else:
			self.max_id += 1
			self.db['M_max_id'] = str(self.max_id)
			document.id = self.max_id
			self.db["U_" + document.uri] = str(document.id)
		# Add an entry for each document's metadata
		doc_details = {}
		doc_details['uri'] = document.uri
		doc_details['title'] = document.title
		doc_details['author'] = document.author
		doc_details['modified'] = document.modified
		modified = time.localtime(document.modified)
		doc_details['str_modified'] = time.strftime('%d %B %Y', modified)
		try:
			# TODO: better name for raw_content
			# TODO: check attribute exists, don't just try to read it
			doc_details['raw_content'] = document.raw_data
		except AttributeError:
			pass
		self.db["D_%s" % document.id] = marshal.dumps(doc_details)
		# Add/update the entry for each term in the document
		for term in document.tokens:
			if self.db.has_key('T_' + term):
				term_data = marshal.loads(self.db['T_' + term])
				term_data[document.id] = (document.tokens[term], document.length)
			else:
				term_data = {}
				term_data[document.id] = (document.tokens[term], document.length)
			self.db['T_' + term] = marshal.dumps(term_data)
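For context, a term lookup against the index built by add() reverses the last step with marshal.loads. A minimal sketch, assuming the same 'T_' key scheme and a dict-like self.db (lookup() is not part of the original example):

import marshal

def lookup(db, term):
    # Read back one posting list written by add() above; the stored value
    # maps document id -> (term frequency, document length).
    raw = db.get('T_' + term)
    return marshal.loads(raw) if raw is not None else {}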
Example #2
    def get_config(self):
        py3 = sys.version_info[0] == 3

        if isinstance(self.function, python_types.LambdaType):
            if py3:
                function = marshal.dumps(self.function.__code__).decode("raw_unicode_escape")
            else:
                function = marshal.dumps(self.function.func_code).decode("raw_unicode_escape")
            function_type = "lambda"
        else:
            function = self.function.__name__
            function_type = "function"

        if isinstance(self._output_shape, python_types.LambdaType):
            if py3:
                output_shape = marshal.dumps(self._output_shape.__code__)
            else:
                output_shape = marshal.dumps(self._output_shape.func_code)
            output_shape_type = "lambda"
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = "function"
        else:
            output_shape = self._output_shape
            output_shape_type = "raw"

        config = {
            "function": function,
            "function_type": function_type,
            "output_shape": output_shape,
            "output_shape_type": output_shape_type,
            "arguments": self.arguments,
        }
        base_config = super(Lambda, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
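For context, the 'raw_unicode_escape' string stored by get_config can be turned back into a callable by reversing those steps. This is a sketch of the general technique under the same assumptions, not the library's own deserialization code, and it ignores closures (a closed-over lambda would also need the closure argument of FunctionType):

import marshal
import types

def rebuild_lambda(serialized):
    # Undo the raw_unicode_escape decode, unmarshal the code object, and
    # wrap it in a fresh function object bound to the current globals.
    code = marshal.loads(serialized.encode("raw_unicode_escape"))
    return types.FunctionType(code, globals(), "rebuilt_lambda")

Marshalled code objects are only valid for the interpreter version that produced them, which is why this style of config serialization is fragile across Python upgrades.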
Example #3
    def test_floats(self):
        # Test a few floats
        small = 1e-25
        n = sys.maxsize * 3.7e250
        while n > small:
            for expected in (-n, n):
                self.helper(float(expected))
            n /= 123.4567

        f = 0.0
        s = marshal.dumps(f, 2)
        got = marshal.loads(s)
        self.assertEqual(f, got)
        # and with version <= 1 (floats marshalled differently then)
        s = marshal.dumps(f, 1)
        got = marshal.loads(s)
        self.assertEqual(f, got)

        n = sys.maxsize * 3.7e-250
        while n < small:
            for expected in (-n, n):
                f = float(expected)
                self.helper(f)
                self.helper(f, 1)
            n *= 123.4567
Example #4
	def run_phase4(self):
		self.advance_phase()
		if self.am_leader():
			self.debug("Leader broadcasting ciphers to all nodes")
			self.broadcast_to_all_nodes(marshal.dumps(self.final_ciphers))
			self.debug("Cipher set len %d" % (len(self.final_ciphers)))
		else:
			""" Get C' ciphertexts from leader. """
			self.final_ciphers = marshal.loads(self.recv_from_leader())

		"""
		self.final_ciphers holds an array of
		pickled (round_id, cipher_prime) tuples
		"""

		my_cipher_str = marshal.dumps((self.round_id, self.cipher_prime))

		go = False
		if my_cipher_str in self.final_ciphers:
			self.info("Found my ciphertext in set")
			go = True
			self.debug("Go = TRUE")
		else:
			self.critical("ABORT! My ciphertext is not in set!")
			self.debug(self.final_ciphers)
			go = False
			self.debug("Go = FALSE")
			raise RuntimeError("Protocol violation: My ciphertext is missing!")

		#Pedro: Directly copy the final ciphers (plaintext in coinshuffle)
		self.anon_data = self.final_ciphers
Example #5
    def get_config(self):
        py3 = sys.version_info[0] == 3

        if isinstance(self.function, python_types.LambdaType):
            if py3:
                function = marshal.dumps(self.function.__code__).decode('raw_unicode_escape')
            else:
                function = marshal.dumps(self.function.func_code).decode('raw_unicode_escape')
            function_type = 'lambda'
        else:
            function = self.function.__name__
            function_type = 'function'

        if isinstance(self._output_shape, python_types.LambdaType):
            if py3:
                output_shape = marshal.dumps(self._output_shape.__code__)
            else:
                output_shape = marshal.dumps(self._output_shape.func_code)
            output_shape_type = 'lambda'
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = 'function'
        else:
            output_shape = self._output_shape
            output_shape_type = 'raw'

        config = {'function': function,
                  'function_type': function_type,
                  'output_shape': output_shape,
                  'output_shape_type': output_shape_type,
                  'arguments': self.arguments}
        base_config = super(Lambda, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
Example #6
    def recv_interest_voucher(self, data):
        msg, key = marshal.loads(data)
        (nonce, leader_ip, leader_gui_port, leader_com_port) = marshal.loads(msg)
        self.DEBUG(
            "Start round voucher from %s:%s, communicating at port %s" % (leader_ip, leader_gui_port, leader_com_port)
        )

        # get the path to the file you want to share
        self.emit(SIGNAL("getSharedFilename()"))

        verified = self.verify(leader_ip, leader_gui_port, data)

        """ generate temporary keys for this round so leader can aggregate """
        self.gen_temp_keys()
        temp1_str = AnonCrypto.pub_key_to_str(self.pubgenkey1)
        temp2_str = AnonCrypto.pub_key_to_str(self.pubgenkey2)

        """ default to random file of 128 bytes if you don't have anything to share """
        if verified:
            if os.path.exists(self.shared_filename):
                self.DEBUG("You are sharing file %s" % (self.shared_filename))
            else:
                self.DEBUG("Not a valid file path, continuing without sharing...")

            # respond with your interest
            self.DEBUG("Verified leader, participating as %s:%s at port %s" % (self.ip, self.gui_port, self.com_port))
            response = marshal.dumps((nonce, self.ip, self.gui_port, self.com_port, temp1_str, temp2_str))
            cipher = AnonCrypto.sign_with_key(self.privKey, response)
            AnonNet.send_to_addr(leader_ip, int(leader_gui_port), marshal.dumps(("interested", cipher)))
        else:
            self.DEBUG("Unknown leader, opting out...")
Example #7
def prepare_value(val, compress):
    flag = 0
    if isinstance(val, six.binary_type):
        pass
    elif isinstance(val, bool):
        flag = FLAG_BOOL
        val = str(int(val)).encode('utf-8')
    elif isinstance(val, six.integer_types):
        flag = FLAG_INTEGER
        val = str(val).encode('utf-8')
    elif isinstance(val, six.text_type):
        flag = FLAG_MARSHAL
        val = marshal.dumps(val, 2)
    else:
        try:
            val = marshal.dumps(val, 2)
            flag = FLAG_MARSHAL
        except ValueError:
            val = cPickle.dumps(val, -1)
            flag = FLAG_PICKLE

    if compress and len(val) > 1024:
        flag |= FLAG_COMPRESS
        val = quicklz.compress(val)

    return flag, val
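A decoding counterpart would dispatch on the same flag. The sketch below uses placeholder FLAG_* values and omits the quicklz/FLAG_COMPRESS branch, since those details live in the module this example comes from:

import marshal
import pickle

# Placeholder flag values for illustration; the real constants are defined
# elsewhere in the original module and may differ.
FLAG_BOOL, FLAG_INTEGER, FLAG_MARSHAL, FLAG_PICKLE = 1, 2, 4, 8

def restore_value(flag, val):
    # Inverse of prepare_value above, minus the compression handling.
    if flag == FLAG_BOOL:
        return bool(int(val))
    if flag == FLAG_INTEGER:
        return int(val)
    if flag == FLAG_MARSHAL:
        return marshal.loads(val)
    if flag == FLAG_PICKLE:
        return pickle.loads(val)
    return val  # plain bytes are passed through unchanged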
Example #8
    def prepare_round(self):
        # can't start round without 3 or more peers
        if len(self.participants) < 3:
            self.DEBUG("Not enough peers to start round!")
            return

        prepare_voucher = marshal.dumps(
            (int(PREPARE_WAIT), int(1), copy.copy(self.participants), self.ip, self.gui_port, self.com_port)
        )
        cipher = AnonCrypto.sign_with_key(self.privKey, prepare_voucher)

        for index, participant in enumerate(self.participants):
            down_index = (index - 1) % len(self.participants)
            up_index = (index + 1) % len(self.participants)
            if (self.ip, self.gui_port, self.com_port) != (participant[1], participant[2], participant[3]):
                AnonNet.send_to_addr(
                    participant[1],
                    participant[2],
                    marshal.dumps(("prepare:%s:%s:%s" % (index, down_index, up_index), cipher)),
                )
                self.DEBUG(
                    "Sending prepare to peer %s:%s at port %s" % (participant[1], participant[2], participant[3])
                )

        # after informing the participants, create your node
        dn_idx = -1 % len(self.participants)
        up_idx = 1
        self.start_node(dn_idx, up_idx, self.participants, 0)

        # start round after PREPARE_WAIT minutes
        DelayTimer(PREPARE_WAIT, self.run_protocol).start()
Example #9
    def accept_phase(self, ip, port, nonce):
        # package and encrypt data
        response = marshal.dumps((nonce, self.ip, self.gui_port))
        cipher = AnonCrypto.sign_with_key(self.privKey, response)

        # respond with ((ip, port), encrypted_data)
        AnonNet.send_to_addr(ip, int(port), marshal.dumps(("accept", cipher)))
Example #10
def send_monthly_card_reward():
    conn = MySQLdb.connect(**db_conf)
    cursor = conn.cursor()

    cid_sql = 'select id, monthly_card from tb_character' 
    update_sql = 'update tb_character set monthly_card = %s where id = %s'

    print 'send begin!'
    
    cursor.execute( cid_sql )
    _all_cid = cursor.fetchall()
    for _cid, _month in _all_cid:
        if _month:
            _month -= 1
            if _month == 0:
                yield redis.hdel(redis_key, _cid)
            else:
                card_data = (time(), 0)
                yield redis.hset(redis_key, _cid, dumps(card_data))
            _primary = yield redis.hincrby( 'HASH_HINCRBY_KEY', 'AWARD_ID', 1 )
            _data    = [_primary, 4, [time()]]
            yield redis.hset( 'HASH_AWARD_CENTER_%s' % _cid, _primary, dumps(_data) )
            cursor.execute(update_sql % (_month, _cid))

    cursor.close()
    conn.close()
 
    print 'end...'
    conn   = None
    cursor = None
    reactor.stop()
Example #11
def Paser_SigMDB(file, num) :
    fp = open(SIGDB_FILENAME)

    while 1: 
        lines = fp.readlines(100000) # a reasonable amount that memory allows
        if not lines: 
            break 
        for line in lines: 
            convert(line, num)

    fp.close()

    fname = '%s.c%02d' % (file, num)
    output = open(fname, 'wb')
    #s = pickle.dumps(db_size_pattern, -1)
    s = marshal.dumps(db_size_pattern)
    output.write(s)
    output.close()

    fname = '%s.i%02d' % (file, num)
    output = open(fname, 'wb')
    # s = pickle.dumps(db_vname, -1)
    s = marshal.dumps(db_vname)
    output.write(s)
    output.close()
Example #12
    def test_floats(self):
        # Test a few floats
        small = 1e-25
        n = sys.maxint * 3.7e250
        while n > small:
            for expected in (-n, n):
                f = float(expected)
                s = marshal.dumps(f)
                got = marshal.loads(s)
                self.assertEqual(f, got)
                marshal.dump(f, file(test_support.TESTFN, "wb"))
                got = marshal.load(file(test_support.TESTFN, "rb"))
                self.assertEqual(f, got)
            n /= 123.4567

        f = 0.0
        s = marshal.dumps(f)
        got = marshal.loads(s)
        self.assertEqual(f, got)

        n = sys.maxint * 3.7e-250
        while n < small:
            for expected in (-n, n):
                f = float(expected)
                s = marshal.dumps(f)
                got = marshal.loads(s)
                self.assertEqual(f, got)
                marshal.dump(f, file(test_support.TESTFN, "wb"))
                got = marshal.load(file(test_support.TESTFN, "rb"))
                self.assertEqual(f, got)
            n *= 123.4567
        os.unlink(test_support.TESTFN)
Example #13
 def __init__(self, layers, function, output_shape=None):
     if len(layers) < 2:
         raise Exception("Please specify two or more input layers (or containers) to merge")
     self.layers = layers
     self.params = []
     self.regularizers = []
     self.constraints = []
     self.updates = []
     for l in self.layers:
         params, regs, consts, updates = l.get_params()
         self.regularizers += regs
         self.updates += updates
         # params and constraints have the same size
         for p, c in zip(params, consts):
             if p not in self.params:
                 self.params.append(p)
                 self.constraints.append(c)
     py3 = sys.version_info[0] == 3
     if py3:
         self.function = marshal.dumps(function.__code__)
     else:
         self.function = marshal.dumps(function.func_code)
     if output_shape is None:
         self._output_shape = None
     elif type(output_shape) in {tuple, list}:
         self._output_shape = tuple(output_shape)
     else:
         if py3:
             self._output_shape = marshal.dumps(output_shape.__code__)
         else:
             self._output_shape = marshal.dumps(output_shape.func_code)
Example #14
 def buy_group_package(self, buy_type):
     if buy_type not in get_group_buy_conf().keys():
         defer.returnValue( BUY_GROUP_TYPE_WRONG )
     _conf = get_group_buy_conf(buy_type)
     _stream = yield redis.hget(DICT_GROUP_BUY_PERSON_INFO, self.cid)
     _data = loads(_stream)
     #[[buy_count, [0,0,0,0]], ......]
     bought_count, _info = _data[buy_type-1]
     if bought_count + 1 > _conf["LimitNum"]:
         defer.returnValue(GROUP_BUY_MAX_COUNT)
     if self.user.credits < _conf["CurrentPrice"]:
         defer.returnValue(CHAR_CREDIT_NOT_ENOUGH)
     yield self.user.consume_credits(_conf["CurrentPrice"], WAY_GROUP_BUY)
     bought_count +=1
     _st = yield redis.hget(DICT_GROUP_BUY_INFO, buy_type)
     _datas = loads(_st)
     #buy_type:buy_num
     _total_buy_count = _datas
     if bought_count == 1:
         _total_buy_count += 1
     _data[buy_type-1] = [bought_count, _info]
     yield redis.hset(DICT_GROUP_BUY_PERSON_INFO, self.cid, dumps(_data))
     yield redis.hset(DICT_GROUP_BUY_INFO, buy_type, dumps(_total_buy_count))
     _item_type, _item_id, _item_num = _conf['ItemType'], _conf['ItemID'], _conf['ItemNum']
     _res = yield item_add(self.user, ItemType=_item_type, ItemID=_item_id, ItemNum = _item_num, AddType=WAY_GROUP_BUY)
     _result = (buy_type, _total_buy_count, bought_count, _res[1][0], self.user.credits)
     defer.returnValue( _result )
Example #15
  def split(self):
    """Split a RecordIORecordsZipped data into two even chunks.

    :return: lower_entries, higher_entries, middle_entry
    """
    new_zipped_chunks = list(self.get_zipped_chunks_())
    if len(new_zipped_chunks) <= 1:
      raise RecordIOTooSmallToSplitError()
    lo_chunks = []
    hi_chunks = []
    lo_size = 0
    hi_size = 0
    left = -1
    right = len(new_zipped_chunks)
    while left + 1 != right:
      if lo_size <= hi_size:
        left += 1
        lo_chunks.append(new_zipped_chunks[left])
        lo_size += len(new_zipped_chunks[left][2])
      else:
        right -= 1
        hi_chunks.insert(0, new_zipped_chunks[right])
        hi_size += len(new_zipped_chunks[right][2])
    middle_entry_lo = new_zipped_chunks[right][0]
    self.records_ = []
    self.zipped_chunks_ = lo_chunks + hi_chunks
    return (marshal.dumps(lo_chunks, MARSHAL_VERSION),
            marshal.dumps(hi_chunks, MARSHAL_VERSION),
            middle_entry_lo)
Example #16
 def get_dig_treasure_reward(self, t_type, count):
     if t_type == FREE_DIG:
         if self.free_dig_count >= 1:
             self.free_dig_count -= 1
             self.dig_total_count += 1
             self.last_dig_time = time()
             _value = (self.free_dig_count, self.dig_total_count, self.last_dig_time)
             yield redis.hset(DICT_DIG_TREASURE_INFO, self.cid, dumps(_value))
             _item_rand = yield package_open(self.user, FREE_DIG_PACAKGE)
             if _item_rand:
                 user_item_id = 0
                 _item_type, _item_id, _item_num, _notice = _item_rand
                 _res = yield item_add(self.user, ItemType=_item_type, ItemID=_item_id, ItemNum=_item_num, AddType=WAY_DIG_TREASURE_FREE)
                 defer.returnValue((_res[1], self.free_dig_count, self.dig_total_count, self.user.credits))
     elif t_type == CREDITS_DIG:
         if self.user.credits >= 20 * count and self.dig_total_count <DIG_MAX_COUNT :
             _itemList = []
             for i in xrange(count):
                 _item_rand = yield package_open(self.user, CREDITS_DIG_PACAKGE)
                 if _item_rand:
                     user_item_id = 0
                     _item_type, _item_id, _item_num, _notice = _item_rand
                     _res = yield item_add(self.user, ItemType=_item_type, ItemID=_item_id, ItemNum = _item_num, AddType=WAY_DIG_TREASURE_CREDITS)
                     _itemList.append(_res[1][0])
                 self.dig_total_count += 1
                 yield self.user.consume_credits(20, WAY_DIG_TREASURE_CREDITS)
                 self.last_dig_time = time()
                 _value = (self.free_dig_count, self.dig_total_count, self.last_dig_time)
                 yield redis.hset(DICT_DIG_TREASURE_INFO, self.cid, dumps(_value))
                 if self.dig_total_count >= DIG_MAX_COUNT :
                     break
             defer.returnValue((_itemList, self.free_dig_count, self.dig_total_count, self.user.credits))
         else:
             defer.returnValue(HAD_DIG_MAX_COUNT)
Example #17
    def get_group_buy_info(self):
        _infos = yield redis.hgetall(DICT_GROUP_BUY_INFO)
        if not _infos:
            _group_buy_info = {1:0,2:0,3:0,4:0}  #buy_type:buy_num
            for buy_type in xrange(1,5):
                yield redis.hset(DICT_GROUP_BUY_INFO, buy_type, dumps(_group_buy_info[buy_type]))
        else:
            _group_buy_info = dict()
            for k, v in _infos.iteritems():
                _group_buy_info[k] = loads(v)

        _res = []
        _ret = []
        for _buy_type, _bought_num in _group_buy_info.iteritems():
           _res.append([_buy_type, _bought_num])

        _stream = yield redis.hget(DICT_GROUP_BUY_PERSON_INFO, self.cid)#[[buy_count, [status,2,3,4]],..]
        if _stream:
            try:
                _data = loads(_stream)
                if _data:
                    # [bought_count, [0,0,0,0]]
                    for _bought_count_info, _info in zip(_data, _res):
                        _info.append(_bought_count_info)
                        _ret.append(_info)
            except:
                log.exception()
        else:
            _value = [[0,[0,0,0,0]]] * 4
            yield redis.hset(DICT_GROUP_BUY_PERSON_INFO, self.cid, dumps(_value))
            for _info in _res:
                _info.append([0,[0,0,0,0]])
                _ret.append(_info)
        defer.returnValue( _ret )
Example #18
def save_signature(fname, _id):
    # Get the current date and time.
    ret_date = k2timelib.get_now_date()
    ret_time = k2timelib.get_now_time()

    # Pack the date and time values into 2 bytes each.
    val_date = struct.pack('<H', ret_date)
    val_time = struct.pack('<H', ret_time)

    # Save the size file, e.g. script.s01
    sname = '%s.s%02d' % (fname, _id)
    t = zlib.compress(marshal.dumps(set(size_sig)))  # store after removing duplicate entries
    t = 'KAVS' + struct.pack('<L', len(size_sig)) + val_date + val_time + t
    save_file(sname, t)

    # Save the p1 pattern file, e.g. script.i01
    sname = '%s.i%02d' % (fname, _id)
    t = zlib.compress(marshal.dumps(p1_sig))
    t = 'KAVS' + struct.pack('<L', len(p1_sig)) + val_date + val_time + t
    save_file(sname, t)

    # Save the p2 pattern file, e.g. script.c01
    sname = '%s.c%02d' % (fname, _id)
    t = zlib.compress(marshal.dumps(p2_sig))
    t = 'KAVS' + struct.pack('<L', len(p2_sig)) + val_date + val_time + t
    save_file(sname, t)

    # Save the malware name file, e.g. script.n01
    sname = '%s.n%02d' % (fname, _id)
    t = zlib.compress(marshal.dumps(name_sig))
    t = 'KAVS' + struct.pack('<L', len(name_sig)) + val_date + val_time + t
    save_file(sname, t)
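Reading one of these files back reverses the layout written above. A sketch assuming the header shown in save_signature (4-byte 'KAVS' magic, 4-byte little-endian count, 2-byte date, 2-byte time, then the zlib-compressed marshal payload):

import marshal
import struct
import zlib

def load_signature_file(path):
    # Parse the header, then decompress and unmarshal the signature data.
    with open(path, 'rb') as fp:
        data = fp.read()
    if data[:4] != b'KAVS':
        raise ValueError('not a KAVS signature file')
    count, date, tm = struct.unpack('<LHH', data[4:12])
    payload = marshal.loads(zlib.decompress(data[12:]))
    return count, date, tm, payload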
Example #19
    def get_dig_treasure_info(self):
        flag = 0
        _stream = yield redis.hget(DICT_DIG_TREASURE_INFO, self.cid)
        if _stream:
            try:
                _data = loads(_stream)
                if _data:
                    self.free_dig_count, self.dig_total_count, self.last_dig_time = _data
                    if not timestamp_is_today(self.last_dig_time):
                        self.free_dig_count = get_vip_conf(self.user.vip_level)["FreeDigCount"]
                        self.vipLevel = self.user.vip_level
                        self.last_dig_time = time()
                        _value = (self.free_dig_count, self.dig_total_count, self.last_dig_time)
                        yield redis.hset(DICT_DIG_TREASURE_INFO, self.cid, dumps(_value))
                    else:
                        if self.user.vip_level > self.vipLevel:
                            count = get_vip_conf(self.user.vip_level)["FreeDigCount"] - get_vip_conf(self.vipLevel)["FreeDigCount"]
                            self.free_dig_count += count
                            self.vipLevel = self.user.vip_level
                            _value = (self.free_dig_count, self.dig_total_count, self.last_dig_time)
                            yield redis.hset(DICT_DIG_TREASURE_INFO, self.cid, dumps(_value))

            except:
                log.exception()
        else:
            self.free_dig_count = get_vip_conf(self.user.vip_level)["FreeDigCount"]
            self.dig_total_count = 0
            self.last_dig_time = time()
            self.vipLevel = self.user.vip_level
            _value = (self.free_dig_count, self.dig_total_count, self.last_dig_time)
            yield redis.hset(DICT_DIG_TREASURE_INFO, self.cid, dumps(_value))
        if self.free_dig_count > 0:
            flag = 1
        defer.returnValue(flag)
Example #20
def fetchData():
    global od
    global pw
    global lc
    #--------------------
    # Set your Telemachus server's IP address here and only here.
    #--------------------
    ip = "192.168.1.40:8085"

#    url = "http://" + str(ip) + "/telemachus/datalink?long=v.long"
    url = "http://" + str(ip) + "/telemachus/datalink?throt=f.throttle&rcs=v.rcsValue&sas=v.sasValue&light=v.lightValue&pe=o.PeA&ap=o.ApA&ttap=o.timeToAp&ttpe=o.timeToPe&operiod=o.period&sma=o.sma&alt=v.altitude&hat=v.heightFromTerrain&mt=v.missionTime&sfcs=v.surfaceSpeed&sfcv=v.surfaceVelocity&sfcvx=v.surfaceVelocityx&sfcvy=v.surfaceVelocityy&sfcvz=v.surfaceVelocityz&ov=v.orbitalVelocity&vs=v.verticalSpeed&lat=v.lat&long=v.long&body=v.body&o2=r.resource[Oxygen]&co2=r.resource[CarbonDioxide]&h2o=r.resource[Water]&w=r.resource[ElectricCharge]&food=r.resource[Food]&waste=r.resource[Waste]&wastewater=r.resource[WasteWater]&mo2=r.resourceMax[Oxygen]&mco2=r.resourceMax[CarbonDioxide]&mh2o=r.resourceMax[Water]&mw=r.resourceMax[ElectricCharge]&mfood=r.resourceMax[Food]&mwaste=r.resourceMax[Waste]&mwastewater=r.resourceMax[WasteWater]&pitch=n.pitch&roll=n.roll&hdg=n.heading&pstat=p.paused&inc=o.inclination&ecc=o.eccentricity&aoe=o.argumentOfPeriapsis&lan=o.lan&ut=t.universalTime&lf=r.resource[LiquidFuel]&oxidizer=r.resource[Oxidizer]&mono=r.resource[MonoPropellant]&mlf=r.resourceMax[LiquidFuel]&moxidizer=r.resourceMax[Oxidizer]&mmono=r.resourceMax[MonoPropellant]"
    try:
        u = urllib2.urlopen(url)
        d = json.load(u)
        od = d
        if d["w"] >= pw:
            lc = d["mt"]
        d["lc"] = lc
        d["wr"] = d["w"] - pw
        pw = d["w"]
        bytes = marshal.dumps(d)
        print "Got! :)"
    except:
        print "Didn't get it :("
        bytes = marshal.dumps(od)
    return bytes
Example #21
    def get_campcard_data(self):
        ''' Get the player's camp card-draw info. '''
        reset_flag = False
        curr_time  = int(time())
        comm_data  = yield redis.hget(HASH_CAMPRAND_COMMON, 'CAMPRAND')
        if comm_data:
            comm_data = loads(comm_data)
            if curr_time >= comm_data[0]:
                reset_flag = True
                comm_data[0] += CAMP_RAND_TIME
                comm_data[1] = 0 if len(CAMP_GROUP_IDS) <= comm_data[1]+1 else comm_data[1] + 1
                yield redis.hset(HASH_CAMPRAND_COMMON, 'CAMPRAND', dumps(comm_data))
            else:
                camp_data = yield redis.hget(HASH_CAMPRAND_COMMON, self.cid)
                if camp_data:
                    camp_data = loads(camp_data)
                    if 1 == timestamp_is_today(camp_data[0]):
                        curr_camp_data, next_camp_data = camp_data[1], camp_data[2]
                    else:
                        reset_flag = True
                else:
                    reset_flag = True
        else:
            reset_flag = True
            comm_data = [get_reset_timestamp() + CAMP_RAND_TIME, 0]
            yield redis.hset(HASH_CAMPRAND_COMMON, 'CAMPRAND', dumps(comm_data))

        if reset_flag:
            curr_camp_data = [[camp_id, 0] for camp_id in CAMP_GROUP_IDS[comm_data[1]]]
            next_group_id  = 0 if len(CAMP_GROUP_IDS) <= comm_data[1]+1 else comm_data[1] + 1
            next_camp_data = [[camp_id, 0] for camp_id in CAMP_GROUP_IDS[next_group_id]]
            yield redis.hset(HASH_CAMPRAND_COMMON, self.cid, dumps([curr_time, curr_camp_data, next_camp_data]))

        defer.returnValue( (comm_data[0], curr_camp_data, next_camp_data) )
Example #22
def send_certified_action(actor_name, action, arguments, password, socket):
    from marshal import dumps
    marshaldata = dumps( (action, arguments) )
    cert = certificate(marshaldata, password)
    #print actor_name, cert,  marshaldata
    marshaldata = dumps( (actor_name, cert, marshaldata) )
    send_packet(socket, marshaldata)
Example #23
 def addSong(self,vid,imgURL,title,artist):
     if self.active:
         #print "adding"
         conn=sqlite3.connect(self.db)
         x=m.dumps([])
         y=m.dumps([])
         args=(vid,imgURL,title,artist,x,y,self.k,)
         c=conn.cursor()
             #raise e
         L=c.execute("SELECT * FROM songs WHERE videoid=? AND played=1 AND party=? ",(vid,self.k,)).fetchall()
         if len(L)==0:
             L=c.execute("SELECT * FROM songs WHERE videoid=? AND played=0 AND party=? ",(vid,self.k,)).fetchall()
             if len(L)==0:
                 c.execute("INSERT INTO songs (videoid,imgURL,name,artist,upvotes,downvotes,total,upvoteip,downvoteip,played,party) VALUES (?,?,?,?,0,0,0,?,?,0,?)",args)
                 conn.commit()
                 conn.close()
                 return
             else:
                 conn.close()
             #print "in queue!"
                 return "The song is already in the queue!"
         else:
             c.execute("REPLACE INTO songs (videoid,imgURL,name,artist,upvotes,downvotes,total,upvoteip,downvoteip,played,party) VALUES (?,?,?,?,0,0,0,?,?,0,?)",args)
         conn.commit()
         conn.close()
     else:
         return "Party not active."
Example #24
	def add(self, document, defer_recalculate = True):
		# Retrieve (if known URI) or assign document ID
		known_ID = self.db.get('U_'+ document.uri)
		if known_ID:
			document.id = int(known_ID)
		else:
			self.max_id += 1
			self.db['M_max_id'] = str(self.max_id)
			document.id = self.max_id
			self.db["U_" + document.uri] = str(document.id)
		# Add an entry for each document's metadata
		tokens = document.tokens
		del(document.tokens) # we don't want to store these
		doc_details = document.__dict__
		modified = time.localtime(document.modified)
		doc_details['str_modified'] = time.strftime('%d %B %Y', modified)
		self.db["D_%s" % document.id] = marshal.dumps(doc_details)
		# Add/update the entry for each term in the document
		for term in tokens:
			if self.db.has_key('T_' + term):
				term_data = marshal.loads(self.db['T_' + term])
			else:
				term_data = {}
			term_data[document.id] = (tokens[term], document.length)
			# TODO: optimise by chunking db inserts
			self.db['T_' + term] = marshal.dumps(term_data)
Example #25
def setup_default_condor_setup():
	"""
	Checks that all users have a default condor setup in their profile (for every plugin).
	Already existing values will remain untouched, missing default setup will be added.
	"""
	logger.setGroup('condor', 'Checking that all users have a default condor setup in their profile')
	users = User.objects.all()
	for user in users:
		p = user.get_profile()
		if len(p.dflt_condor_setup) == 0:
			# No default Condor setup rules
			logger.log("Missing Condor setup in %s's profile" % user.username)
			setup = {}
			for plugin in manager.plugins:
				# Default is to use the ALL policy
				setup[plugin.id] = {'DB': 'policy', 'DS': '', 'DP': 'ALL'}
			p.dflt_condor_setup = base64.encodestring(marshal.dumps(setup)).replace('\n', '')
			p.save()
			logger.log("Added default Condor setup rules for %s" % user.username)
		else:
			# Ok, existing but maybe default rules for some (newly created?) plugins are missing
			setup = marshal.loads(base64.decodestring(p.dflt_condor_setup))
			updated = False
			for plugin in manager.plugins:
				if not setup.has_key(plugin.id):
					setup[plugin.id] = {'DB': 'policy', 'DS': '', 'DP': 'ALL'}
					updated = True
			p.dflt_condor_setup = base64.encodestring(marshal.dumps(setup)).replace('\n', '')
			p.save()
			if updated:
				logger.log("Updated default Condor setup rules for %s" % user.username)
			else:
				logger.log("Default Condor setup rules for %s look good" % user.username)
Example #26
    def save_all_2(self):
        if self.settings.no_save != "True":
            print("Writing dictionary...")

            try:
                zfile = zipfile.ZipFile(self.brain_path, 'r')
                for filename in zfile.namelist():
                    data = zfile.read(filename)
                    f = open(filename, 'w+b')
                    f.write(data)
                    f.close()
            except (OSError, IOError):
                print("No zip found. Is the program being launched for the first time?")

            with open("words.dat", "wb") as f:
                f.write(marshal.dumps(self.words))

            with open("lines.dat", "wb") as f:
                f.write(marshal.dumps(self.lines))

            # save the version
            with open('version', 'w') as f:
                f.write(self.saves_version)

            # zip the files
            with zipfile.ZipFile(self.brain_path, "w") as f:
                f.write('words.dat')
                f.write('lines.dat')
                f.write('version')

            try:
                os.remove('words.dat')
                os.remove('lines.dat')
                os.remove('version')
            except (OSError, IOError):
                print("could not remove the files")

            f = open("words.txt", "w")
            # write out each known word
            wordlist = []
            # Sort the list before exporting
            for key in self.words.keys():
                wordlist.append([key, len(self.words[key])])
            wordlist.sort(key=lambda x: x[1])
            list(map((lambda x: f.write(str(x[0]) + "\n\r")), wordlist))
            f.close()

            f = open("sentences.txt", "w")
            # write out each known sentence
            wordlist = []
            # Sort the list before exporting
            for key in self.unfilterd.keys():
                wordlist.append([key, self.unfilterd[key]])
            # wordlist.sort(lambda x, y: cmp(y[1], x[1]))
            wordlist.sort(key=lambda x: x[1])
            list(map((lambda x: f.write(str(x[0]) + "\n")), wordlist))
            f.close()

            # Save settings
            self.settings.save()
Example #27
def prepare_value(val, compress):
    flag = 0
    if isinstance(val, str):
        pass
    elif isinstance(val, (bool)):
        flag = FLAG_BOOL
        val = str(int(val))
    elif isinstance(val, (int, long)):
        flag = FLAG_INTEGER
        val = str(val)
    elif isinstance(val, unicode):
        flag = FLAG_MARSHAL
        val = marshal.dumps(val, 2)
    else:
        try:
            val = marshal.dumps(val, 2)
            flag = FLAG_MARSHAL
        except ValueError:
            val = cPickle.dumps(val, -1)
            flag = FLAG_PICKLE

    if compress and len(val) > 1024:
        flag |= FLAG_COMPRESS
        val = quicklz.compress(val)

    return flag, val
Example #28
    def upVote(self,vid, ip):
        if self.active:
            conn=sqlite3.connect(self.db)
            c=conn.cursor()
            num=c.execute("SELECT upvotes,total,upvoteip, downvotes, downvoteip FROM songs WHERE videoid=? AND party=?",(vid,self.k,)).fetchone()
            x=m.loads(num[2])
            y=m.loads(num[4])
            if ip not in x:
                if ip in y:
                    x.append(ip)
                    y.remove(ip)
                    x=m.dumps(x)
                    y=m.dumps(y)
                    c.execute("UPDATE songs SET upvotes=?, total=?, upvoteip=?, downvotes=?, downvoteip=? WHERE videoid=? AND party=?",(num[0]+1,num[1]+2,x, num[3]-1,y,vid,self.k,))
                else:
                    x.append(ip)
                    x=m.dumps(x)
                    c.execute("UPDATE songs SET upvotes=?, total=?, upvoteip=? WHERE videoid=? AND party=?",(num[0]+1,num[1]+1,x,vid,self.k,))
            elif ip in x:
                x.remove(ip)
                x=m.dumps(x)
                c.execute("UPDATE songs SET upvotes=?, total=?, upvoteip=? WHERE videoid=? AND party=?",(num[0]-1,num[1]-1,x,vid,self.k,))

            conn.commit()
            conn.close()
        else:
            return "Party not active."
Example #29
def encode_function(function):
  if type(function) != types.BuiltinFunctionType:
    builtin = False
    return marshal.dumps(((function.func_code, capture_globals(function)), builtin))
  else:
    builtin = True
    return marshal.dumps((function.__name__, builtin))
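Only the encoding side is shown above; a decoding counterpart under the same (payload, builtin) layout might look like the sketch below. decode_function is an assumption for illustration and, like the original, it ignores closures and default arguments:

import builtins
import marshal
import types

def decode_function(blob):
    payload, builtin = marshal.loads(blob)
    if builtin:
        # Built-in functions were stored by name only.
        return getattr(builtins, payload)
    code, captured_globals = payload
    return types.FunctionType(code, dict(captured_globals))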
Example #30
def pass1(arg):
    """
  Chunk files into a doc->term mapping,
  and simultaneously build a term->df count.
  The term->df counts are redistributed to
  buckets via python's in-built hash function.
  This is basically an inversion step, so that 
  now we are chunked on the term axis rather
  than the document axis.
  """
    global __maxorder, __b_freq, __b_list, __locks
    chunk_id, chunk_paths = arg

    extractor = Tokenizer(__maxorder)
    term_doc_freq = defaultdict(int)
    term_doc_list = defaultdict(list)

    for doc_index, path in enumerate(chunk_paths):
        with open(path) as f:
            tokenset = set(extractor(f.read()))
            for token in tokenset:
                term_doc_freq[token] += 1
                term_doc_list[token].append(doc_index)

    for key in term_doc_freq:
        bucket_index = hash(key) % len(__locks)
        with __locks[bucket_index]:
            os.write(__b_freq[bucket_index], marshal.dumps((key, term_doc_freq[key])))
            os.write(__b_list[bucket_index], marshal.dumps((key, chunk_id, term_doc_list[key])))

    return len(term_doc_freq)
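The bucketing described in the docstring boils down to hashing each term into a fixed number of buckets and appending one marshal.dumps record per term. A toy illustration; note that on Python 3, str hashes are randomized per process unless PYTHONHASHSEED is set, so the bucket assignment is only stable within a single run:

import marshal

num_buckets = 4
buckets = [[] for _ in range(num_buckets)]
term_doc_freq = {"the": 3, "marshal": 1, "python": 2}
for term, df in term_doc_freq.items():
    # Each record is an independently unmarshalable (term, df) pair.
    buckets[hash(term) % num_buckets].append(marshal.dumps((term, df)))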
Example #31
 def _to_marshal(c):
     return base64.b64encode(marshal.dumps(get_code(c)))
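The inverse is just a base64 decode followed by marshal.loads (a sketch; _from_marshal is not part of the original):

import base64
import marshal

def _from_marshal(encoded):
    # Reverses _to_marshal above: strip the base64 wrapping, then unmarshal
    # the code object that get_code() originally returned.
    return marshal.loads(base64.b64decode(encoded))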
Example #32
    def getExecutorInfo(self, framework_id):
        info = mesos_pb2.ExecutorInfo()
        if hasattr(info, 'framework_id'):
            info.framework_id.value = framework_id

        if self.use_self_as_exec:
            info.command.value = os.path.abspath(sys.argv[0])
            info.executor_id.value = sys.argv[0]
        else:
            info.command.value = '%s %s' % (
                sys.executable,
                os.path.abspath(
                    os.path.join(os.path.dirname(__file__), 'executor.py')))
            info.executor_id.value = "default"

        v = info.command.environment.variables.add()
        v.name = 'UID'
        v.value = str(os.getuid())
        v = info.command.environment.variables.add()
        v.name = 'GID'
        v.value = str(os.getgid())

        if self.options.image and hasattr(info, 'container'):
            info.container.type = mesos_pb2.ContainerInfo.DOCKER
            info.container.docker.image = self.options.image

            for path in ['/etc/passwd', '/etc/group']:
                v = info.container.volumes.add()
                v.host_path = v.container_path = path
                v.mode = mesos_pb2.Volume.RO

            for path in conf.MOOSEFS_MOUNT_POINTS:
                v = info.container.volumes.add()
                v.host_path = v.container_path = path
                v.mode = mesos_pb2.Volume.RW

            for path in conf.DPARK_WORK_DIR.split(','):
                v = info.container.volumes.add()
                v.host_path = v.container_path = path
                v.mode = mesos_pb2.Volume.RW

            if self.options.volumes:
                for volume in self.options.volumes.split(','):
                    fields = volume.split(':')
                    if len(fields) == 3:
                        host_path, container_path, mode = fields
                        mode = mesos_pb2.Volume.RO if mode.lower(
                        ) == 'ro' else mesos_pb2.Volume.RW
                    elif len(fields) == 2:
                        host_path, container_path = fields
                        mode = mesos_pb2.Volume.RW
                    elif len(fields) == 1:
                        container_path, = fields
                        host_path = ''
                        mode = mesos_pb2.Volume.RW
                    else:
                        raise Exception("cannot parse volume %s" % volume)

                    mkdir_p(host_path)

                v = info.container.volumes.add()
                v.container_path = container_path
                v.mode = mode
                if host_path:
                    v.host_path = host_path

        mem = info.resources.add()
        mem.name = 'mem'
        mem.type = mesos_pb2.Value.SCALAR
        mem.scalar.value = EXECUTOR_MEMORY
        cpus = info.resources.add()
        cpus.name = 'cpus'
        cpus.type = mesos_pb2.Value.SCALAR
        cpus.scalar.value = EXECUTOR_CPUS

        Script = os.path.realpath(sys.argv[0])
        if hasattr(info, 'name'):
            info.name = Script

        info.data = marshal.dumps(
            (Script, os.getcwd(), sys.path, dict(os.environ),
             self.task_per_node, self.out_logger, self.err_logger,
             self.logLevel, env.environ))
        return info
Example #33
 def __loadModule(self, module):
     source = inspect.getsource(module)
     self.__modules[module.__name__] = marshal.dumps(source, 1)
Example #34
        except:
            pass
        return object.__getattribute__(self, name)

    def remote_import(self, name):
        return self.__import(name)


###############################################################################

# Read the source for the server into a string. If we're the server, we'll
# have defined __builtin__.pushy_source (by the "realServerLoaderSource").
if not hasattr(__builtin__, "pushy_source"):
    if "__loader__" in locals():
        serverSource = __loader__.get_source(__name__)
        serverSource = marshal.dumps(serverSource, 1)
    else:
        serverSource = open(inspect.getsourcefile(AutoImporter)).read()
        serverSource = marshal.dumps(serverSource, 1)
else:
    serverSource = __builtin__.pushy_source
md5ServerSource = hashlib.md5(serverSource).digest()

# This is the program we run on the command line. It'll read in a
# predetermined number of bytes, and execute them as a program. So once we
# start the process up, we immediately write the "real" server source to it.
realServerLoaderSource = """
import __builtin__, os, marshal, sys
try:
    import hashlib
except ImportError:
Example #35
def code_pickler(copy):
    return code_unpickler, (marshal.dumps(code), )
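This is half of a copyreg-style pair; the matching constructor just unmarshals the blob. A sketch, assuming the pair is registered for code objects elsewhere (e.g. via copyreg.pickle):

import marshal

def code_unpickler(data):
    # Rebuild the code object that code_pickler above marshalled.
    return marshal.loads(data)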
Example #36
    def save_index(self):
        # only save if the index is loaded and changed
        if not self.index_loaded() or not self.changed:
            return

        # brutal space saver... delete all the small segments
        for segment in self.segments:
            try:
                os.remove(self.indexdb + segment)
            except OSError, error:
                # probably just nonexistent segment index file
                if error.errno != errno.ENOENT: raise

        # First write the much simpler filename/fileid dictionaries
        dbfil = {'WORDS':None, 'FILES':self.files, 'FILEIDS':self.fileids}
        open(self.indexdb+'-','wb').write(zlib.compress(marshal.dumps(dbfil)))

        # The hard part is splitting the word dictionary up, of course
        letters = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ#_"
        segdicts = {}                           # Need batch of empty dicts
        for segment in letters:
            segdicts[segment] = {}
        for word, entry in self.words.iteritems():  # Split into segment dicts
            initchar = word[0].upper()
            segdicts[initchar][word] = entry

        # save
        for initchar in letters:
            db = {'WORDS':segdicts[initchar], 'FILES':None, 'FILEIDS':None}
            pickle_str = marshal.dumps(db)
            filename = self.indexdb + initchar
Example #37
    def _create_script_data(self, target):
        # We create a list of code objects, and return it as a
        # marshaled stream.  The framework code then just exec's these
        # in order.

        ## # First is our common boot script.
        ## boot = self.get_boot_script("common")
        ## boot_code = compile(file(boot, "U").read(),
        ##                     os.path.abspath(boot), "exec")
        ## code_objects = [boot_code]
        ## for var_name, var_val in vars.iteritems():
        ##     code_objects.append(
        ##             compile("%s=%r\n" % (var_name, var_val), var_name, "exec")
        ##     )
        ## if self.custom_boot_script:
        ##     code_object = compile(file(self.custom_boot_script, "U").read() + "\n",
        ##                           os.path.abspath(self.custom_boot_script), "exec")
        ##     code_objects.append(code_object)
        ## code_bytes = marshal.dumps(code_objects)

        code_objects = []

        # sys.executable has already been set in the run-stub

        # XXX should this be done in the exe-stub?
        code_objects.append(
            compile(
                "import os, sys; sys.base_prefix = sys.prefix = os.path.dirname(sys.executable); del os, sys",
                "<bootstrap2>",
                "exec",
                optimize=self.options.optimize))

        if self.options.bundle_files < 3:
            # XXX do we need this one?
            ## obj = compile("import sys, os; sys.path.append(os.path.dirname(sys.path[0])); del sys, os",
            ##               "<bootstrap>", "exec")
            ## code_objects.append(obj)
            obj = compile(
                "import zipextimporter; zipextimporter.install(); del zipextimporter",
                "<install zipextimporter>",
                "exec",
                optimize=self.options.optimize)
            code_objects.append(obj)

        for text in self.mf._boot_code:
            code_objects.append(
                compile(text,
                        "<boot hacks>",
                        "exec",
                        optimize=self.options.optimize))

        if target.exe_type == "service":

            cmdline_style = getattr(target, "cmdline_style", "py2exe")
            if cmdline_style not in ["py2exe", "pywin32", "custom"]:
                raise RuntimeError("cmdline_handler invalid")

            # code for services
            # cmdline_style is one of:
            # py2exe
            # pywin32
            # custom
            code_objects.append(
                compile("cmdline_style = %r; service_module_names = %r" % (
                    cmdline_style,
                    target.modules,
                ),
                        "<service_info>",
                        "exec",
                        optimize=self.options.optimize))

            boot_code = compile(pkgutil.get_data("py2exe", "boot_service.py"),
                                "boot_service.py",
                                "exec",
                                optimize=self.options.optimize)
            code_objects.append(boot_code)

        elif target.exe_type in ("console_exe", "windows_exe"):
            boot_code = compile(pkgutil.get_data("py2exe", "boot_common.py"),
                                "boot_common.py",
                                "exec",
                                optimize=self.options.optimize)

            code_objects.append(boot_code)

            with open(target.script, "rb") as script_file:
                code_objects.append(
                    compile(script_file.read() + b"\n",
                            os.path.basename(target.script),
                            "exec",
                            optimize=self.options.optimize))

        elif target.exe_type == "ctypes_comdll":
            code_objects.append(
                compile("com_module_names = %r" % target.modules,
                        "com_module_names",
                        "exec",
                        optimize=self.options.optimize))

            boot_code = compile(pkgutil.get_data("py2exe",
                                                 "boot_ctypes_com_server.py"),
                                "boot_ctypes_com_server.py",
                                "exec",
                                optimize=self.options.optimize)

            code_objects.append(boot_code)
        else:
            raise RuntimeError("target_type '%s' not yet supported" % target.exe_type)

        return marshal.dumps(code_objects)
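The comment at the top of _create_script_data describes the consuming side: the runtime unmarshals the list and execs each code object in order. A minimal sketch of that step (the real py2exe runner is not shown here, so run_script_data is purely illustrative):

import marshal

def run_script_data(blob):
    # Unmarshal the list built by _create_script_data and exec each code
    # object in order, sharing a single module namespace.
    namespace = {"__name__": "__main__"}
    for code_object in marshal.loads(blob):
        exec(code_object, namespace)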
Example #38
 def test_version_argument(self):
     # Python 2.4.0 crashes for any call to marshal.dumps(x, y)
     self.assertEqual(marshal.loads(marshal.dumps(5, 0)), 5)
     self.assertEqual(marshal.loads(marshal.dumps(5, 1)), 5)
Example #39
 def test_bug_5888452(self):
     # Simple-minded check for SF 588452: Debug build crashes
     marshal.dumps([128] * 1000)
Example #40
def write_pyc(f, codeobject, source_size=0, timestamp=0):
    f.write(MAGIC)
    _write32(f, timestamp)
    if tuple(sys.version_info[:2]) >= (3, 3):
        _write32(f, source_size)
    f.write(marshal.dumps(codeobject))
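This header layout (magic, timestamp, and a source-size field from 3.3 on) predates CPython 3.7, which inserted a 4-byte flags field per PEP 552. A sketch of the same idea for modern interpreters, using importlib.util.MAGIC_NUMBER instead of the module-level MAGIC the example assumes:

import importlib.util
import marshal
import struct

def write_pyc_modern(f, codeobject, source_size=0, timestamp=0):
    # .pyc header for CPython 3.7+: magic, flags, timestamp, source size,
    # then the marshalled code object.
    f.write(importlib.util.MAGIC_NUMBER)
    f.write(struct.pack("<I", 0))  # flags: 0 = timestamp-based invalidation
    f.write(struct.pack("<I", timestamp & 0xFFFFFFFF))
    f.write(struct.pack("<I", source_size & 0xFFFFFFFF))
    f.write(marshal.dumps(codeobject))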
Example #41
 def test_code(self):
     co = ExceptionTestCase.test_exceptions.__code__
     new = marshal.loads(marshal.dumps(co))
     self.assertEqual(co, new)
Example #42
 def test_patch_873224(self):
     self.assertRaises(Exception, marshal.loads, '0')
     self.assertRaises(Exception, marshal.loads, 'f')
     self.assertRaises(Exception, marshal.loads, marshal.dumps(5)[:-1])
Example #43
def _recurseTo(module_package, module_filename, module_relpath, module_kind,
               reason):
    from nuitka.tree import Building
    from nuitka.nodes.ModuleNodes import makeUncompiledPythonModule

    module, source_ref, source_filename = Building.decideModuleTree(
        filename=module_filename,
        package=module_package,
        is_top=False,
        is_main=False,
        is_shlib=module_kind == "shlib")

    # Check if the module name is known. In order to avoid duplicates,
    # learn the new filename, and continue build if its not.
    if not ImportCache.isImportedModuleByName(module.getFullName()):
        logRecursion("Recurse to import '%s' from '%s'. (%s)",
                     module.getFullName(), module_relpath, reason)

        if module_kind == "py" and source_filename is not None:
            try:
                source_code = readSourceCodeFromFilename(
                    module_name=module.getFullName(),
                    source_filename=source_filename)

                Building.createModuleTree(module=module,
                                          source_ref=source_ref,
                                          source_code=source_code,
                                          is_main=False)
            except (SyntaxError, IndentationError) as e:
                if module_filename not in Importing.warned_about:
                    Importing.warned_about.add(module_filename)

                    warning(
                        """\
Cannot recurse to import module '%s' (%s) because of '%s'""", module_relpath,
                        module_filename, e.__class__.__name__)

                return None, False
            except Building.CodeTooComplexCode:
                if module_filename not in Importing.warned_about:
                    Importing.warned_about.add(module_filename)

                    warning(
                        """\
Cannot recurse to import module '%s' (%s) because code is too complex.""",
                        module_relpath,
                        module_filename,
                    )

                    if Options.isStandaloneMode():
                        module = makeUncompiledPythonModule(
                            module_name=module.getFullName(),
                            filename=module_filename,
                            bytecode=marshal.dumps(
                                compile(source_code,
                                        module_filename,
                                        "exec",
                                        dont_inherit=True)),
                            is_package=module.isCompiledPythonPackage(),
                            user_provided=True,
                            technical=False)

                        ModuleRegistry.addUncompiledModule(module)

                return None, False

        ImportCache.addImportedModule(module)

        is_added = True
    else:
        module = ImportCache.getImportedModuleByName(module.getFullName())

        is_added = False

    return module, is_added
Example #44
 def test_dict(self):
     new = marshal.loads(marshal.dumps(self.d))
     self.assertEqual(self.d, new)
     new = roundtrip(self.d)
     self.assertEqual(self.d, new)
     os.unlink(support.TESTFN)
Example #45
def freeze(dist):
    """Freeze the given distribution data using cx_Freeze."""
    includes = dist.includes
    excludes = dist.excludes
    options = dist.freezer_options
    #  Merge in any encludes/excludes given in freezer_options
    for inc in options.pop("includes", ()):
        includes.append(inc)
    for exc in options.pop("excludes", ()):
        excludes.append(exc)
    if "esky" not in includes and "esky" not in excludes:
        includes.append("esky")
    if "pypy" not in includes and "pypy" not in excludes:
        excludes.append("pypy")
    #  cx_Freeze doesn't seem to respect __path__ properly; hack it so
    #  that the required distutils modules are always found correctly.
    def load_distutils(finder, module):
        module.path = distutils.__path__ + module.path
        finder.IncludeModule("distutils.dist")

    cx_Freeze.hooks.load_distutils = load_distutils
    #  Build kwds arguments out of the given freezer opts.
    kwds = {}
    for (nm, val) in options.iteritems():
        kwds[_normalise_opt_name(nm)] = val
    kwds["includes"] = includes
    kwds["excludes"] = excludes
    kwds["targetDir"] = dist.freeze_dir
    #  Build an Executable object for each script.
    #  To include the esky startup code, we write each to a tempdir.
    executables = []
    for exe in dist.get_executables():
        base = None
        if exe.gui_only and sys.platform == "win32":
            base = "Win32GUI"
        executables.append(
            cx_Freeze.Executable(exe.script,
                                 base=base,
                                 targetName=exe.name,
                                 icon=exe.icon,
                                 **exe._kwds))
    #  Freeze up the executables
    f = cx_Freeze.Freezer(executables, **kwds)
    f.Freeze()
    #  Copy data files into the freeze dir
    for (src, dst) in dist.get_data_files():
        dst = os.path.join(dist.freeze_dir, dst)
        dstdir = os.path.dirname(dst)
        if not os.path.isdir(dstdir):
            dist.mkpath(dstdir)
        dist.copy_file(src, dst)
    #  Copy package data into the library.zip
    #  For now, this only works if there's a shared "library.zip" file.
    if f.createLibraryZip:
        lib = zipfile.ZipFile(os.path.join(dist.freeze_dir, "library.zip"),
                              "a")
        for (src, arcnm) in dist.get_package_data():
            lib.write(src, arcnm)
        lib.close()
    else:
        for (src, arcnm) in dist.get_package_data():
            err = "use of package_data currently requires createLibraryZip=True"
            raise RuntimeError(err)
    #  Create the bootstrap code, using custom code if specified.
    code_source = ["__name__ = '__main__'"]
    esky_name = dist.distribution.get_name()
    code_source.append("__esky_name__ = %r" % (esky_name, ))
    code_source.append(inspect.getsource(esky.bootstrap))
    if dist.compile_bootstrap_exes:
        if sys.platform == "win32":
            #  Unfortunately this doesn't work, because the cxfreeze exe
            #  contains frozen modules that are inaccessible to a bootstrapped
            #  interpreter.  Disabled until I figure out a workaround. :-(
            pass
            #  The pypy-compiled bootstrap exe will try to load a python env
            #  into its own process and run this "take2" code to bootstrap.
            #take2_code = code_source[1:]
            #take2_code.append(_CUSTOM_WIN32_CHAINLOADER)
            #take2_code.append(dist.get_bootstrap_code())
            #take2_code = compile("\n".join(take2_code),"<string>","exec")
            #take2_code = marshal.dumps(take2_code)
            #clscript = "import marshal; "
            #clscript += "exec marshal.loads(%r); " % (take2_code,)
            #clscript = clscript.replace("%","%%")
            #clscript += "chainload(\"%s\")"
            #  Here's the actual source for the compiled bootstrap exe.
            #from esky.bdist_esky import pypy_libpython
            #code_source.append(inspect.getsource(pypy_libpython))
            #code_source.append("_PYPY_CHAINLOADER_SCRIPT = %r" % (clscript,))
            #code_source.append(_CUSTOM_PYPY_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source = "\n".join(code_source)
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue
            bsexe = dist.compile_to_bootstrap_exe(exe, code_source)
            if sys.platform == "win32":
                fexe = os.path.join(dist.freeze_dir, exe.name)
                winres.copy_safe_resources(fexe, bsexe)
    else:
        if sys.platform == "win32":
            code_source.append(_CUSTOM_WIN32_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source.append("bootstrap()")
        code_source = "\n".join(code_source)

        #  Since Python 3.3 the .pyc file format contains the source size.
        #  It's not used for anything at all except to check if the file is up to date.
        #  We can set this value to zero to make Esky also work for Python 3.3
        if sys.version_info[:2] < (3, 3):
            maincode = imp.get_magic() + struct.pack("<i", 0)
            eskycode = imp.get_magic() + struct.pack("<i", 0)
            eskybscode = imp.get_magic() + struct.pack("<i", 0)
        else:
            maincode = imp.get_magic() + struct.pack("<ii", 0, 0)
            eskycode = imp.get_magic() + struct.pack("<ii", 0, 0)
            eskybscode = eskycode = imp.get_magic() + struct.pack("<ii", 0, 0)

        maincode += marshal.dumps(
            compile(code_source, INITNAME + ".py", "exec"))
        eskycode += marshal.dumps(compile("", "esky/__init__.py", "exec"))
        eskybscode += marshal.dumps(compile("", "esky/bootstrap.py", "exec"))

        #  Copy any core dependencies
        if "fcntl" not in sys.builtin_module_names:
            for nm in os.listdir(dist.freeze_dir):
                if nm.startswith("fcntl"):
                    dist.copy_to_bootstrap_env(nm)
        for nm in os.listdir(dist.freeze_dir):
            if is_core_dependency(nm):
                dist.copy_to_bootstrap_env(nm)

        #  Copy the loader program for each script into the bootstrap env, and
        #  append the bootstrapping code to it as a zipfile.
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue

            exepath = dist.copy_to_bootstrap_env(exe.name)
            if not dist.detached_bootstrap_library:
                #  Append the bootstrap library directly to the exe.
                bslib = zipfile.PyZipFile(exepath, "a", zipfile.ZIP_STORED)
            else:
                #  Create a separate library.zip for the bootstrap exe.
                bslib_path = dist.copy_to_bootstrap_env("library.zip")
                bslib = zipfile.PyZipFile(bslib_path, "w", zipfile.ZIP_STORED)
            cdate = (2000, 1, 1, 0, 0, 0)
            bslib.writestr(zipfile.ZipInfo(INITNAME + ".pyc", cdate), maincode)
            bslib.writestr(zipfile.ZipInfo("esky/__init__.pyc", cdate),
                           eskycode)
            bslib.writestr(zipfile.ZipInfo("esky/bootstrap.pyc", cdate),
                           eskybscode)
            bslib.close()
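
Because the bootstrap data above is appended as an ordinary zip archive (whose central
directory sits at the end of the file), the result can be sanity-checked directly with
zipfile; a minimal sketch, with a purely hypothetical exe path:

import zipfile
with zipfile.ZipFile("dist/MyApp.exe") as zf:   # hypothetical bootstrap exe
    print(zf.namelist())  # should list the INITNAME and esky/*.pyc entries written above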
Beispiel #46
0
 def test_exceptions(self):
     new = marshal.loads(marshal.dumps(StopIteration))
     self.assertEqual(StopIteration, new)
Beispiel #47
0
s.bind(('', 12346))
s.listen(5)
while True:
    a = s.accept()[0]
    print 'reading'
    txt = ''
    while True:
        txt += a.recv(10240)
        if txt.endswith('\n.\n'):
            break
    name, txt = txt.split('\n', 1)
    assert '/' not in name
    assert '..' not in name
    if name == 'repl':
        try:
            ret = marshal.dumps(compile(txt.split('\n.\n')[0], name, 'single'))
            print 21, repr(ret)
            a.send(ret)
            continue
        except Exception:
            import traceback
            a.send(traceback.format_exc())
            continue
    f = open(name, 'w')
    f.write(txt.split('\n.\n')[0])
    f.close()
    success = pexpect.run('python -m compileall %s' % name)
    print success
    if os.path.exists(name + 'c'):
        f = open(name + 'c')
        d = f.read()
Beispiel #48
0
 def test_marshal(self):
     import marshal
     self.assertIs(marshal.loads(marshal.dumps(True)), True)
     self.assertIs(marshal.loads(marshal.dumps(False)), False)
Beispiel #49
0
 def runforked():
     try:
         reports = runtestprotocol(item, log=False)
     except KeyboardInterrupt:
         py.std.os._exit(EXITSTATUS_TESTEXIT)
     return marshal.dumps([serialize_report(x) for x in reports])
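
On the receiving side the parent simply reverses the call; a minimal sketch, where
`payload` stands for whatever bytes the forked child handed back:

import marshal
reports = marshal.loads(payload)  # 'payload': bytes from the forked child (placeholder)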
Beispiel #50
0
    def found_terminator (self):
        self.buffer, data = [], string.join (self.buffer, '')

        if self.pstate is self.STATE_LENGTH:
            packet_length = string.atoi (data, 16)
            self.set_terminator (packet_length)
            self.pstate = self.STATE_PACKET
        else:

            self.set_terminator (8)
            self.pstate = self.STATE_LENGTH

            oid, kind, arg = marshal.loads (data)

            obj, refcnt = self.proxies[oid]
            e = None
            reply_kind = 2

            try:
                if kind == 0:
                    # __call__
                    result = obj(*arg)
                elif kind == 1:
                    # __getattr__
                    result = getattr (obj, arg)
                elif kind == 2:
                    # __setattr__
                    key, value = arg
                    setattr (obj, key, value)
                    result = None
                elif kind == 3:
                    # __repr__
                    result = repr(obj)
                elif kind == 4:
                    # __del__
                    self.forget_reference (oid)
                    result = None
                elif kind == 5:
                    # __getitem__
                    result = obj[arg]
                elif kind == 6:
                    # __setitem__
                    (key, value) = arg
                    obj[key] = value
                    result = None
                elif kind == 7:
                    # __len__
                    result = len(obj)

            except:
                reply_kind = 1
                (file,fun,line), t, v, tbinfo = asyncore.compact_traceback()
                result = '%s:%s:%s:%s (%s:%s)' % (MY_NAME, file, fun, line, t, str(v))
                self.log_info (result, 'error')
                self.exception_counter.increment()

            self.request_counter.increment()

            # optimize a common case
            if type(result) is types.InstanceType:
                can_marshal = 0
            else:
                can_marshal = 1

            try:
                rb = marshal.dumps ((reply_kind, result))
            except ValueError:
                can_marshal = 0

            if not can_marshal:
                # unmarshallable object, return a reference
                rid = id(result)
                self.new_reference (result)
                rb = marshal.dumps ((0, rid))

            self.push_with_producer (
                    scanning_producer (
                            ('%08x' % len(rb)) + rb,
                            buffer_size = 65536
                            )
                    )
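
The reference fallback above exists because marshal only handles core built-in types;
anything else raises ValueError at dump time. A minimal illustration:

import marshal

class Thing(object):
    pass

try:
    marshal.dumps(Thing())
except ValueError as exc:
    print("unmarshallable:", exc)  # so the server sends an object reference instead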
Beispiel #51
0
def pickle_code(co):
    "Return the unpickle function and a 1-tuple holding the marshalled code object."
    assert isinstance(co, types.CodeType)
    ms = marshal.dumps(co)
    return unpickle_code, (ms, )
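
pickle_code is one half of the usual copyreg recipe for making code objects picklable;
a minimal sketch of the matching pieces (Python 3 names, assumed rather than taken from
the original project):

import copyreg
import marshal
import types

def unpickle_code(ms):
    co = marshal.loads(ms)
    assert isinstance(co, types.CodeType)
    return co

# Teach pickle how to handle code objects via the marshal round-trip above.
copyreg.pickle(types.CodeType, pickle_code, unpickle_code)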
Beispiel #52
0
def process_common(template, progress, code, rsrcname, destname, is_update,
        copy_codefragment, raw=0, others=[], filename=None, destroot=""):
    if MacOS.runtimemodel == 'macho':
        return process_common_macho(template, progress, code, rsrcname, destname,
            is_update, raw, others, filename, destroot)
    if others:
        raise BuildError, "Extra files only allowed for MachoPython applets"
    # Create FSSpecs for the various files
    template_fsr, d1, d2 = Carbon.File.FSResolveAliasFile(template, 1)
    template = template_fsr.as_pathname()

    # Copy data (not resources, yet) from the template
    if progress:
        progress.label("Copy data fork...")
        progress.set(10)

    if copy_codefragment:
        tmpl = open(template, "rb")
        dest = open(destname, "wb")
        data = tmpl.read()
        if data:
            dest.write(data)
        dest.close()
        tmpl.close()
        del dest
        del tmpl

    # Open the output resource fork

    if progress:
        progress.label("Copy resources...")
        progress.set(20)
    try:
        output = Res.FSOpenResourceFile(destname, RESOURCE_FORK_NAME, WRITE)
    except MacOS.Error:
        destdir, destfile = os.path.split(destname)
        Res.FSCreateResourceFile(destdir, unicode(destfile), RESOURCE_FORK_NAME)
        output = Res.FSOpenResourceFile(destname, RESOURCE_FORK_NAME, WRITE)

    # Copy the resources from the target specific resource template, if any
    typesfound, ownertype = [], None
    try:
        input = Res.FSOpenResourceFile(rsrcname, RESOURCE_FORK_NAME, READ)
    except (MacOS.Error, ValueError):
        pass
        if progress:
            progress.inc(50)
    else:
        if is_update:
            skip_oldfile = ['cfrg']
        else:
            skip_oldfile = []
        typesfound, ownertype = copyres(input, output, skip_oldfile, 0, progress)
        Res.CloseResFile(input)

    # Check which resource-types we should not copy from the template
    skiptypes = []
    if 'vers' in typesfound: skiptypes.append('vers')
    if 'SIZE' in typesfound: skiptypes.append('SIZE')
    if 'BNDL' in typesfound: skiptypes = skiptypes + ['BNDL', 'FREF', 'icl4',
            'icl8', 'ics4', 'ics8', 'ICN#', 'ics#']
    if not copy_codefragment:
        skiptypes.append('cfrg')
##  skipowner = (ownertype != None)

    # Copy the resources from the template

    input = Res.FSOpenResourceFile(template, RESOURCE_FORK_NAME, READ)
    dummy, tmplowner = copyres(input, output, skiptypes, 1, progress)

    Res.CloseResFile(input)
##  if ownertype is None:
##      raise BuildError, "No owner resource found in either resource file or template"
    # Make sure we're manipulating the output resource file now

    Res.UseResFile(output)

    if ownertype is None:
        # No owner resource in the template. We have skipped the
        # Python owner resource, so we have to add our own. The relevant
        # bundle stuff is already included in the interpret/applet template.
        newres = Res.Resource('\0')
        newres.AddResource(DEFAULT_APPLET_CREATOR, 0, "Owner resource")
        ownertype = DEFAULT_APPLET_CREATOR

    if code:
        # Delete any existing 'PYC ' resource named __main__

        try:
            res = Res.Get1NamedResource(RESTYPE, RESNAME)
            res.RemoveResource()
        except Res.Error:
            pass

        # Create the raw data for the resource from the code object
        if progress:
            progress.label("Write PYC resource...")
            progress.set(120)

        data = marshal.dumps(code)
        del code
        data = (MAGIC + '\0\0\0\0') + data

        # Create the resource and write it

        id = 0
        while id < 128:
            id = Res.Unique1ID(RESTYPE)
        res = Res.Resource(data)
        res.AddResource(RESTYPE, id, RESNAME)
        attrs = res.GetResAttrs()
        attrs = attrs | 0x04    # set preload
        res.SetResAttrs(attrs)
        res.WriteResource()
        res.ReleaseResource()

    # Close the output file

    Res.CloseResFile(output)

    # Now set the creator, type and bundle bit of the destination.
    # Done with FSSpec's, FSRef FInfo isn't good enough yet (2.3a1+)
    dest_fss = Carbon.File.FSSpec(destname)
    dest_finfo = dest_fss.FSpGetFInfo()
    dest_finfo.Creator = ownertype
    dest_finfo.Type = 'APPL'
    dest_finfo.Flags = dest_finfo.Flags | Carbon.Files.kHasBundle | Carbon.Files.kIsShared
    dest_finfo.Flags = dest_finfo.Flags & ~Carbon.Files.kHasBeenInited
    dest_fss.FSpSetFInfo(dest_finfo)

    macostools.touched(destname)
    if progress:
        progress.label("Done.")
        progress.inc(0)
Beispiel #53
0
            m = RE_CODING.match(line)
            if m:
                encoding = m.group(1).decode('ascii')
                break

        # Someone has set us up the BOM! According to PEP 263 the file should
        # be interpreted as UTF-8.
        if source.startswith(b'\xef\xbb\xbf'):
            encoding = 'utf-8'
            source = source[3:]

        source_bytes = source
        source = source.decode(encoding)

        code = compile(source, name, 'exec', optimize=optimize_level)
        bytecode = marshal.dumps(code)

        if output_mode == b'bytecode':
            out = bytecode
        elif output_mode == b'pyc-checked-hash':
            source_hash = importlib.util.source_hash(source_bytes)
            out = importlib._bootstrap_external._code_to_hash_pyc(
                code,
                source_hash,
                checked=True,
            )
        elif output_mode == b'pyc-unchecked-hash':
            source_hash = importlib.util.source_hash(source_bytes)
            out = importlib._bootstrap_external._code_to_hash_pyc(
                code,
                source_hash,
                checked=False,
            )
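
importlib.util.source_hash (Python 3.7+) is what produces the digest stored in a
hash-based .pyc header; a minimal illustration:

import importlib.util
digest = importlib.util.source_hash(b"print('hi')\n")
print(len(digest), digest.hex())  # 8 bytes, written verbatim into the pyc header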
Beispiel #54
0
def main():
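    # The prompts below are Indonesian: "Hasil" = "result", "Yang Bener Dong" = "come on,
    # do it properly", and "Terima kasih Udah pake" = "thanks for using this".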
    choice = raw_input('\x1b[32;1m(\x1b[37;1menc\x1b[31;1mX\x1b[37;1mrip\x1b[32;1m)\x1b[37;1m> \x1b[33;1m')
    if choice == '1' or choice == '01':
        try:
            file = raw_input('\x1b[37;1m[\x1b[32;1m+\x1b[37;1m] \x1b[32;1mFile: \x1b[37;1m')
            fileopen = open(file).read()
            a = compile(fileopen, 'dg', 'exec')
            m = marshal.dumps(a)
            s = repr(m)
            b = 'import marshal\nexec(marshal.loads(' + s + '))'
            c = file.replace('.py', '_.py')
            d = open(c, 'w')
            d.write(b)
            d.close()
            print '\x1b[37;1m[\x1b[31;1m-\x1b[37;1m] \x1b[37;1mHasil: \x1b[32;1m', c
            main()
        except:
            print '\x1b[32;1m[\x1b[31;1m!\x1b[32;1m] \x1b[31;1mYang Bener Dong'
            sys.exit()
            main()

    if choice == '2' or choice == '02':
        try:
            file = raw_input('\x1b[37;1m[\x1b[32;1m+\x1b[37;1m] \x1b[32;1mFile: \x1b[37;1m')
            fileopen = open(file).read()
            a = base64.b16encode(fileopen)
            b = "import base64\nexec(base64.b16decode('" + a + "'))"
            c = file.replace('.py', '_.py')
            d = open(c, 'w')
            d.write(b)
            d.close()
            print '\x1b[37;1m[\x1b[31;1m-\x1b[37;1m] \x1b[37;1mHasil: \x1b[32;1m', c
            main()
        except:
            print '\x1b[32;1m[\x1b[31;1m!\x1b[32;1m] \x1b[31;1mYang Bener Dong'
            sys.exit()
            main()

    if choice == '3' or choice == '03':
        try:
            file = raw_input('\x1b[37;1m[\x1b[32;1m+\x1b[37;1m] \x1b[32;1mFile: \x1b[37;1m')
            fileopen = open(file).read()
            a = base64.b32encode(fileopen)
            b = "import base64\nexec(base64.b32decode('" + a + "'))"
            c = file.replace('.py', '_.py')
            d = open(c, 'w')
            d.write(b)
            d.close()
            print '\x1b[37;1m[\x1b[31;1m-\x1b[37;1m] \x1b[37;1mHasil: \x1b[32;1m', c
            main()
        except:
            print '\x1b[32;1m[\x1b[31;1m!\x1b[32;1m] \x1b[31;1mYang Bener Dong'
            sys.exit()
            main()

    if choice == '4' or choice == '04':
        try:
            file = raw_input('\x1b[37;1m[\x1b[32;1m+\x1b[37;1m] \x1b[32;1mFile: \x1b[37;1m')
            fileopen = open(file).read()
            a = base64.b64encode(fileopen)
            b = "import base64\nexec(base64.b64decode('" + a + "'))"
            c = file.replace('.py', '_.py')
            d = open(c, 'w')
            d.write(b)
            d.close()
            print '\x1b[37;1m[\x1b[31;1m-\x1b[37;1m] \x1b[37;1mHasil: \x1b[32;1m', c
            main()
        except:
            print '\x1b[32;1m[\x1b[31;1m!\x1b[32;1m] \x1b[31;1mYang Bener Dong'
            sys.exit()
            main()

    if choice == '5' or choice == '05':
        print 'Terima kasih Udah pake'
        time.sleep(2)
        os.system('exit')
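
The wrappers produced above are obfuscation rather than encryption: anyone running the
same interpreter version can unwrap them again. A minimal sketch, where `blob` stands
for the marshal bytes literal embedded by option 1:

import dis
import marshal
# 'blob' is a hypothetical placeholder for the bytes literal embedded by the wrapper.
code = marshal.loads(blob)  # must run under the interpreter version that produced it
dis.dis(code)               # or hand `code` to a decompiler to recover readable source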
Beispiel #55
0
 def test_unicode(self):
     for s in ["", "Andr\xe8 Previn", "abc", " " * 10000]:
         self.helper(marshal.loads(marshal.dumps(s)))
Beispiel #56
0
 def register_task(self, task_name, code_block):
   # Serialize the code_block so we can send it across the wire to the child
   serialized_code = marshal.dumps(code_block.func_code)\
                            .encode('base64')
   self._task_registry.register_task(task_name, serialized_code)
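
This snippet is Python 2 (`func_code`, the `'base64'` string codec); a rough Python 3
equivalent, with assumed names and no claim about the original project's API:

import base64
import marshal
import types

def serialize_function(fn):
    # Works for plain functions; closures and decorated callables need extra care.
    return base64.b64encode(marshal.dumps(fn.__code__)).decode("ascii")

def deserialize_function(payload, name="task"):
    code = marshal.loads(base64.b64decode(payload))
    return types.FunctionType(code, globals(), name)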
Beispiel #57
0
 def test_eof(self):
     data = marshal.dumps(("hello", "dolly", None))
     for i in range(len(data)):
         self.assertRaises(EOFError, marshal.loads, data[0:i])
Beispiel #58
0
 def dumps(self, obj):
     return marshal.dumps(obj)
Beispiel #59
0
 def test_large_marshal(self):
     size = int(1e6)
     testString = 'abc' * size
     marshal.dumps(testString)
Beispiel #60
0
 def testNoIntern(self):
     s = marshal.loads(marshal.dumps(self.strobj, 2))
     self.assertEqual(s, self.strobj)
     self.assertNotEqual(id(s), id(self.strobj))
     s2 = sys.intern(s)
     self.assertNotEqual(id(s2), id(s))