Example no. 1
 def fromStream(cls, stream):
     """Return header object from the stream."""
     stream.seek(0)
     _data = stream.read(32)
     _data_array = bytearray(_data)
     (_cnt, _hdrLen, _recLen) = struct.unpack("<I2H", _data[4:12])
     #reserved = _data[12:32]
     _year = _data_array[1]
     if _year < 80:
         # dBase II started in 1980.  It is quite unlikely
         # that the actual last-update date is before that year.
         _year += 2000
     else:
         _year += 1900
     ## create header object
     _obj = cls(None, _hdrLen, _recLen, _cnt, _data_array[0],
         (_year, _data_array[2], _data_array[3]))
     ## append field definitions
     # position 0 is for the deletion flag
     _pos = 1
     _data = stream.read(1)
     while to_str(_data[0]) != "\x0D":
         _data += stream.read(31)
         c = to_str(_data[11])
         _fld = fields.lookupFor(c).fromString(_data, _pos)
         _obj._addField(_fld)
         _pos = _fld.end
         _data = stream.read(1)
     return _obj
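
A hypothetical usage sketch, not from the original project (the class name "DbfHeader" and the file name are assumptions): the classmethod expects a binary stream and seeks to the start itself.

# hypothetical usage; "DbfHeader" stands in for whatever class defines fromStream()
with open("contacts.dbf", "rb") as stream:
    header = DbfHeader.fromStream(stream)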
Example no. 2
def save_uploaded_file_real(fname_list, fprefix, fobj, filename, schd_id, release, environ_items):
    fname = fprefix + to_str(os.path.splitext(filename)[1])
    fname_mail = fname.decode('utf-8').encode('ascii', 'replace') # total hack until I have time to fix encoding
    fname_list.append(fname)
    try:
        metadata = "schedule_id: %s\n" %(schd_id, )
        metadata += "release_to: %s\n" %(",".join(release), )
        metadata += "original_name: %s\n" %(filename, )
        metadata += "\n".join(
            "%s: %s" %(k, v)
            for (k, v) in sorted(environ_items)
            if not (k.startswith("wsgi.") or k.startswith("werkzeug."))
        )
        write_file(fprefix + "-log.txt", io.BytesIO(to_str(metadata)))
        res = write_file(fname, fobj)
        metadata += "\n\nresult: %r" %(res, )
        write_file(fprefix + "-log.txt", io.BytesIO(to_str(metadata)))
    finally:
        fobj.close()

    if not environ.get("NO_DROPBOX"):
        db = get_dropbox_client()
        share = db.sharing_create_shared_link(res.path_lower)
        send_email(
            [x.strip() for x in os.environ["MAIL_RECIPIENTS"].split(",")],
            "New PyCon slide upload",
            "\n".join([
                "New file: %s" %(fname_mail, ),
                "Download: %s" %(share.url, ),
                "Original file name: %s" %(filename, ),
            ]),
        )
Example no. 3
def save_uploaded_file_real(fname_list, fprefix, fobj, filename, schd_id, release, environ_items):
    fname = fprefix + to_str(os.path.splitext(filename)[1])
    fname_list.append(fname)

    try:
        metadata = "schedule_id: %s\n" %(schd_id, )
        metadata += "release_to: %s\n" %(",".join(release), )
        metadata += "original_name: %s\n" %(filename, )
        metadata += "\n".join(
            "%s: %s" %(k, v)
            for (k, v) in sorted(environ_items)
            if not (k.startswith("wsgi.") or k.startswith("werkzeug."))
        )
        write_file(fprefix + "-log.txt", io.BytesIO(to_str(metadata)))
        res = write_file(fname, fobj)
        metadata += "\n\nresult: %r" %(res, )
        write_file(fprefix + "-log.txt", io.BytesIO(to_str(metadata)))
    finally:
        fobj.close()

    if not environ.get("NO_DROPBOX"):
        db = get_dropbox_client()
        share = db.media(res["path"])
        send_email(
            [x.strip() for x in os.environ["MAIL_RECIPIENTS"].split(",")],
            "New PyCon slide upload",
            "\n".join([
                "New file: %s" %(fname, ),
                "Download: %s" %(share["url"], ),
                "Original file name: %s" %(filename, ),
            ]),
        )
Example no. 4
def file_generator(boundary, name, file):
    name = to_str(name)
    filename = to_str(os.path.basename(file.name))
    
    yield '--' + boundary
    yield 'Content-Disposition: form-data; name="%s"; filename="%s"' % (name, filename)
    yield 'Content-Type: application/octet-stream'
    yield ''
    yield file.read()
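
A minimal assembly sketch, not taken from the original project: the parts yielded above are str except for file.read(), so a joiner has to encode the text parts before gluing everything together with CRLF and a closing boundary (the terminating "--boundary--" line is an assumption).

def build_multipart_body(boundary, name, file):
    # collect the yielded parts and append the closing boundary line
    parts = list(file_generator(boundary, name, file)) + ['--' + boundary + '--', '']
    # encode str parts so they can be joined with the bytes returned by file.read()
    return b'\r\n'.join(p if isinstance(p, bytes) else p.encode('utf-8')
                        for p in parts)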
Example no. 5
 def prepare(self,
             escape_func=html_escape,
             noescape=False,
             syntax=None, **ka):
     self.cache = {}
     enc = self.encoding
     self._str = lambda x: to_str(x, enc)
     self._escape = lambda x: escape_func(to_str(x, enc))
     self.syntax = syntax
     if noescape:
         self._str, self._escape = self._escape, self._str
Example no. 6
    def get_cached_items(self, asins):
        """
        Retrieve a dict whose form is {ASIN => item details}.
        The dict does not contain keys which don't exist in the cache.
        """
        keys = [to_bytes(asin) for asin in asins]  # key must be bytes
        cached_json_items = self.cache.get_multi(keys, key_prefix=self.key_prefix)
        cached_items = {}
        for key, value in cached_json_items.items():
            # Although pylibmc automatically pickle dicts or objects,
            # JSON is more portable.
            # Convert key and value into (Py3) str
            cached_items[to_str(key)] = json.loads(to_str(value))

        return cached_items
Example no. 7
 def __init__(self, source, syntax=None, encoding='utf8'):
     self.source, self.encoding = to_str(source, encoding), encoding
     self.set_syntax(syntax or self.default_syntax)
     self.code_buffer, self.text_buffer = [], []
     self.lineno, self.offset = 1, 0
     self.indent, self.indent_mod = 0, 0
     self.paren_depth = 0
Example no. 8
    def get_alternate_versions(self, asins):
        """
        Retrieve a dict whose form is {ASIN => ASIN of Kindle edition}
        The dict does not contain the keys of items which do not have a Kindle edition.

        asins: list of ASINs
        """

        asins_text = ','.join(asins)
        logger.debug('Calling API for AlternateVersions. asins: %s' % asins_text)
        response = self.api.ItemLookup(ItemId=asins_text,
                                       ResponseGroup='AlternateVersions')
        root = _parse_response(response)
        #print(etree.tostring(response, pretty_print = True))

        alternate_asins = {}
        for item in root.Items.Item:
            asin = item.ASIN.text
            alternate_asin = None

            #print(item.ItemAttributes.Title.text.encode('utf-8'))
            if hasattr(item, 'AlternateVersions'):
                for alternate_version in item.AlternateVersions.AlternateVersion:
                    if alternate_version.Binding in KINDLE_BINDINGS:
                        # When a Kindle edition exists
                        alternate_asin = to_str(alternate_version.ASIN.text)
                        break

            if alternate_asin:
                alternate_asins[asin] = alternate_asin

        return alternate_asins
Example no. 9
 def put(self, location, options):
     logger.debug('Caching: %s', location)
     print(options)
     return self._insert({
                 'location': location,
                 'as_string': to_str(location),
                 'options': options
             })
Example no. 10
    def _geocode(self, city):
        """Geocode a city

        Args:
         city: city string, eg "Dallas"
         location_cache: cache that supports get and put methods, both of which return a dictionary
                         {
                            'location': {
                                'city': 'City Name',
                                'state': 'State Name',
                                'country': 'Country Name'
                            },
                            'as_string': to_str(location),
                            'options': [
                                ['Guessed location 1',
                                    [
                                        lat: decimal,
                                        lon: decimal
                                    ]
                                ],
                                ['Guessed location 2',
                                    [
                                        lat: decimal,
                                        lon: decimal
                                    ]
                                ],
                            ]
                         }
        Returns:
         List of guessed locations, value for key options
        """
        location = cities[city]
        loc = to_str(location)

        if self._cache:
            logger.info("Trying location: %s from cache", loc)
            coordinates = self._cache.get(loc)
            if coordinates:
                logger.info("Location: %s found in cache", loc)
                return coordinates

        try:
            logger.info("Grabbing coordinates from Google geocoder")
            coordinates = self._geocode_external(loc)
            coordinates = self._process_raw_locations(coordinates)

            if self._cache and coordinates:
                logger.info("Saving location: %s to cache", location)
                self._cache.put(location, coordinates)
        except (TypeError, UnicodeEncodeError, ValueError) as e:
            logger.error("Failed to geocode %s", loc)
            coordinates = []

        return {
                'as_string': loc,
                'location': location,
                'options': coordinates
            }
Example no. 11
    def get_form_data(self) -> str:
        content_length_as_str = self.headers.get("Content-Length", 0)
        content_length = int(content_length_as_str)

        if not content_length:
            return ""

        payload_as_bytes = self.rfile.read(content_length)
        payload = utils.to_str(payload_as_bytes)

        return payload
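
A hedged follow-up sketch, not part of the original handler: splitting the returned payload into form fields, assuming the request body is application/x-www-form-urlencoded.

from urllib.parse import parse_qs

def get_form_fields(handler):
    # turn "a=1&b=2" into {"a": "1", "b": "2"}, keeping only the first value per key
    payload = handler.get_form_data()
    return {k: v[0] for k, v in parse_qs(payload, keep_blank_values=True).items()}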
Example no. 12
 def _get_old_cert(commonname):
     certfile = os.path.join(CertUtil.ca_certdir,
                             utils.to_str(commonname) + '.crt')
     if os.path.exists(certfile):
         with open(certfile, 'rb') as fp:
             cert = OpenSSL.crypto.load_certificate(
                 OpenSSL.crypto.FILETYPE_PEM, fp.read())
         if datetime.datetime.strptime(
                 utils.to_str(cert.get_notAfter()), '%Y%m%d%H%M%SZ'
         ) < datetime.datetime.utcnow() + datetime.timedelta(days=30):
             try:
                 os.remove(certfile)
             except OSError as e:
                 xlog.warning(
                     'CertUtil._get_old_cert failed: unable to remove outdated cert, %r',
                     e)
             else:
                 return
             # well, have to use the old one
         return certfile
Example no. 13
    def do_POST(self):
        self.headers = utils.to_str(self.headers)
        self.path = utils.to_str(self.path)

        refer = self.headers.get('Referer')
        if refer:
            refer_loc = urllib.parse.urlparse(refer).netloc
            host = self.headers.get('Host')
            if refer_loc != host:
                xlog.warn("web control ref:%s host:%s", refer_loc, host)
                return

        try:
            ctype, pdict = cgi.parse_header(self.headers.get('Content-Type', ""))
            if ctype == 'multipart/form-data':
                self.postvars = cgi.parse_multipart(self.rfile, pdict)
            elif ctype == 'application/x-www-form-urlencoded':
                length = int(self.headers.get('Content-Length'))
                self.postvars = urllib.parse.parse_qs(self.rfile.read(length), keep_blank_values=True)
            else:
                self.postvars = {}
        except Exception as e:
            xlog.exception("do_POST %s except:%r", self.path, e)
            self.postvars = {}

        #url_path = urlparse.urlparse(self.path).path
        url_path_list = self.path.split('/')
        if len(url_path_list) >= 3 and url_path_list[1] == "module":
            module = url_path_list[2]
            if len(url_path_list) >= 4 and url_path_list[3] == "control":
                if module not in module_init.proc_handler:
                    xlog.warn("request %s no module in path", self.path)
                    return self.send_not_found()

                path = '/' + '/'.join(url_path_list[4:])
                controler = module_init.proc_handler[module]["imp"].local.web_control.ControlHandler(self.client_address, self.headers, self.command, path, self.rfile, self.wfile)
                controler.postvars = utils.to_str(self.postvars)
                return controler.do_POST()

        self.send_not_found()
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
Example no. 14
    def get_new_lines(self, from_no):
        self.buffer_lock.acquire()
        jd = {}
        first_no = self.last_no - len(self.buffer) + 1
        if from_no < first_no:
            from_no = first_no

        if self.last_no >= from_no:
            for i in range(from_no, self.last_no + 1):
                jd[i] = utils.to_str(self.buffer[i])
        self.buffer_lock.release()
        return json.dumps(jd)
Example no. 15
    def get_item_details(self, asins):
        """
        Retrieve a dict whose form is {ASIN => dict of Item detail}
        """

        asins_text = ','.join(asins)
        logger.debug('Calling API for Small. asins: %s' % asins_text)
        response = self.api.ItemLookup(ItemId=asins_text, ResponseGroup='Small')
        root = _parse_response(response)
        #print(etree.tostring(response, pretty_print = True))

        items = {}
        for item in root.Items.Item:
            asin = to_str(item.ASIN.text)
            items[asin] = {
                'asin': asin,
                'title': to_str(item.ItemAttributes.Title.text),
                'url': to_str(item.DetailPageURL.text),
            }

        return items
Example no. 16
def get_line_value(r, n):
    r = utils.to_str(r)
    rls = r.split("\r\n")
    if len(rls) < n + 1:
        return None

    lp = rls[n].split(":")
    if len(lp) < 2:
        return None

    value = lp[1].strip()
    return value
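
For illustration only (the response bytes below are made up, and utils.to_str is assumed to decode bytes to str): lines are indexed by CRLF position, and because the split on ":" is not limited, anything after a second colon is dropped.

raw = b"HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nHost: example.com:8080\r\n\r\n"
get_line_value(raw, 1)   # -> "text/html"
get_line_value(raw, 2)   # -> "example.com" (":8080" is lost to the unlimited split)
get_line_value(raw, 0)   # -> None (the status line contains no ":")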
Example no. 17
    def do_GET(self):
        path = urllib.parse.urlparse(self.path).path # '/proxy.pac'
        path = utils.to_str(path)
        self.headers = utils.to_str(self.headers)

        filename = os.path.normpath('./' + path)
        if filename != 'proxy.pac':
            xlog.warn("pac_server GET %s fail", self.path)
            return self.send_not_found()

        host = self.headers.get('Host')
        host, _, port = host.rpartition(":")

        if g.config.pac_policy == "black_GAE":
            content = self.policy_black_port(host, "%s" % g.gae_proxy_listen_port)
        elif g.config.pac_policy == "black_X-Tunnel":
            content = self.policy_black_port(host, "%s" % g.x_tunnel_socks_port)
        else:
            content = self.policy_smart_router(host)

        self.send_response('application/x-ns-proxy-autoconfig', content)
Example no. 18
    def _get_cert(commonname, isip=False, sans=None):
        cert = OpenSSL.crypto.X509()
        cert.set_version(2)
        # use a unique serial number: browsers refuse an updated cert that reuses a fixed serial number
        serial_number = int(
            (int(time.time() - CertUtil.serial_reduce) + random.random()) *
            100)
        while 1:
            try:
                cert.set_serial_number(serial_number)
            except OpenSSL.SSL.Error:
                serial_number += 1
            else:
                break
        subj = cert.get_subject()
        subj.countryName = 'CN'
        subj.stateOrProvinceName = 'Internet'
        subj.localityName = 'Cernet'
        subj.organizationalUnitName = '%s Branch' % CertUtil.ca_vendor
        subj.commonName = commonname
        subj.organizationName = commonname
        cert.gmtime_adj_notBefore(-600)  # avoid cert time error warnings
        cert.gmtime_adj_notAfter(CertUtil.cert_validity)
        cert.set_issuer(CertUtil.ca_subject)
        if CertUtil.cert_publickey:
            pkey = CertUtil.cert_publickey
        else:
            pkey = OpenSSL.crypto.PKey()
            pkey.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
        cert.set_pubkey(pkey)

        sans = set(sans) if sans else set()
        sans.add(commonname)
        if isip:
            sans = b'IP: ' + commonname
        else:
            sans = b'DNS: %s, DNS: *.%s' % (commonname, commonname)
        cert.add_extensions(
            [OpenSSL.crypto.X509Extension(b'subjectAltName', True, sans)])

        cert.sign(CertUtil.ca_privatekey, CertUtil.ca_digest)

        certfile = os.path.join(CertUtil.ca_certdir,
                                utils.to_str(commonname) + '.crt')
        with open(certfile, 'wb') as fp:
            fp.write(
                OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,
                                                cert))
            if CertUtil.cert_publickey is None:
                fp.write(
                    OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                                   pkey))
        return certfile
Example no. 19
def upload_logs_thread():
    sleep(3 * 60)
    while g.running:
        if not g.running or not g.server_host or not g.session or g.session.last_receive_time == 0:
            time.sleep(1)
        else:
            break

    sleep(30)
    session_id = utils.to_str(g.session.session_id)
    data = pack_logs()
    upload(session_id, data)
Example no. 20
    def __str__(self):
        s = utils.to_str(self.id, " ", self.x, " ", self.y, " ",
                         self.health, " ", self.max_health, " ",
                         self.shield, " ", self.max_shield, " ",
                         self.energy, " ", self.maxCD, " ",
                         self.groundCD, " ", self.airCD, " ", self.idle, " ",
                         self.visible, " ", self.type, " ", self.armor, " ",
                         self.shieldArmor, " ", self.size, " ",
                         self.pixel_x, " ", self.pixel_y, " ",
                         self.pixel_size_x, " ", self.pixel_size_y, " ",
                         self.groundATK, " ", self.airATK, " ",
                         self.groundDmgType, " ",
                         self.airDmgType, " ", self.groundRange, " ",
                         self.airRange, " ")

        s += utils.to_str(len(self.orders), " ")
        for c in self.orders:
            s += utils.to_str(c.first_frame, " ",
                              c.type, " ", c.targetId, " ",
                              c.targetX, " ", c.targetY, " ")

        s += utils.to_str(self.velocityX, " ", self.velocityY)
        s += utils.to_str(" ", self.playerId)
        s += utils.to_str(" ", self.resources)
        return s
Example no. 21
 def code(self):
     source = self.source
     if not source:
         with open(self.filename, 'rb') as f:
             source = f.read()
     try:
         source, encoding = to_str(source), 'utf8'
     except UnicodeError:
         raise depr(0, 11, 'Unsupported template encodings.', 'Use utf-8 for templates.')
     parser = StplParser(source, encoding=encoding, syntax=self.syntax)
     code = parser.translate()
     self.encoding = parser.encoding
     return code
Example no. 22
    def check_log(self):
        if not self.log_fn:
            # Debugging mode: xxnet is run manually, so the log is checked by a human.
            return

        with open(self.log_fn, "r") as fp:
            for line in fp.readlines():
                line = line.strip()
                line = utils.to_str(line)

                self.assertNotIn("[ERROR]", line)

        xlog.info("Log Check passed!")
Example no. 23
File: SHA3.py Project: huyle84/SHA3
def keccak(word, c=512, d=256):
    r = int(1600 - c)

    P = utils.pad_word(word, r / 8)
    P = utils.split_every(P, init_data.w / 8)
    P = map(lambda x: utils.word_to_int(x), P)
    P = utils.split_every(P, r / init_data.w)
    P = map(partial(map, utils.little_endian), P)

    S = [[0] * 5 for i in range(init_data.box_size)]

    for Pi in P:
        for x in range(init_data.box_size):
            for y in range(init_data.box_size):
                if ((x + 5 * y) < (r / init_data.w)):
                    S[x][y] ^= Pi[x + 5 * y]
        S = keccak_f(S)

    Z = utils.to_str(
        map(partial(map, utils.little_endian), (matrix(S).transpose().rows())))

    return utils.to_str(map(lambda x: format(x, '016x'), Z))[:d / 4]
Example no. 24
def direct_callback(channel, method_frame, header_frame, body):
    # the message body is a JSON-encoded todo; decode the bytes, then parse
    todo = json.loads(to_str(body))
    # Pretend there's some processing in the background...
    # Once it's complete, send the todo to the complete() coroutine.
    if not todo['completed']:
        random_sleep_time = random.randint(2, 4)
        print(
            f"Todo with id: {todo['id']} by User: {todo['userId']} is not complete... waiting for {random_sleep_time} seconds..."
        )
        time.sleep(random_sleep_time)
        todo['completed'] = True
    complete().send(todo)
    channel.basic_ack(delivery_tag=method_frame.delivery_tag)
Example no. 25
def get_all_talks_from_room(room):
    for session_name in cf.session_names:
        talks = cf.talk_sessions[session_name]
        talks = [talks[talk_id] for talk_id in talks if in_room(room, talks[talk_id])]
        talks = sorted(talks, key=itemgetter('title'))
        for talk in talks:
            speakers  = ut.to_str(talk['speakers'])
            title     = ut.to_str(talk['title'])
            timerange = ut.to_str(talk.get('timerange', '').split(';')[0])
            try:
                start_date, start_hour, end_day, end_hour = talk_schedule_2(*timerange.split(', '))
            except Exception:
                start_date, start_hour, end_day, end_hour = '', '', '', ''

            yield {'speaker': speakers,
                   'title': title,
                   'talk': title,
                   'track_title': room,
                   'start_date': start_date,
                   'start_time': start_hour,
                   'end_date': end_day,
                   'end_time': end_hour}
Example no. 26
def test_to_str():
    input_data_set = ["x", b"x", 1, [], None]
    expected_data_set = ["x", "x", "1", "[]", "None"]

    for i in range(len(input_data_set)):
        input_data = input_data_set[i]
        expected_data = expected_data_set[i]
        output_data = to_str(input_data)

        error = (f"failed to convert {input_data!r} to str:"
                 f" got {output_data!r}, while expected {expected_data!r}")

        assert output_data == expected_data, error
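
A minimal sketch of a to_str that would satisfy this test; it is an assumption, since the real helper differs between the projects above (some accept an encoding argument or multiple positional values).

def to_str(value, coding="utf-8"):
    # bytes-like input is decoded, everything else is passed through str()
    if isinstance(value, (bytes, bytearray, memoryview)):
        return bytes(value).decode(coding)
    if isinstance(value, str):
        return value
    return str(value)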
Example no. 27
    def test_get(self):
        server = simple_http_server.HTTPServer(
            ('', 8880), simple_http_server.TestHttpServer, ".")
        server.start()

        client = simple_http_client.Client(timeout=5)
        url = "http://localhost:8880/test"
        res = client.request("GET", url)
        self.assertEqual(res.status, 200)
        content = utils.to_str(res.text)
        print(content)

        server.shutdown()
Example no. 28
    def set_proxy_applist(self):
        xlog.debug("set_proxy_applist %r", self.postvars)
        config.proxy_by_app = int(
            self.postvars.get(b'proxy_by_app') == [b"true"])
        config.enabled_app_list = utils.to_str(
            self.postvars.get(b"enabled_app_list[]", []))
        xlog.debug("set_proxy_applist proxy_by_app:%s", config.proxy_by_app)
        xlog.debug("set_proxy_applist enabled_app_list:%s",
                   config.enabled_app_list)
        config.save()

        data = {"res": "success"}
        self.send_response("text/html", json.dumps(data))
Example no. 29
def do_failover(cfg, tags):
    """
    :param cfg(dict): json data
    :param tags(list): tag list
    """
    create_user_chains(cfg)
    for tag in tags:
        preUserChainName = cfg[tag]['prerouting']['chain']
        postUserChainName = cfg[tag]['postrouting']['chain']
        preUserChain = NATChain(preUserChainName)
        postUserChain = NATChain(postUserChainName)
        primaryIp = cfg[tag]['dstip']['primary']
        secondaryIp = cfg[tag]['dstip']['secondary']

        foip = None  # failover dstip
        dstip = preUserChain.fetch_dst_ip()
        # we don't check whether the failover ip is available;
        # we just do the failover if rules are found
        if dstip is None:
            print('{tag}: chain is empty, apply rules first.'.format(
                tag=to_str(tag)))
            continue
        elif dstip == primaryIp:
            foip = secondaryIp
        elif dstip == secondaryIp:
            foip = primaryIp

        if foip is not None:
            # reload iptables rules
            preUserChain.flush()
            for rule in cfg[tag]['prerouting']['rules']:
                to_destination = ':'.join((foip, rule['to_ports']))
                preUserChain.append_pre_rule(rule['proto'], rule['dport'],
                                             rule['target'], to_destination)
            postUserChain.flush()
            for rule in cfg[tag]['postrouting']['rules']:
                postUserChain.append_post_rule(rule['proto'], foip,
                                               rule['target'])
            print('{tag}: succeed to failover'.format(tag=to_str(tag)))
Example no. 30
def call_api(path, req_info):
    if not path.startswith("/"):
        path = "/" + path

    try:
        upload_post_data = json.dumps(req_info)
        upload_post_data = encrypt_data(upload_post_data)

        start_time = time.time()
        while time.time() - start_time < 30:
            content, status, response = g.http_client.request(method="POST", host=g.config.api_server, path=path,
                                                              headers={"Content-Type": "application/json"},
                                                              data=upload_post_data, timeout=5)
            if status >= 400:
                time.sleep(1)
                continue
            else:
                break

        time_cost = time.time() - start_time
        if status != 200:
            reason = "status:%r" % status
            xlog.warn("api:%s fail:%s t:%d", path, reason, time_cost)
            g.last_api_error = reason
            return False, reason

        content = decrypt_data(content)
        if isinstance(content, memoryview):
            content = content.tobytes()

        content = utils.to_str(content)
        try:
            info = json.loads(content)
        except Exception as e:
            g.last_api_error = "parse json fail"
            xlog.warn("api:%s parse json:%s fail:%r", path, content, e)
            return False, "parse json fail"

        res = info["res"]
        if res != "success":
            g.last_api_error = info["reason"]
            xlog.warn("api:%s fail:%s", path, info["reason"])
            return False, info["reason"]

        xlog.info("api:%s success t:%d", path, time_cost * 1000)
        g.last_api_error = ""
        return True, info
    except Exception as e:
        xlog.exception("order e:%r", e)
        g.last_api_error = "%r" % e
        return False, "except:%r" % e
Example no. 31
def query_dns_from_xxnet(domain, dns_type=None):
    if not g.x_tunnel:
        return []

    t0 = time.time()
    content, status, response = g.x_tunnel.front_dispatcher.request(
        "GET",
        "dns.xx-net.org",
        path="/query?domain=%s" % (utils.to_str(domain)),
        timeout=5)
    t1 = time.time()

    if status != 200:
        xlog.warn("query_dns_from_xxnet fail status:%d, cost=%f", status,
                  t1 - t0)
        return []

    if isinstance(content, memoryview):
        content = content.tobytes()

    content = utils.to_str(content)

    try:
        rs = json.loads(content)
        ips = rs["ip"]
        xlog.debug("query_dns_from_xxnet %s cost:%f return:%s", domain,
                   t1 - t0, ips)
        #if dns_type == 1:
        #    ips = [ip for ip in ips if "." in ip]
        ips_out = []
        for ip_cn in ips:
            ip, cn = ip_cn.split("|")
            ips_out.append(ip)
        return ips_out
    except Exception as e:
        xlog.warn("query_dns_from_xxnet %s json:%s parse fail:%s", domain,
                  content, e)
        return []
Example no. 32
    def get_last_lines(self, max_lines):
        self.buffer_lock.acquire()
        buffer_len = len(self.buffer)
        if buffer_len > max_lines:
            first_no = self.last_no - max_lines
        else:
            first_no = self.last_no - buffer_len + 1

        jd = {}
        if buffer_len > 0:
            for i in range(first_no, self.last_no + 1):
                jd[i] = utils.to_str(self.buffer[i])
        self.buffer_lock.release()
        return json.dumps(jd)
Example no. 33
 def set_header(self, name, value):
     """
     Set the value of the specified header.
     >>> r = Response()
     >>> r.header('content-type')
     'text/html; charset=utf-8'
     >>> r.set_header('CONTENT-type', 'image/png')
     >>> r.header('content-TYPE')
     'image/png'
     """
     key = name.upper()
     if key not in _RESPONSE_HEADER_DICT:
         key = name
     self._headers[key] = utils.to_str(value)
Example no. 34
    def set_header(self, name, value):
        """
        Set the value of the specified header.

        >>> r = Response()
        >>> r.header('content-type')
        'text/html; charset=utf-8'
        >>> r.set_header('CONTENT-type', 'image/png')
        >>> r.header('content-TYPE')
        'image/png'
        """
        key = name.upper()
        if key not in _RESPONSE_HEADER_DICT:
            key = name
        self._headers[key] = utils.to_str(value)
Example no. 35
    def set_header(self, name, value):
        '''
        Set header by name and value.

        >>> r = Response()
        >>> r.header('content-type')
        'text/html; charset=utf-8'
        >>> r.set_header('CONTENT-type', 'image/png')
        >>> r.header('content-TYPE')
        'image/png'
        '''
        key = name.upper()
        if not key in _RESPONSE_HEADER_DICT:
            key = name
        self._headers[key] = to_str(value)
Example no. 36
    def set_ips(self, domain, ips, type=None, rule="direct"):
        record = self.get(domain)
        if rule != "direct":
            record["r"] = rule

        for ipd in ips:
            ipd = utils.to_str(ipd)
            ipl = ipd.split("|")
            ip = ipl[0]
            cn = ipl[1]
            record["ip"][ip] = cn

        record["update"] = time.time()

        self.set(domain, record)
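
A hypothetical call, assuming the cache instance above is named dns_cache and using documentation-range addresses: each entry is an "ip|country" pair, and the country code becomes the value stored under record["ip"].

dns_cache.set_ips("example.com", [b"203.0.113.7|US", b"203.0.113.8|DE"], rule="direct")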
Example no. 37
  def process(self, str):
    if isinstance(str, dict):
      str = json.dumps(str)

    str = to_str(str)
    if len(str) > MAX_SIZE:
      return {}

    self.connect()
    self.sendall(str)
    size = len(str) * 10 if not self.force_max else MAX_SIZE

    response_str = self.receive(size)
    self.close()

    return self.format_fun(response_str)
Example no. 38
def os_detail():
    if sys.platform == "win32":
        return win32_version_string()
    elif sys.platform.startswith("linux"):
        distribution = linux_distribution()
        if distribution is None:
            return "plat:%s release:%s ver:%s" % (
                platform.platform(), platform.release(), platform.version())
        else:
            return utils.to_str(distribution)
    elif sys.platform == "darwin":
        release, versioninfo, machine = platform.mac_ver()
        return "Release:%s; Version:%s Machine:%s" % (release, versioninfo,
                                                      machine)
    else:
        return "None"
Example no. 39
    def policy_black_port(self, host, port):
        content = get_serving_pacfile()

        proxy = host + ":" + str(port)
        content = content.replace(self.PROXY_LISTEN, proxy)

        black_list = tuple([domain[1:] for domain in g.gfwlist.gfw_black_list])
        white_list = tuple([domain[1:] for domain in g.gfwlist.gfw_white_list])

        black = b'",\n"'.join(black_list
                             + g.user_rules.rule_lists["gae"]
                             + g.user_rules.rule_lists["socks"]
                             )
        white = b'",\n"'.join(white_list + g.user_rules.rule_lists["direct"])

        content = content.replace("BLACK_LIST", utils.to_str(black)).replace("WHITE_LIST", utils.to_str(white))
        return content
Example no. 40
    def __repr__(self):

        return to_str(
               '\n'\
               'ID:          %s\n'\
               'Continent:   %s\n'\
               'Country:     %s\n'\
               'City:        %s\n'\
               'Min Cost:    %s\n'\
               'Max Cost:    %s\n'\
               % (self.provider['id'],
                  self.provider['continent'],
                  self.provider['country'],
                  self.provider['city'],
                  self.provider['min_cost'],
                  self.provider['max_cost'])
        )
Example no. 41
 def __repr__(self):
     
     return to_str(
            '\n'\
            'ID:          %s\n'\
            'Continent:   %s\n'\
            'Country:     %s\n'\
            'City:        %s\n'\
            'Min Cost:    %s\n'\
            'Max Cost:    %s\n'\
            % (self.provider['id'],
               self.provider['continent'],
               self.provider['country'],
               self.provider['city'],
               self.provider['min_cost'],
               self.provider['max_cost'])
     )
Example no. 42
def run(cmd):
    cmd = shlex.split(cmd)

    try:
        # hide console in MS windows
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        startupinfo.wShowWindow = subprocess.SW_HIDE

        #out = subprocess.check_output(cmd, startupinfo=startupinfo)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, startupinfo=startupinfo)
        out, unused_err = process.communicate()
        retcode = process.poll()
        if retcode:
            return out + b"\n retcode:%s\n unused_err:%s\n" % (retcode, unused_err)
    except Exception as e:
        out = "Exception:%r" % e

    return utils.to_str(out, coding="gb18030")
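
Hypothetical usage (Windows-only, since STARTUPINFO is used unconditionally); the command is only an illustration, and gb18030 also covers plain ASCII output.

text = run("ipconfig /all")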
Example no. 43
def parse_update_versions(readme_file):
    versions = []
    try:
        fd = open(readme_file, "rb")
        lines = fd.readlines()
        p = re.compile(br'https://codeload.github.com/XX-net/XX-Net/zip/([0-9]+)\.([0-9]+)\.([0-9]+) ([0-9a-fA-F]*)')
        for line in lines:
            m = p.match(line)
            if m:
                version = m.group(1) + b"." + m.group(2) + b"." + m.group(3)
                hashsum = m.group(4).lower()
                versions.append([m.group(0), version, hashsum])
                versions = utils.to_str(versions)
                if len(versions) == 2:
                    return versions
    except Exception as e:
        xlog.exception("xxnet_version fail:%r", e)

    raise Exception("get_version_fail:%s" % readme_file)
Example no. 44
def check_push_update():
    global update_content, update_dict
    try:
        opener = get_opener()

        req_url = update_url + "?uuid=" + get_uuid() \
                  + "&version=" + update_from_github.current_version() \
                  + "&platform=" + platform.platform()
        try:
            update_content = opener.open(req_url).read()
        except Exception as e:
            xlog.exception("check_update fail:%r", e)
            return False

        update_dict = json.loads(utils.to_str(update_content))
        return True
    except Exception as e:
        xlog.exception("check_update except:%s", e)
        return
Example no. 45
    def save(self, force=False):
        time_now = time.time()
        if not force:
            if not self.need_save:
                return

            if time_now - self.last_save_time < 10:
                return

        with self.lock:
            with open(self.file_path, "w") as fd:
                for host, record in self.cache.items():
                    line = utils.to_str(host) + " " + record["r"] + " " + str(
                        record["g"]) + " "

                    fd.write(line + "\n")

        self.last_save_time = time.time()
        self.need_save = False
Example no. 46
    def test_good(self):
        good_cases = [
            (b'my bytes', 'my bytes'),
            ('no error', b'no error'),
            ('other str', 'other str'),
        ]
        for value, expected in good_cases:
            with self.subTest(value):
                self.assertEqual(expected, to_str(value))

    def test_bad(self):
        bad_cases = [
            (object(), TypeError),
            (b'\xfa\xfa', UnicodeDecodeError),
        ]
        for value, exception in bad_cases:
            with self.subTest(value):
                with self.assertRaises(exception):
                    to_str(value)
Example no. 47
    def load_module_menus(self):
        global module_menus
        new_module_menus = {}

        modules = config.all_modules
        for module in modules:
            if getattr(config, "enable_" + module) != 1:
                continue

            menu_path = os.path.join(root_path, module, "web_ui", "menu.json")  # launcher & gae_proxy modules
            if not os.path.isfile(menu_path):
                continue

            # i18n code lines (Both the locale dir & the template dir are module-dependent)
            locale_dir = os.path.abspath(os.path.join(root_path, module, 'lang'))
            stream = i18n_translator.render(locale_dir, menu_path)
            module_menu = json.loads(utils.to_str(stream))
            new_module_menus[module] = module_menu

        module_menus = sorted(iter(new_module_menus.items()), key=lambda k_and_v: (k_and_v[1]['menu_sort_id']))
Example no. 48
def in_room(room, talk):
    return ut.to_str(talk.get('track_title', '').split(', ')[0]) == room
Example no. 49
def field_generator(boundary, name, value):
    yield '--' + boundary
    yield 'Content-Disposition: form-data; name="%s"' % to_str(name)
    yield ''
    yield to_str(value)
Example no. 50
 def test_to_str_bytes(self):
     self.assertEqual('hello', to_str(b'hello'))
 def test_to_str_str(self):
     self.assertEqual('hello', to_str('hello'))